gt
stringclasses
1 value
context
stringlengths
2.05k
161k
package com.wuman.oauth.samples.foursquare;

import android.content.Context;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentActivity;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.LoaderManager;
import android.support.v4.content.Loader;
import android.text.TextUtils;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.view.Window;
import android.webkit.CookieManager;
import android.webkit.CookieSyncManager;
import android.widget.Button;
import android.widget.TextView;

import com.google.api.client.auth.oauth2.BearerToken;
import com.google.api.client.auth.oauth2.ClientParametersAuthentication;
import com.google.api.client.auth.oauth2.Credential;
import com.google.api.client.http.GenericUrl;
import com.google.api.client.util.Lists;
import com.wuman.android.auth.AuthorizationDialogController;
import com.wuman.android.auth.AuthorizationFlow;
import com.wuman.android.auth.DialogFragmentController;
import com.wuman.android.auth.OAuthManager;
import com.wuman.android.auth.oauth2.store.SharedPreferencesCredentialStore;
import com.wuman.oauth.samples.AsyncResourceLoader;
import com.wuman.oauth.samples.AsyncResourceLoader.Result;
import com.wuman.oauth.samples.OAuth;
import com.wuman.oauth.samples.R;
import com.wuman.oauth.samples.SamplesActivity;
import com.wuman.oauth.samples.SamplesConstants;

import java.io.IOException;
import java.util.logging.Logger;

import de.keyboardsurfer.android.widget.crouton.Crouton;
import de.keyboardsurfer.android.widget.crouton.Style;

/**
 * Sample activity demonstrating the OAuth 2.0 implicit grant flow against
 * Foursquare. The activity itself only hosts an {@link OAuthFragment}, which
 * drives the authorization through a pair of {@link android.support.v4.content.Loader}s:
 * one to obtain a token, one to delete the stored credential.
 */
public class SimpleOAuth2ImplicitActivity extends FragmentActivity {

    static final Logger LOGGER = Logger.getLogger(SamplesConstants.TAG);

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        // Must be requested before super.onCreate()/setContentView for the
        // indeterminate progress indicator to be available later.
        requestWindowFeature(Window.FEATURE_INDETERMINATE_PROGRESS);
        super.onCreate(savedInstanceState);
        FragmentManager fm = getSupportFragmentManager();
        // Only add the fragment on first creation; on configuration change it
        // is restored by the FragmentManager.
        if (fm.findFragmentById(android.R.id.content) == null) {
            OAuthFragment list = new OAuthFragment();
            fm.beginTransaction().add(android.R.id.content, list).commit();
        }
    }

    @Override
    protected void onDestroy() {
        // Dismiss any in-flight Crouton notifications to avoid leaking views.
        Crouton.cancelAllCroutons();
        super.onDestroy();
    }

    /**
     * Fragment that performs the OAuth dance. A single button toggles between
     * "get token" and "delete token"; the pending action is tracked via the
     * button's tag (a string resource id, see {@link #setButtonText(int)}).
     */
    public static class OAuthFragment extends Fragment implements
            LoaderManager.LoaderCallbacks<Result<Credential>> {

        // Loader ids: one loader per action so both can coexist in the
        // LoaderManager across the fragment lifecycle.
        private static final int LOADER_GET_TOKEN = 0;
        private static final int LOADER_DELETE_TOKEN = 1;

        private OAuthManager oauth;    // initialized in onActivityCreated
        private Button button;         // action button (get/delete token)
        private TextView message;      // shows the current access token

        @Override
        public void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            setHasOptionsMenu(true);
        }

        @Override
        public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
            super.onCreateOptionsMenu(menu, inflater);
            inflater.inflate(R.menu.delete_cookies_menu, menu);
        }

        @Override
        public boolean onOptionsItemSelected(MenuItem item) {
            switch (item.getItemId()) {
                case R.id.delete_cookies: {
                    // Clearing WebView cookies forces a fresh login on the
                    // next authorization dialog.
                    CookieSyncManager.createInstance(getActivity());
                    CookieManager cookieManager = CookieManager.getInstance();
                    cookieManager.removeAllCookie();
                    return true;
                }
                default: {
                    return super.onOptionsItemSelected(item);
                }
            }
        }

        @Override
        public View onCreateView(LayoutInflater inflater, ViewGroup container,
                Bundle savedInstanceState) {
            return inflater.inflate(R.layout.oauth_login, container, false);
        }

        @Override
        public void onViewCreated(View view, Bundle savedInstanceState) {
            button = (Button) view.findViewById(android.R.id.button1);
            setButtonText(R.string.get_token);
            message = (TextView) view.findViewById(android.R.id.text1);
            button.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    // The button's tag (an Integer string-resource id) encodes
                    // which action is currently offered.
                    if (v.getTag().equals(R.string.get_token)) {
                        // initLoader reconnects to an existing loader;
                        // restartLoader forces the work to run again.
                        if (getLoaderManager().getLoader(LOADER_GET_TOKEN) == null) {
                            getLoaderManager().initLoader(LOADER_GET_TOKEN, null,
                                    OAuthFragment.this);
                        } else {
                            getLoaderManager().restartLoader(LOADER_GET_TOKEN, null,
                                    OAuthFragment.this);
                        }
                    } else { // R.string.delete_token
                        if (getLoaderManager().getLoader(LOADER_DELETE_TOKEN) == null) {
                            getLoaderManager().initLoader(LOADER_DELETE_TOKEN, null,
                                    OAuthFragment.this);
                        } else {
                            getLoaderManager().restartLoader(LOADER_DELETE_TOKEN, null,
                                    OAuthFragment.this);
                        }
                    }
                }
            });
        }

        @Override
        public void onActivityCreated(Bundle savedInstanceState) {
            super.onActivityCreated(savedInstanceState);

            // Whether the auth dialog should be shown full-screen is a
            // user preference stored by SamplesActivity.
            boolean fullScreen = getActivity().getSharedPreferences("Preference", 0)
                    .getBoolean(SamplesActivity.KEY_AUTH_MODE, false);

            // setup credential store
            SharedPreferencesCredentialStore credentialStore =
                    new SharedPreferencesCredentialStore(getActivity(),
                            SamplesConstants.CREDENTIALS_STORE_PREF_FILE, OAuth.JSON_FACTORY);

            // setup authorization flow; the implicit grant has no client
            // secret, hence the null secret in ClientParametersAuthentication.
            AuthorizationFlow flow = new AuthorizationFlow.Builder(
                    BearerToken.authorizationHeaderAccessMethod(),
                    OAuth.HTTP_TRANSPORT,
                    OAuth.JSON_FACTORY,
                    new GenericUrl(FoursquareConstants.TOKEN_SERVER_URL),
                    new ClientParametersAuthentication(FoursquareConstants.CLIENT_ID, null),
                    FoursquareConstants.CLIENT_ID,
                    FoursquareConstants.AUTHORIZATION_IMPLICIT_SERVER_URL)
                    .setScopes(Lists.<String> newArrayList())
                    .setCredentialStore(credentialStore)
                    .build();

            // setup UI controller: renders the authorization page in a
            // WebView hosted by a DialogFragment.
            AuthorizationDialogController controller =
                    new DialogFragmentController(getFragmentManager(), fullScreen) {
                        @Override
                        public String getRedirectUri() throws IOException {
                            return FoursquareConstants.REDIRECT_URL;
                        }

                        @Override
                        public boolean isJavascriptEnabledForWebView() {
                            return true;
                        }

                        @Override
                        public boolean disableWebViewCache() {
                            return false;
                        }

                        @Override
                        public boolean removePreviousCookie() {
                            return false;
                        }
                    };

            // instantiate an OAuthManager instance
            oauth = new OAuthManager(flow, controller);
        }

        @Override
        public Loader<Result<Credential>> onCreateLoader(int id, Bundle args) {
            // Disable the UI while background work is running.
            getActivity().setProgressBarIndeterminateVisibility(true);
            button.setEnabled(false);
            message.setText("");
            if (id == LOADER_GET_TOKEN) {
                return new GetTokenLoader(getActivity(), oauth);
            } else {
                return new DeleteTokenLoader(getActivity(), oauth);
            }
        }

        @Override
        public void onLoadFinished(Loader<Result<Credential>> loader,
                Result<Credential> result) {
            // Show the obtained access token, or clear the display.
            if (loader.getId() == LOADER_GET_TOKEN) {
                message.setText(result.success ? result.data.getAccessToken() : "");
            } else {
                message.setText("");
            }
            // Flip the button to the opposite action on success; on failure,
            // fall back to "get token" and surface the error via Crouton.
            if (result.success) {
                if (loader.getId() == LOADER_GET_TOKEN) {
                    setButtonText(R.string.delete_token);
                } else {
                    setButtonText(R.string.get_token);
                }
            } else {
                setButtonText(R.string.get_token);
                Crouton.makeText(getActivity(), result.errorMessage, Style.ALERT).show();
            }
            getActivity().setProgressBarIndeterminateVisibility(false);
            button.setEnabled(true);
        }

        @Override
        public void onLoaderReset(Loader<Result<Credential>> loader) {
            message.setText("");
            getActivity().setProgressBarIndeterminateVisibility(false);
            button.setEnabled(true);
        }

        @Override
        public void onDestroy() {
            // Tear down both loaders so they don't redeliver stale results.
            getLoaderManager().destroyLoader(LOADER_GET_TOKEN);
            getLoaderManager().destroyLoader(LOADER_DELETE_TOKEN);
            super.onDestroy();
        }

        /**
         * Sets both the button label and its tag to the given string
         * resource id; the tag is what the click listener dispatches on.
         */
        private void setButtonText(int action) {
            button.setText(action);
            button.setTag(action);
        }

        /** Background loader that runs the implicit authorization flow. */
        private static class GetTokenLoader extends AsyncResourceLoader<Credential> {

            private final OAuthManager oauth;

            public GetTokenLoader(Context context, OAuthManager oauth) {
                super(context);
                this.oauth = oauth;
            }

            @Override
            public Credential loadResourceInBackground() throws Exception {
                // Blocks until the user completes (or aborts) the dialog.
                Credential credential =
                        oauth.authorizeImplicitly(
                                getContext().getString(R.string.token_foursquare_implicit),
                                null, null).getResult();
                LOGGER.info("token: " + credential.getAccessToken());
                return credential;
            }

            @Override
            public void updateErrorStateIfApplicable(
                    AsyncResourceLoader.Result<Credential> result) {
                Credential data = result.data;
                // An empty access token means the flow did not complete.
                result.success = !TextUtils.isEmpty(data.getAccessToken());
                result.errorMessage = result.success ? null : "error";
            }
        }

        /** Background loader that deletes the stored credential. */
        private static class DeleteTokenLoader extends AsyncResourceLoader<Credential> {

            private final OAuthManager oauth;
            // Outcome of the delete call, captured for the error-state hook.
            private boolean success;

            public DeleteTokenLoader(Context context, OAuthManager oauth) {
                super(context);
                this.oauth = oauth;
            }

            @Override
            public Credential loadResourceInBackground() throws Exception {
                success = oauth.deleteCredential(
                        getContext().getString(R.string.token_foursquare_implicit),
                        null, null).getResult();
                LOGGER.info("token deleted: " + success);
                // No credential to return after deletion.
                return null;
            }

            @Override
            public void updateErrorStateIfApplicable(Result<Credential> result) {
                result.success = success;
                result.errorMessage = result.success ? null : "error";
            }
        }
    }
}
//
//  OpenForecast - open source, general-purpose forecasting package.
//  Copyright (C) 2002-2011 Steven R. Gould
//
//  This library is free software; you can redistribute it and/or
//  modify it under the terms of the GNU Lesser General Public
//  License as published by the Free Software Foundation; either
//  version 2.1 of the License, or (at your option) any later version.
//
//  This library is distributed in the hope that it will be useful,
//  but WITHOUT ANY WARRANTY; without even the implied warranty of
//  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
//  Lesser General Public License for more details.
//
//  You should have received a copy of the GNU Lesser General Public
//  License along with this library; if not, write to the Free Software
//  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
//

package net.sourceforge.openforecast.models;

import java.util.Iterator;

import net.sourceforge.openforecast.DataPoint;
import net.sourceforge.openforecast.DataSet;
import net.sourceforge.openforecast.Observation;

/**
 * A time based forecasting model is the base class that implements much of
 * the common code for models based on a time series. In particular, it was
 * designed to support the needs of the Weighted Moving Average, as well as
 * the Single, Double and Triple Exponential Smoothing models.
 *
 * <p>These models have an advantage over other forecasting models in that
 * they smooth out peaks and troughs (or valleys) in a set of observations.
 * However, they also have several disadvantages. In particular these models
 * do not produce an actual equation. Therefore, they often are not all that
 * useful as medium-long range forecasting tools. They can only reliably be
 * used to forecast a few periods into the future.
 * @author Steven R. Gould
 * @since 0.4
 */
public abstract class AbstractTimeBasedModel extends AbstractForecastingModel
{
    /**
     * The name of the independent (time) variable used in this model.
     */
    private String timeVariable = null;

    /**
     * Initialized to the time difference (in whatever units time is reported
     * in) between two consecutive data points. You could also think of this
     * as the "delta time" between data points.
     */
    private double timeDiff = 0.0;

    /**
     * Stores the minimum number of prior periods of data required to produce
     * a forecast.
     * NOTE(review): this field is never assigned anywhere in this class -
     * {@link #init} uses a shadowing local of the same name initialized from
     * {@link #getNumberOfPeriods} - and no setMinimumNumberOfPeriods method
     * exists in this class. Kept for binary compatibility; confirm whether a
     * subclass relies on it before removing.
     */
    private int minPeriods = 0;

    /**
     * The observed values are stored for future reference. In this model,
     * unlike most others, we store all observed values. This is because these
     * models don't derive any formula from the data, so the values may be
     * needed later in order to derive future forecasts.
     */
    private DataSet observedValues;

    /**
     * The forecast values are stored to save recalculation. In this model,
     * unlike most others, we store all forecast values. This is because these
     * models don't derive any formula from the data.
     */
    private DataSet forecastValues;

    /**
     * The minimum value of the independent variable supported by this
     * forecasting model. This is dependent on the data set used to
     * initialize the model.
     * @see #maxTimeValue
     */
    private double minTimeValue;

    /**
     * The maximum value of the independent variable supported by this
     * forecasting model. This is dependent on the data set used to
     * initialize the model.
     * @see #minTimeValue
     */
    private double maxTimeValue;

    /**
     * Constructs a new time based forecasting model. For a valid model to be
     * constructed, you should call init and pass in a data set containing a
     * series of data points. The data set should also have the time variable
     * initialized to the independent time variable name.
     */
    public AbstractTimeBasedModel()
    {
    }

    /**
     * Constructs a new time based forecasting model, using the named variable
     * as the independent (time) variable.
     * @param timeVariable the name of the independent variable to use as the
     * time variable in this model.
     * @deprecated As of 0.4, replaced by {@link #AbstractTimeBasedModel}.
     */
    public AbstractTimeBasedModel( String timeVariable )
    {
        this.timeVariable = timeVariable;
    }

    /**
     * Returns the current number of periods used in this model. This is also
     * the minimum number of periods required in order to produce a valid
     * forecast. Since this varies depending on the details of the model, any
     * subclass must override this to provide the correct information.
     * @return the minimum number of periods used in this model.
     */
    protected abstract int getNumberOfPeriods();

    /**
     * Used to initialize the time based model. This method must be called
     * before any other method in the class. Since the time based model does
     * not derive any equation for forecasting, this method uses the input
     * DataSet to calculate forecast values for all values of the independent
     * time variable within the initial data set.
     * @param dataSet a data set of observations that can be used to
     * initialize the forecasting parameters of the forecasting model.
     * @throws IllegalArgumentException if the data set is null, empty, too
     * small for this model, or has inconsistent time intervals.
     */
    public void init( DataSet dataSet )
    {
        // BUG FIX: validate the data set *before* initTimeVariable
        // dereferences it; previously a null data set caused a
        // NullPointerException instead of this IllegalArgumentException.
        if ( dataSet == null || dataSet.size() == 0 )
            throw new IllegalArgumentException("Data set cannot be empty in call to init.");

        initTimeVariable( dataSet );

        // Local intentionally computed fresh from the subclass; see the
        // note on the minPeriods field above.
        int minPeriods = getNumberOfPeriods();

        if ( dataSet.size() < minPeriods )
            throw new IllegalArgumentException("Data set too small. Need "
                                               +minPeriods
                                               +" data points, but only "
                                               +dataSet.size()
                                               +" passed to init.");

        observedValues = new DataSet( dataSet );
        observedValues.sort( timeVariable );

        // Check that intervals between data points are consistent
        //  i.e. check for complete data set
        Iterator<DataPoint> it = observedValues.iterator();

        DataPoint dp = it.next();  // first data point
        double lastValue = dp.getIndependentValue(timeVariable);

        dp = it.next();  // second data point
        double currentValue = dp.getIndependentValue(timeVariable);

        // Create data set in which to save new forecast values
        forecastValues = new DataSet();

        // Determine "standard"/expected time difference between observations
        timeDiff = currentValue - lastValue;

        // Min. time value is first observation time
        minTimeValue = lastValue;

        while ( it.hasNext() )
        {
            lastValue = currentValue;

            // Get next data point
            dp = it.next();
            currentValue = dp.getIndependentValue(timeVariable);

            double diff = currentValue - lastValue;
            if ( Math.abs(timeDiff - diff) > TOLERANCE )
                throw new IllegalArgumentException( "Inconsistent intervals found in time series, using variable '"+timeVariable+"'" );

            try
            {
                initForecastValue( currentValue );
            }
            catch (IllegalArgumentException ex)
            {
                // We can ignore these during initialization: early time
                // values may not yet have enough history to forecast.
            }
        }

        // Create test data set for determining accuracy indicators
        //  - same as input data set, but without the first n data points
        DataSet testDataSet = new DataSet( observedValues );

        int count = 0;
        while ( count++ < minPeriods )
            testDataSet.remove( (testDataSet.iterator()).next() );

        // Calculate accuracy
        calculateAccuracyIndicators( testDataSet );
    }

    /**
     * Initializes the time variable from the given data set. If the data set
     * does not have a time variable explicitly defined, then provided there
     * is only one independent variable defined for the data set that is used
     * as the time variable. If more than one independent variable is defined
     * for the data set, then it is not possible to take an educated guess at
     * which one is the time variable. In this case, an
     * IllegalArgumentException will be thrown.
     * @param dataSet the data set to use to initialize the time variable.
     * @throws IllegalArgumentException If more than one independent variable
     * is defined for the data set and no time variable has been specified. To
     * correct this, be sure to explicitly specify the time variable in the
     * data set passed to {@link #init}.
     */
    protected void initTimeVariable( DataSet dataSet )
        throws IllegalArgumentException
    {
        if ( timeVariable == null )
        {
            // Time variable not set, so look at independent variables
            timeVariable = dataSet.getTimeVariable();
            if ( timeVariable == null )
            {
                String[] independentVars = dataSet.getIndependentVariables();
                if ( independentVars.length != 1 )
                    throw new IllegalArgumentException("Unable to determine the independent time variable for the data set passed to init for "+toString()+". Please use DataSet.setTimeVariable before invoking model.init.");

                timeVariable = independentVars[0];
            }
        }
    }

    /**
     * A helper method that computes - and caches in the private DataSet
     * forecastValues - the forecast value for the given value of the
     * independent time variable. Forecast values are pre-computed this way
     * because they are so dependent on the original data set. This method
     * also maintains the private member variable maxTimeValue.
     * @param timeValue the value of the independent time variable for which
     * a forecast value is required.
     * @return the forecast value of the dependent variable for the given
     * value of the independent time variable.
     * @throws IllegalArgumentException if there is insufficient historical
     * data to produce a forecast for the given time value.
     */
    private double initForecastValue( double timeValue )
        throws IllegalArgumentException
    {
        // Temporary store for current forecast value
        double forecast = forecast(timeValue);

        // Create new forecast data point
        DataPoint dpForecast = new Observation( forecast );
        dpForecast.setIndependentValue( timeVariable, timeValue );

        // Add new data point to forecast set
        forecastValues.add( dpForecast );

        // Update maximum time value, if necessary
        if ( timeValue > maxTimeValue )
            maxTimeValue = timeValue;

        return forecast;
    }

    /**
     * Using the current model parameters (initialized in init), apply the
     * forecast model to the given data point. The data point must have a
     * valid value for the independent variable. Upon return, the value of the
     * dependent variable will be updated with the forecast value computed for
     * that data point.
     * @param dataPoint the data point for which a forecast value (for the
     * dependent variable) is required.
     * @return the same data point passed in but with the dependent value
     * updated to contain the new forecast value.
     * @throws ModelNotInitializedException if forecast is called before the
     * model has been initialized with a call to init.
     * @throws IllegalArgumentException if the forecast period specified by
     * the dataPoint is invalid with respect to the historical data provided.
     */
    public double forecast( DataPoint dataPoint )
        throws IllegalArgumentException
    {
        if ( !initialized )
            throw new ModelNotInitializedException();

        // Get value of independent variable (the time variable)
        double t = dataPoint.getIndependentValue( timeVariable );

        return getForecastValue( t );
    }

    /**
     * Returns the forecast value of the dependent variable for the given
     * value of the independent time variable. Subclasses must implement
     * this method in such a manner consistent with the forecasting model
     * they implement. Subclasses can make use of the getForecastValue and
     * getObservedValue methods to obtain "earlier" forecasts and
     * observations respectively.
     * @param timeValue the value of the time variable for which a forecast
     * value is required.
     * @return the forecast value of the dependent variable for the given
     * time.
     * @throws IllegalArgumentException if there is insufficient historical
     * data - observations passed to init - to generate a forecast for the
     * given time value.
     */
    protected abstract double forecast( double timeValue )
        throws IllegalArgumentException;

    /**
     * Returns the forecast value for the dependent variable for the given
     * value of the independent time variable. This method is only intended
     * for use by models that base future forecasts, in part, on past
     * forecasts.
     * @param timeValue the value of the independent time variable for which
     * the forecast value is required. This value must be greater than the
     * minimum time value defined by the observations passed into the init
     * method.
     * @return the forecast value of the dependent variable for the given
     * value of the independent time variable.
     * @throws IllegalArgumentException if the given value of the time
     * variable was not a valid value for forecasts.
     */
    protected double getForecastValue( double timeValue )
        throws IllegalArgumentException
    {
        if ( timeValue>=minTimeValue-TOLERANCE
             && timeValue<=maxTimeValue+TOLERANCE )
        {
            // Find required forecast value in set of
            //  pre-computed forecasts
            Iterator<DataPoint> it = forecastValues.iterator();
            while ( it.hasNext() )
            {
                DataPoint dp = it.next();
                double currentTime
                    = dp.getIndependentValue( timeVariable );

                // If required data point found,
                //  return pre-computed forecast
                if ( Math.abs(currentTime-timeValue) < TOLERANCE )
                    return dp.getDependentValue();
            }
        }

        try
        {
            return initForecastValue( timeValue );
        }
        catch ( IllegalArgumentException idex )
        {
            // BUG FIX: preserve the original exception as the cause instead
            // of discarding it.
            throw new IllegalArgumentException(
                "Time value (" + timeValue
                + ") invalid for Time Based forecasting model. Valid values are in the range "
                + minTimeValue + "-" + maxTimeValue
                + " in increments of " + timeDiff + ".", idex );
        }
    }

    /**
     * Returns the observed value of the dependent variable for the given
     * value of the independent time variable.
     * @param timeValue the value of the independent time variable for which
     * the observed value is required.
     * @return the observed value of the dependent variable for the given
     * value of the independent time variable.
     * @throws IllegalArgumentException if the given value of the time
     * variable was not found in the observations originally passed to init.
     */
    protected double getObservedValue( double timeValue )
        throws IllegalArgumentException
    {
        // Find required forecast value in set of
        //  pre-computed forecasts
        Iterator<DataPoint> it = observedValues.iterator();
        while ( it.hasNext() )
        {
            DataPoint dp = it.next();
            double currentTime
                = dp.getIndependentValue( timeVariable );

            // If required data point found,
            //  return pre-computed forecast
            if ( Math.abs(currentTime-timeValue) < TOLERANCE )
                return dp.getDependentValue();
        }

        throw new
            IllegalArgumentException("No observation found for time value, "
                                     +timeVariable+"="+timeValue);
    }

    /**
     * Returns the name of the independent variable representing the time
     * value used by this model.
     * @return the name of the independent variable representing the time
     * value.
     */
    public String getTimeVariable()
    {
        return timeVariable;
    }

    /**
     * Returns the minimum value of the independent time variable currently
     * forecast by this model.
     * @return the minimum value of the independent time variable.
     */
    public double getMinimumTimeValue()
    {
        return minTimeValue;
    }

    /**
     * Returns the maximum value of the independent time variable currently
     * forecast by this model.
     * @return the maximum value of the independent time variable.
     */
    public double getMaximumTimeValue()
    {
        return maxTimeValue;
    }

    /**
     * Returns the independent variable - or the time variable - used in this
     * model.
     * @return the independent variable in this model.
     */
    public String getIndependentVariable()
    {
        return timeVariable;
    }

    /**
     * Returns the current time interval between observations.
     * @return the current time interval between observations.
     */
    protected double getTimeInterval()
    {
        return timeDiff;
    }

    /**
     * Returns a one or two word name of this type of forecasting model. Keep
     * this short. A longer description should be implemented in the toString
     * method.
     * @return a string representation of the type of forecasting model
     * implemented.
     */
    public String getForecastType()
    {
        return "Time Based Model";
    }

    /**
     * This should be overridden to provide a textual description of the
     * current forecasting model including, where possible, any derived
     * parameters used.
     * @return a string representation of the current forecast model, and its
     * parameters.
     */
    public String toString()
    {
        return "time based model, spanning "
            + getNumberOfPeriods() + " periods and using a time variable of "
            + timeVariable+".";
    }
}
// Local Variables:
// tab-width: 4
// End:
package de.tum.in.www1.exerciseapp.service;

import com.google.common.io.Files;
import de.tum.in.www1.exerciseapp.ArTEMiSApp;
import de.tum.in.www1.exerciseapp.domain.Participation;
import de.tum.in.www1.exerciseapp.domain.Repository;
import org.apache.commons.io.FileUtils;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.PullResult;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevWalk;
import org.eclipse.jgit.transport.UsernamePasswordCredentialsProvider;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.web.WebAppConfiguration;
import org.springframework.transaction.annotation.Transactional;

import java.io.File;
import java.io.IOException;
import java.util.Collection;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Integration tests for {@link GitService}, exercising checkout, listing,
 * commit/push, pull and local deletion against a live Bitbucket test
 * repository (see {@code remoteTestRepo}).
 *
 * Created by Josias Montag on 11.10.16.
 */
@ActiveProfiles(profiles = "dev,jira,bamboo,bitbucket")
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(classes = ArTEMiSApp.class)
@WebAppConfiguration
@SpringBootTest
@Transactional
public class GitServiceIntTest {

    private final Logger log = LoggerFactory.getLogger(GitService.class);

    private String remoteTestRepo = "http://127.0.0.1:7990/scm/test/testrepo.git";

    // BUG FIX: JUnit 4 requires a public no-arg constructor; the previous
    // constructor taking a GitService made SpringJUnit4ClassRunner unable to
    // instantiate this class. Use Spring field injection instead.
    @Autowired
    private GitService gitService;

    @Value("${artemis.bitbucket.user}")
    private String GIT_USER;

    @Value("${artemis.bitbucket.password}")
    private String GIT_PASSWORD;

    @Test
    public void testGetOrCheckoutRepositoryForNewRepo() throws IOException, GitAPIException {
        Participation participation = new Participation();
        participation.setRepositoryUrl(remoteTestRepo);
        Repository repo = gitService.getOrCheckoutRepository(participation);
        assertThat(repo.getBranch()).isEqualTo("master");
        assertThat(repo.getDirectory()).exists();
        gitService.deleteLocalRepository(repo);
    }

    @Test
    public void testDeleteLocalRepository() throws IOException, GitAPIException {
        Participation participation = new Participation();
        participation.setRepositoryUrl(remoteTestRepo);
        Repository repo = gitService.getOrCheckoutRepository(participation);
        assertThat(repo.getDirectory()).exists();
        gitService.deleteLocalRepository(repo);
        assertThat(repo.getDirectory()).doesNotExist();
    }

    @Test
    public void testGetOrCheckoutRepositoryForExistingRepo() throws IOException, GitAPIException {
        Participation participation = new Participation();
        participation.setRepositoryUrl(remoteTestRepo);
        // Second call must reuse the already-checked-out working copy.
        Repository repo = gitService.getOrCheckoutRepository(participation);
        Repository repo2 = gitService.getOrCheckoutRepository(participation);
        assertThat(repo.getDirectory()).isEqualTo(repo2.getDirectory());
        assertThat(repo2.getBranch()).isEqualTo("master");
        assertThat(repo2.getDirectory()).exists();
        gitService.deleteLocalRepository(repo2);
    }

    @Test
    public void testListFiles() throws IOException, GitAPIException {
        Participation participation = new Participation();
        participation.setRepositoryUrl(remoteTestRepo);
        Repository repo = gitService.getOrCheckoutRepository(participation);
        Collection<de.tum.in.www1.exerciseapp.domain.File> files = gitService.listFiles(repo);
        assertThat(files.size()).isGreaterThan(0);
        gitService.deleteLocalRepository(repo);
    }

    @Test
    public void testCommitAndPush() throws IOException, GitAPIException {
        Participation participation = new Participation();
        participation.setRepositoryUrl(remoteTestRepo);
        Repository repo = gitService.getOrCheckoutRepository(participation);
        Ref oldHead = repo.findRef("HEAD");
        gitService.commitAndPush(repo, "test commit");
        Ref head = repo.findRef("HEAD");
        assertThat(head).isNotEqualTo(oldHead);
        // FIX: close the RevWalk and Git handles (resource leak).
        try (RevWalk walk = new RevWalk(repo)) {
            RevCommit commit = walk.parseCommit(head.getObjectId());
            assertThat(commit.getFullMessage()).isEqualTo("test commit");
        }
        // get remote ref and verify the push actually reached the server
        try (Git git = new Git(repo)) {
            Collection<Ref> refs = git.lsRemote().setHeads(true).call();
            Ref remoteHead = refs.iterator().next();
            assertThat(head.getObjectId()).isEqualTo(remoteHead.getObjectId());
        }
        gitService.deleteLocalRepository(repo);
    }

    @Test
    public void testPull() throws IOException, GitAPIException {
        Participation participation = new Participation();
        participation.setRepositoryUrl(remoteTestRepo);
        Repository repo = gitService.getOrCheckoutRepository(participation);
        Ref oldHead = repo.findRef("HEAD");

        // Create a commit on the remote via a second, independent clone so
        // there is something for the service under test to pull.
        File tempDir = Files.createTempDir();
        // FIX: close the clone's Git handle before deleting its directory.
        try (Git git = Git.cloneRepository()
                .setURI(remoteTestRepo)
                .setCredentialsProvider(new UsernamePasswordCredentialsProvider(GIT_USER, GIT_PASSWORD))
                .setDirectory(tempDir)
                .call()) {
            git.commit().setMessage("a commit").setAllowEmpty(true).call();
            git.push().setCredentialsProvider(new UsernamePasswordCredentialsProvider(GIT_USER, GIT_PASSWORD)).call();
        }

        // pull
        PullResult pullResult = gitService.pull(repo);
        Ref newHead = repo.findRef("HEAD");
        assertThat(oldHead).isNotEqualTo(newHead);
        // FIX: close the RevWalk (resource leak).
        try (RevWalk walk = new RevWalk(repo)) {
            RevCommit commit = walk.parseCommit(newHead.getObjectId());
            assertThat(commit.getFullMessage()).isEqualTo("a commit");
        }
        FileUtils.deleteDirectory(tempDir);
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common.xcontent;

import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.lease.Releasable;

import java.io.IOException;
import java.util.Map;

/**
 * Interface for pull-parsing {@link XContent}; see {@link XContentType} for
 * supported content types.
 *
 * To obtain an instance of this class use the following pattern:
 *
 * <pre>
 *     XContentType xContentType = XContentType.JSON;
 *     XContentParser parser = xContentType.xContent().createParser("{\"key\" : \"value\"}");
 * </pre>
 */
public interface XContentParser extends Releasable {

    /**
     * The kinds of events a pull parser emits. Structural tokens
     * (object/array delimiters, field names) answer {@code false} to
     * {@link #isValue()}; scalar value tokens answer {@code true}.
     * Note that {@code VALUE_NULL} is deliberately NOT a value token.
     */
    enum Token {
        START_OBJECT {
            @Override
            public boolean isValue() {
                return false;
            }
        },

        END_OBJECT {
            @Override
            public boolean isValue() {
                return false;
            }
        },

        START_ARRAY {
            @Override
            public boolean isValue() {
                return false;
            }
        },

        END_ARRAY {
            @Override
            public boolean isValue() {
                return false;
            }
        },

        FIELD_NAME {
            @Override
            public boolean isValue() {
                return false;
            }
        },

        VALUE_STRING {
            @Override
            public boolean isValue() {
                return true;
            }
        },

        VALUE_NUMBER {
            @Override
            public boolean isValue() {
                return true;
            }
        },

        VALUE_BOOLEAN {
            @Override
            public boolean isValue() {
                return true;
            }
        },

        // usually a binary value
        VALUE_EMBEDDED_OBJECT {
            @Override
            public boolean isValue() {
                return true;
            }
        },

        VALUE_NULL {
            @Override
            public boolean isValue() {
                return false;
            }
        };

        /** Whether this token represents a scalar value (null excluded). */
        public abstract boolean isValue();
    }

    /** The numeric type of the current {@code VALUE_NUMBER} token. */
    enum NumberType {
        INT, LONG, FLOAT, DOUBLE
    }

    /** The content type this parser reads (e.g. JSON). */
    XContentType contentType();

    /** Advances to - and returns - the next token in the stream. */
    Token nextToken() throws IOException;

    /** Skips the children of the current structural token, if any. */
    void skipChildren() throws IOException;

    /** Returns the token the parser currently points at, without advancing. */
    Token currentToken();

    /** Returns the name of the current field. */
    String currentName() throws IOException;

    /** Parses the current object into a map. */
    Map<String, Object> map() throws IOException;

    /** Like {@link #map()} but preserves field ordering. */
    Map<String, Object> mapOrdered() throws IOException;

    /** Like {@link #map()}, additionally closing the parser afterwards. */
    Map<String, Object> mapAndClose() throws IOException;

    /** Like {@link #mapOrdered()}, additionally closing the parser afterwards. */
    Map<String, Object> mapOrderedAndClose() throws IOException;

    /** Returns the textual content of the current token. */
    String text() throws IOException;

    /** Like {@link #text()}, but returns null for a {@link Token#VALUE_NULL} token. */
    String textOrNull() throws IOException;

    /**
     * Returns a BytesRef holding UTF-8 bytes or null if a null value is {@link Token#VALUE_NULL}.
     * This method should be used to read text only; binary content should be read through {@link #binaryValue()}
     */
    BytesRef utf8BytesOrNull() throws IOException;

    /**
     * Returns a BytesRef holding UTF-8 bytes.
     * This method should be used to read text only; binary content should be read through {@link #binaryValue()}
     */
    BytesRef utf8Bytes() throws IOException;

    /**
     * Returns a BytesRef holding UTF-8 bytes or null if a null value is {@link Token#VALUE_NULL}.
     * This method should be used to read text only; binary content should be read through {@link #binaryValue()}
     * @deprecated use {@link #utf8BytesOrNull()} instead
     */
    @Deprecated
    BytesRef bytesOrNull() throws IOException;

    /**
     * Returns a BytesRef holding UTF-8 bytes.
     * This method should be used to read text only; binary content should be read through {@link #binaryValue()}
     * @deprecated use {@link #utf8Bytes()} instead
     */
    @Deprecated
    BytesRef bytes() throws IOException;

    /** Returns the current value as text, boxed in an implementation-defined object. */
    Object objectText() throws IOException;

    /** Returns the current value as bytes, boxed in an implementation-defined object. */
    Object objectBytes() throws IOException;

    /**
     * Method that can be used to determine whether calling of textCharacters() would be the most efficient way to
     * access textual content for the event parser currently points to.
     *
     * Default implementation simply returns false since only actual
     * implementation class has knowledge of its internal buffering
     * state.
     *
     * This method shouldn't be used to check if the token contains text or not.
     */
    boolean hasTextCharacters();

    /** Returns the raw character buffer of the current text token; see {@link #hasTextCharacters()}. */
    char[] textCharacters() throws IOException;

    /** Length of the text within the buffer returned by {@link #textCharacters()}. */
    int textLength() throws IOException;

    /** Offset of the text within the buffer returned by {@link #textCharacters()}. */
    int textOffset() throws IOException;

    /** Returns the current numeric value boxed as a {@link Number}. */
    Number numberValue() throws IOException;

    /** Returns the {@link NumberType} of the current numeric token. */
    NumberType numberType() throws IOException;

    /**
     * Is the number type estimated or not (i.e. an int might actually be a long, it's just low enough
     * to be an int).
     */
    boolean estimatedNumberType();

    short shortValue(boolean coerce) throws IOException;

    int intValue(boolean coerce) throws IOException;

    long longValue(boolean coerce) throws IOException;

    float floatValue(boolean coerce) throws IOException;

    double doubleValue(boolean coerce) throws IOException;

    short shortValue() throws IOException;

    int intValue() throws IOException;

    long longValue() throws IOException;

    float floatValue() throws IOException;

    double doubleValue() throws IOException;

    /**
     * returns true if the current value is boolean in nature.
     * values that are considered booleans:
     * - boolean value (true/false)
     * - numeric integers (=0 is considered as false, !=0 is true)
     * - one of the following strings: "true","false","on","off","yes","no","1","0"
     */
    boolean isBooleanValue() throws IOException;

    boolean booleanValue() throws IOException;

    /**
     * Reads a plain binary value that was written via one of the following methods:
     *
     * <ul>
     *     <li>{@link XContentBuilder#field(String, org.elasticsearch.common.bytes.BytesReference)}</li>
     *     <li>{@link XContentBuilder#field(String, byte[], int, int)}</li>
     *     <li>{@link XContentBuilder#field(String, byte[])}</li>
     * </ul>
     *
     * as well as via their <code>XContentBuilderString</code> variants of the separated value methods.
     * Note: Do not use this method to read values written with:
     * <ul>
     *     <li>{@link XContentBuilder#utf8Field(XContentBuilderString, org.apache.lucene.util.BytesRef)}</li>
     *     <li>{@link XContentBuilder#utf8Field(String, org.apache.lucene.util.BytesRef)}</li>
     * </ul>
     *
     * these methods write UTF-8 encoded strings and must be read through:
     * <ul>
     *     <li>{@link XContentParser#utf8Bytes()}</li>
     *     <li>{@link XContentParser#utf8BytesOrNull()}</li>
     *     <li>{@link XContentParser#text()}</li>
     *     <li>{@link XContentParser#textOrNull()}</li>
     *     <li>{@link XContentParser#textCharacters()}</li>
     * </ul>
     */
    byte[] binaryValue() throws IOException;

    /** Whether this parser has been closed. */
    boolean isClosed();
}
/**
 * Copyright (C) 2004-2011 Jive Software. All rights reserved.
 * This plugin is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
package org.jivesoftware.sparkimpl.plugin.idle;

import org.jivesoftware.Spark;
import org.jivesoftware.resource.Res;
import org.jivesoftware.smack.packet.Presence;
import org.jivesoftware.spark.SparkManager;
import org.jivesoftware.spark.plugin.Plugin;
import org.jivesoftware.spark.util.StringUtils;
import org.jivesoftware.spark.util.log.Log;
import org.jivesoftware.sparkimpl.plugin.idle.linux.LinuxIdleTime;
import org.jivesoftware.sparkimpl.plugin.idle.mac.MacIdleTime;
import org.jivesoftware.sparkimpl.plugin.idle.windows.Win32IdleTime;
import org.jivesoftware.sparkimpl.plugin.idle.windows.WinLockListener;
//import org.jivesoftware.sparkimpl.plugin.phone.PhonePlugin;
import org.jivesoftware.sparkimpl.settings.local.LocalPreferences;
import org.jivesoftware.sparkimpl.settings.local.SettingsManager;

import java.util.Timer;
import java.util.TimerTask;

/**
 * Plugin that polls the OS idle time and automatically switches the user's
 * presence to "away" (and back) when the configured idle threshold is
 * crossed, or when the Windows desktop is locked.
 */
public class UserIdlePlugin extends TimerTask implements Plugin {

    /** Interval, in seconds, between idle-time polls. */
    private static final int CHECK_INTERVAL_SECONDS = 2;

    /** Delay, in milliseconds, before the first idle-time poll. */
    private static final long INITIAL_DELAY_MS = 1000 * 10;

    /** True once an idle presence has been pushed and not yet reverted. */
    private boolean hasChanged = false;
    private static final LocalPreferences pref = SettingsManager.getLocalPreferences();
    /** Presence that was active before the user went idle; restored when the user returns. */
    private static Presence latestPresence;
    /** Status text to publish while idle. */
    private static String statustext;
    /** True while the Windows desktop is locked (maintained by {@link LockListener}). */
    private static boolean desktopLocked;
    /** Timer driving the periodic idle checks; cancelled in {@link #shutdown()}. */
    private Timer timer;

    /** Returns whether the Windows desktop is currently locked. */
    public static boolean getDesktopLockStatus() {
        return desktopLocked;
    }

    @Override
    public boolean canShutDown() {
        return false;
    }

    @Override
    public void initialize() {
        timer = new Timer();
        // Poll the idle time every CHECK_INTERVAL_SECONDS seconds, after a
        // 10-second start-up delay.
        timer.schedule(this, INITIAL_DELAY_MS, 1000L * CHECK_INTERVAL_SECONDS);
        if (Spark.isWindows()) {
            // Also track desktop lock/unlock events on Windows.
            LockListener lockListener = new LockListener();
            lockListener.intWinLockListener();
        }
    }

    /**
     * Returns the time, in milliseconds, since the last user input, using the
     * platform-specific probe.
     */
    private long getIdleTime() {
        IdleTime idleTime;
        if (Spark.isWindows()) {
            idleTime = new Win32IdleTime();
        } else if (Spark.isMac()) {
            idleTime = new MacIdleTime();
        } else {
            // assume/try linux
            idleTime = new LinuxIdleTime();
        }
        return idleTime.getIdleTimeMillis();
    }

    @Override
    public void shutdown() {
        // Stop the periodic idle checks; without this the (non-daemon) timer
        // thread would keep running after the plugin is shut down.
        if (timer != null) {
            timer.cancel();
            timer = null;
        }
    }

    @Override
    public void uninstall() {
    }

    /**
     * Remembers the current presence and publishes an "away" presence, unless
     * the user is already marked away.
     */
    private void setIdle() {
        latestPresence = SparkManager.getWorkspace().getStatusBar().getPresence();
        if (latestPresence.getStatus().equals(Res.getString("status.online"))
                || latestPresence.getStatus().equals(Res.getString("status.free.to.chat"))) {
            // Plain "online" states get the user-configured idle message.
            statustext = pref.getIdleMessage();
        } else {
            // Custom statuses are preserved verbatim.
            statustext = latestPresence.getStatus();
        }
        if (latestPresence.isAway()) {
            Log.debug("UserIdlePlugin: Presence is already set to away");
        } else {
            Presence statusPresence = new Presence(Presence.Type.available,
                    StringUtils.modifyWildcards(statustext), 0, Presence.Mode.away);
            SparkManager.getSessionManager().changePresence(statusPresence);
            Log.debug("UserIdlePlugin: Setting idle presence");
        }
    }

    /**
     * Restores the presence that was active before the user went idle.
     */
    private void setOnline() {
        /* TODO commented out when PhonePlugin was not ported to Maven.
        if (PhonePlugin.onPhonePresence != null) {
            SparkManager.getSessionManager().changePresence(PhonePlugin.onPhonePresence);
            Log.debug("UserIdlePlugin: Returning from idle/lock - On the Phone");
        } else if ((latestPresence.getStatus().contains("On the phone"))
                && (PhonePlugin.offPhonePresence != null)
                && ((PhonePlugin.offPhonePresence.getMode().equals(Presence.Mode.dnd))
                || (PhonePlugin.offPhonePresence.getMode().equals(Presence.Mode.xa)))) {
            SparkManager.getSessionManager().changePresence(PhonePlugin.offPhonePresence);
            Log.debug("UserIdlePlugin: Matched DND/XA - Setting presence from PhonePlugin");
        } else if (((latestPresence.getStatus().contains("On the phone"))
                && (PhonePlugin.offPhonePresence != null)
                && (PhonePlugin.offPhonePresence.getStatus().contentEquals(statustext)))) {
            Presence presence = new Presence(Presence.Type.available,
                    PhonePlugin.offPhonePresence.getStatus(), 1, Presence.Mode.available);
            SparkManager.getSessionManager().changePresence(presence);
            Log.debug("UserIdlePlugin: Setting presence from PhonePlugin ....");
        } else if ((latestPresence.getStatus().contains("On the phone"))
                && (PhonePlugin.offPhonePresence != null)) {
            SparkManager.getSessionManager().changePresence(PhonePlugin.offPhonePresence);
            Log.debug("UserIdlePlugin: Setting presence from PhonePlugin");
        } else { */
        SparkManager.getSessionManager().changePresence(latestPresence);
        Log.debug("UserIdlePlugin: Setting presence using latestPresence");
        /*}*/
    }

    @Override
    public void run() {
        if (!pref.isIdleOn()) {
            return;
        }
        if (!SparkManager.getConnection().isConnected()) {
            return;
        }
        long idleSeconds = getIdleTime() / 1000;
        long idleThresholdSeconds = pref.getIdleTime() * 60L;
        if (Spark.isWindows()) {
            // On Windows a locked desktop forces the idle presence regardless
            // of input activity.
            if (desktopLocked && !hasChanged) {
                setIdle();
                hasChanged = true;
            } else if ((idleSeconds > idleThresholdSeconds) && !hasChanged && !desktopLocked) {
                setIdle();
                hasChanged = true;
            } else if ((idleSeconds < 10) && hasChanged && !desktopLocked) {
                setOnline();
                hasChanged = false;
            }
        } else if (Spark.isMac() || Spark.isLinux()) {
            // Mac and Linux share the same plain idle-time handling.
            if ((idleSeconds > idleThresholdSeconds) && !hasChanged) {
                setIdle();
                hasChanged = true;
            } else if ((idleSeconds < 10) && hasChanged) {
                setOnline();
                hasChanged = false;
            }
        }
    }

    /**
     * Listens for Windows session lock/unlock events and mirrors them into
     * {@link #desktopLocked}.
     */
    public static class LockListener {
        public void intWinLockListener() {
            // WinLockListener pumps a Windows message loop, so it runs on its
            // own thread.
            new Thread(() -> new WinLockListener() {
                @Override
                protected void onMachineLocked(int sessionId) {
                    desktopLocked = true;
                }

                @Override
                protected void onMachineUnlocked(int sessionId) {
                    desktopLocked = false;
                }
            }).start();
        }
    }
}
/*-
 * See the file LICENSE for redistribution information.
 *
 * Copyright (c) 2002-2010 Oracle.  All rights reserved.
 *
 * $Id: ReplicaFeederHandshake.java,v 1.41 2010/01/04 15:50:48 cwl Exp $
 */
package com.sleepycat.je.rep.stream;

import static com.sleepycat.je.rep.impl.RepParams.GROUP_NAME;
import static com.sleepycat.je.rep.impl.RepParams.MAX_CLOCK_DELTA;
import static com.sleepycat.je.rep.impl.RepParams.NODE_TYPE;

import java.io.IOException;
import java.util.logging.Level;
import java.util.logging.Logger;

import com.sleepycat.je.EnvironmentFailureException;
import com.sleepycat.je.JEVersion;
import com.sleepycat.je.dbi.DbConfigManager;
import com.sleepycat.je.dbi.EnvironmentFailureReason;
import com.sleepycat.je.log.LogEntryType;
import com.sleepycat.je.rep.NodeType;
import com.sleepycat.je.rep.impl.RepImpl;
import com.sleepycat.je.rep.impl.node.NameIdPair;
import com.sleepycat.je.rep.impl.node.RepNode;
import com.sleepycat.je.rep.stream.Protocol.DuplicateNodeReject;
import com.sleepycat.je.rep.stream.Protocol.FeederJEVersions;
import com.sleepycat.je.rep.stream.Protocol.FeederProtocolVersion;
import com.sleepycat.je.rep.stream.Protocol.JEVersionsReject;
import com.sleepycat.je.rep.stream.Protocol.NodeGroupInfoOK;
import com.sleepycat.je.rep.stream.Protocol.NodeGroupInfoReject;
import com.sleepycat.je.rep.stream.Protocol.SNTPResponse;
import com.sleepycat.je.rep.utilint.NamedChannel;
import com.sleepycat.je.rep.utilint.BinaryProtocol.Message;
import com.sleepycat.je.rep.utilint.BinaryProtocol.ProtocolException;
import com.sleepycat.je.utilint.LoggerUtils;

/**
 * Implements the Replica side of the handshake protocol between the Replica
 * and the Feeder. The FeederReplicaHandshake class takes care of the other
 * side.
 *
 * @see <a href="https://sleepycat.oracle.com/pmwiki/pmwiki.php?n=JEHomePage.ReplicaFeederHandshake">FeederReplicaHandshake</a>
 */
public class ReplicaFeederHandshake {
    /* The rep node (server or replica) */
    private final RepNode repNode;
    /* The channel on which all handshake messages are exchanged. */
    private final NamedChannel namedChannel;

    private final NameIdPair replicaNameIdPair;
    /* Identifies the feeder; established during protocol negotiation. */
    private NameIdPair feederNameIdPair;
    /* The negotiated protocol; null until negotiateProtocol() succeeds. */
    private Protocol protocol = null;

    /* The JE software versions in use by the Feeder */
    @SuppressWarnings("unused")
    private FeederJEVersions feederJEVersions;

    /*
     * The time to wait between retries to establish node info in the master.
     */
    static final int MEMBERSHIP_RETRY_SLEEP_MS = 60*1000;
    static final int MEMBERSHIP_RETRIES = 0;

    /* Fields used to track clock skew wrt the feeder. */
    private long clockDelay = Long.MAX_VALUE;
    private long clockDelta = Long.MAX_VALUE;
    /*
     * Made final: this is a constant bound on the number of SNTP samples taken
     * in checkClockSkew(), never reassigned.
     */
    private static final int CLOCK_SKEW_MAX_SAMPLE_SIZE = 5;
    private static final long CLOCK_SKEW_MIN_DELAY_MS = 2;
    private final int maxClockDelta;

    private final Logger logger;

    /**
     * An instance of this class is created with each new handshake preceding
     * the setting up of a connection.
     *
     * @param repNode the replication node
     * @param namedChannel the channel to be used for the handshake
     */
    public ReplicaFeederHandshake(RepNode repNode, NamedChannel namedChannel) {
        this.repNode = repNode;
        this.namedChannel = namedChannel;

        RepImpl repImpl = repNode.getRepImpl();
        replicaNameIdPair = repNode.getNameIdPair();
        maxClockDelta = repImpl.getConfigManager().getDuration(MAX_CLOCK_DELTA);
        logger = LoggerUtils.getLogger(getClass());
    }

    /**
     * Negotiates a protocol that both the replica and feeder can support.
     *
     * @return the common protocol
     *
     * @throws IOException
     * @throws DuplicateReplicaException
     * @throws IncompatibleFeederException if a common protocol was
     * not available
     */
    private Protocol negotiateProtocol()
        throws IOException {

        final Protocol defaultProtocol = Protocol.getProtocol(repNode);
        /* Send over the latest version protocol this replica can support. */
        defaultProtocol.write(defaultProtocol.new ReplicaProtocolVersion(),
                              namedChannel);

        /*
         * Returns the highest level the feeder can support, or the version we
         * just sent, if it can support that version
         */
        Message message = defaultProtocol.read(namedChannel);
        if (message instanceof DuplicateNodeReject) {
            throw new EnvironmentFailureException
                (repNode.getRepImpl(),
                 EnvironmentFailureReason.HANDSHAKE_ERROR,
                 "A replica with the name: " + replicaNameIdPair +
                 " is already active with the Feeder:" + feederNameIdPair);
        }

        FeederProtocolVersion feederVersion =
            ((FeederProtocolVersion) message);
        feederNameIdPair = feederVersion.getNameIdPair();
        Protocol configuredProtocol =
            Protocol.get(repNode, feederVersion.getVersion());
        LoggerUtils.fine(logger, repNode.getRepImpl(),
                         "Feeder id: " + feederVersion.getNameIdPair() +
                         "Response message: " + feederVersion.getVersion());
        namedChannel.setNameIdPair(feederNameIdPair);
        LoggerUtils.fine(logger, repNode.getRepImpl(),
                         "Channel Mapping: " + feederNameIdPair + " is at " +
                         namedChannel.getChannel());

        if (configuredProtocol == null) {
            /* The feeder's version is not one this replica understands. */
            throw new EnvironmentFailureException
                (repNode.getRepImpl(),
                 EnvironmentFailureReason.PROTOCOL_VERSION_MISMATCH,
                 "Incompatible protocol versions. " +
                 "Version: " + feederVersion.getVersion() +
                 " requested by the Feeder: " + feederNameIdPair +
                 " is not supported by this Replica: " + replicaNameIdPair +
                 ", which is at version: " + defaultProtocol.getVersion());
        }
        return configuredProtocol;
    }

    /**
     * Executes the replica side of the handshake.
     * @throws ProtocolException
     */
    public Protocol execute()
        throws IOException, ProtocolException {

        LoggerUtils.info(logger, repNode.getRepImpl(),
                         "Replica-feeder handshake start");

        /* First negotiate the protocol, then use it. */
        protocol = negotiateProtocol();

        /* Ensure that software versions are compatible. */
        verifyVersions();

        /*
         * Now perform the membership information validation part of the
         * handshake
         */
        verifyMembership();

        checkClockSkew();

        LoggerUtils.info(logger, repNode.getRepImpl(),
                         "Replica-feeder " + feederNameIdPair.getName() +
                         " handshake completed.");
        return protocol;
    }

    /**
     * Checks software and log version compatibility.
     */
    private void verifyVersions()
        throws IOException {

        protocol.write(protocol.new
                       ReplicaJEVersions(JEVersion.CURRENT_VERSION,
                                         LogEntryType.LOG_VERSION),
                       namedChannel);
        Message message = protocol.read(namedChannel);
        if (message instanceof JEVersionsReject) {
            /* The software version is not compatible with the Feeder. */
            throw new EnvironmentFailureException
                (repNode.getRepImpl(),
                 EnvironmentFailureReason.HANDSHAKE_ERROR,
                 " Feeder: " + feederNameIdPair + ". " +
                 ((JEVersionsReject) message).getErrorMessage());
        }

        /*
         * Just save it for now. Use it as the basis for compatibility checking
         * in future.
         */
        feederJEVersions = (FeederJEVersions)message;
    }

    /**
     * Exchange membership information messages.
     */
    private void verifyMembership()
        throws IOException {

        final RepImpl repImpl = repNode.getRepImpl();
        DbConfigManager configManager = repImpl.getConfigManager();
        String groupName = configManager.get(GROUP_NAME);

        NodeType nodeType =
            NODE_TYPE.getEnumerator(configManager.get(NODE_TYPE));
        Message message =
            protocol.new NodeGroupInfo(groupName,
                                       repNode.getGroup().getUUID(),
                                       replicaNameIdPair,
                                       repImpl.getHostName(),
                                       repImpl.getPort(),
                                       nodeType,
                                       repImpl.isDesignatedPrimary());
        protocol.write(message, namedChannel);
        message = protocol.read(namedChannel);
        if (message instanceof NodeGroupInfoReject) {
            NodeGroupInfoReject reject = (NodeGroupInfoReject) message;
            throw new EnvironmentFailureException
                (repImpl,
                 EnvironmentFailureReason.HANDSHAKE_ERROR,
                 " Feeder: " + feederNameIdPair + ". " +
                 reject.getErrorMessage());
        }

        if (!(message instanceof NodeGroupInfoOK)) {
            throw new EnvironmentFailureException
                (repImpl,
                 EnvironmentFailureReason.HANDSHAKE_ERROR,
                 " Feeder: " + feederNameIdPair + ". " +
                 "Protcol error. Unexpected response " + message);
        } else if (repNode.getGroup().hasUnknownUUID()) {
            /* Correct the initial UUID */
            repNode.getGroup().setUUID(((NodeGroupInfoOK)message).getUUID());
        }
    }

    /**
     * Checks for clock skew wrt the current feeder. It's important that the
     * clock skew be within an acceptable range so that replica can meet any
     * time based consistency requirements requested by transactions. The
     * intent of this check is to draw the attention of the application or the
     * administrators to the skew, not correct it.
     * <p>
     * The scheme implemented below is a variation on the scheme used by <a
     * href="http://tools.ietf.org/html/rfc2030">SNTP</a> protocol. The Feeder
     * plays the role of the SNTP server and the replica the role of the client
     * in this situation. The mechanism used here is rough and does not
     * guarantee the detection of a clock skew, especially since it's a one
     * time check done each time a connection is re-established with the
     * Feeder. The clocks could be in sync at the time of this check and drift
     * apart over the lifetime of the connection. It's also for this reason
     * that we do not store the skew value and make compensations using it when
     * determining replica consistency.
     * <p>
     * Replications nodes should therefore ensure that they are using NTP or a
     * similar time synchronization service to keep time on all the replication
     * nodes in a group in sync.
     * <p>
     *
     * @throws IOException
     * @throws EnvironmentFailureException
     * @throws ProtocolException
     */
    private void checkClockSkew()
        throws IOException, ProtocolException {

        boolean isLast = false;
        int sampleCount = 0;
        do {
            /* Iterate until we have a value that's good enough. */
            isLast = (++sampleCount >= CLOCK_SKEW_MAX_SAMPLE_SIZE) ||
                     (clockDelay <= CLOCK_SKEW_MIN_DELAY_MS);
            protocol.write(protocol.new SNTPRequest(isLast), namedChannel);
            SNTPResponse response = protocol.read(namedChannel.getChannel(),
                                                  SNTPResponse.class);
            /* Keep the sample with the smallest round-trip delay. */
            if (response.getDelay() < clockDelay) {
                clockDelay = response.getDelay();
                clockDelta = response.getDelta();
            }
        } while (!isLast);

        LoggerUtils.logMsg
            (logger, repNode.getRepImpl(),
             (Math.abs(clockDelta) >= maxClockDelta) ?
             Level.SEVERE : Level.FINE,
             "Round trip delay: " + clockDelay + " ms. " +
             "Clock delta: " + clockDelta + " ms. " +
             "Max permissible delta: " + maxClockDelta + " ms.");

        if (Math.abs(clockDelta) >= maxClockDelta) {
            throw new EnvironmentFailureException
                (repNode.getRepImpl(),
                 EnvironmentFailureReason.HANDSHAKE_ERROR,
                 "Clock delta: " + clockDelta + " ms. " +
                 "between Feeder: " + feederNameIdPair.getName() +
                 " and this Replica exceeds max permissible delta: " +
                 maxClockDelta + " ms.");
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package java.text; /** * An implementation of {@link CharacterIterator} for strings. */ public final class StringCharacterIterator implements CharacterIterator { String string; int start, end, offset; /** * Constructs a new {@code StringCharacterIterator} on the specified string. * The begin and current indices are set to the beginning of the string, the * end index is set to the length of the string. * * @param value * the source string to iterate over. */ public StringCharacterIterator(String value) { string = value; start = offset = 0; end = string.length(); } /** * Constructs a new {@code StringCharacterIterator} on the specified string * with the current index set to the specified value. The begin index is set * to the beginning of the string, the end index is set to the length of the * string. * * @param value * the source string to iterate over. * @param location * the current index. * @throws IllegalArgumentException * if {@code location} is negative or greater than the length of * the source string. 
*/ public StringCharacterIterator(String value, int location) { string = value; start = 0; end = string.length(); if (location < 0 || location > end) { throw new IllegalArgumentException(); } offset = location; } /** * Constructs a new {@code StringCharacterIterator} on the specified string * with the begin, end and current index set to the specified values. * * @param value * the source string to iterate over. * @param start * the index of the first character to iterate. * @param end * the index one past the last character to iterate. * @param location * the current index. * @throws IllegalArgumentException * if {@code start < 0}, {@code start > end}, {@code location < * start}, {@code location > end} or if {@code end} is greater * than the length of {@code value}. */ public StringCharacterIterator(String value, int start, int end, int location) { string = value; if (start < 0 || end > string.length() || start > end || location < start || location > end) { throw new IllegalArgumentException(); } this.start = start; this.end = end; offset = location; } /** * Returns a new {@code StringCharacterIterator} with the same source * string, begin, end, and current index as this iterator. * * @return a shallow copy of this iterator. * @see java.lang.Cloneable */ @Override public Object clone() { try { return super.clone(); } catch (CloneNotSupportedException e) { throw new AssertionError(e); // android-changed } } /** * Returns the character at the current index in the source string. * * @return the current character, or {@code DONE} if the current index is * past the end. */ public char current() { if (offset == end) { return DONE; } return string.charAt(offset); } /** * Compares the specified object with this {@code StringCharacterIterator} * and indicates if they are equal. In order to be equal, {@code object} * must be an instance of {@code StringCharacterIterator} that iterates over * the same sequence of characters with the same index. 
* * @param object * the object to compare with this object. * @return {@code true} if the specified object is equal to this * {@code StringCharacterIterator}; {@code false} otherwise. * @see #hashCode */ @Override public boolean equals(Object object) { if (!(object instanceof StringCharacterIterator)) { return false; } StringCharacterIterator it = (StringCharacterIterator) object; return string.equals(it.string) && start == it.start && end == it.end && offset == it.offset; } /** * Sets the current position to the begin index and returns the character at * the new position in the source string. * * @return the character at the begin index or {@code DONE} if the begin * index is equal to the end index. */ public char first() { if (start == end) { return DONE; } offset = start; return string.charAt(offset); } /** * Returns the begin index in the source string. * * @return the index of the first character of the iteration. */ public int getBeginIndex() { return start; } /** * Returns the end index in the source string. * * @return the index one past the last character of the iteration. */ public int getEndIndex() { return end; } /** * Returns the current index in the source string. * * @return the current index. */ public int getIndex() { return offset; } @Override public int hashCode() { return string.hashCode() + start + end + offset; } /** * Sets the current position to the end index - 1 and returns the character * at the new position. * * @return the character before the end index or {@code DONE} if the begin * index is equal to the end index. */ public char last() { if (start == end) { return DONE; } offset = end - 1; return string.charAt(offset); } /** * Increments the current index and returns the character at the new index. * * @return the character at the next index, or {@code DONE} if the next * index would be past the end. 
*/ public char next() { if (offset >= (end - 1)) { offset = end; return DONE; } return string.charAt(++offset); } /** * Decrements the current index and returns the character at the new index. * * @return the character at the previous index, or {@code DONE} if the * previous index would be past the beginning. */ public char previous() { if (offset == start) { return DONE; } return string.charAt(--offset); } /** * Sets the current index in the source string. * * @param location * the index the current position is set to. * @return the character at the new index, or {@code DONE} if * {@code location} is set to the end index. * @throws IllegalArgumentException * if {@code location} is smaller than the begin index or * greater than the end index. */ public char setIndex(int location) { if (location < start || location > end) { throw new IllegalArgumentException(); } offset = location; if (offset == end) { return DONE; } return string.charAt(offset); } /** * Sets the source string to iterate over. The begin and end positions are * set to the start and end of this string. * * @param value * the new source string. */ public void setText(String value) { string = value; start = offset = 0; end = value.length(); } }
/* * Copyright (C) 2008 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.ddmlib.testrunner; import com.android.ddmlib.IShellOutputReceiver; import com.android.ddmlib.Log; import com.android.ddmlib.MultiLineReceiver; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * Parses the 'raw output mode' results of an instrumentation test run from shell and informs a * ITestRunListener of the results. * * <p>Expects the following output: * * <p>If fatal error occurred when attempted to run the tests: * <pre> * INSTRUMENTATION_STATUS: Error=error Message * INSTRUMENTATION_FAILED: * </pre> * <p>or * <pre> * INSTRUMENTATION_RESULT: shortMsg=error Message * </pre> * * <p>Otherwise, expect a series of test results, each one containing a set of status key/value * pairs, delimited by a start(1)/pass(0)/fail(-2)/error(-1) status code result. At end of test * run, expects that the elapsed test time in seconds will be displayed * * <p>For example: * <pre> * INSTRUMENTATION_STATUS_CODE: 1 * INSTRUMENTATION_STATUS: class=com.foo.FooTest * INSTRUMENTATION_STATUS: test=testFoo * INSTRUMENTATION_STATUS: numtests=2 * INSTRUMENTATION_STATUS: stack=com.foo.FooTest#testFoo:312 * com.foo.X * INSTRUMENTATION_STATUS_CODE: -2 * ... 
* * Time: X * </pre> * <p>Note that the "value" portion of the key-value pair may wrap over several text lines */ public class InstrumentationResultParser extends MultiLineReceiver { /** Relevant test status keys. */ private static class StatusKeys { private static final String TEST = "test"; private static final String CLASS = "class"; private static final String STACK = "stack"; private static final String NUMTESTS = "numtests"; private static final String ERROR = "Error"; private static final String SHORTMSG = "shortMsg"; } /** The set of expected status keys. Used to filter which keys should be stored as metrics */ private static final Set<String> KNOWN_KEYS = new HashSet<String>(); static { KNOWN_KEYS.add(StatusKeys.TEST); KNOWN_KEYS.add(StatusKeys.CLASS); KNOWN_KEYS.add(StatusKeys.STACK); KNOWN_KEYS.add(StatusKeys.NUMTESTS); KNOWN_KEYS.add(StatusKeys.ERROR); KNOWN_KEYS.add(StatusKeys.SHORTMSG); // unused, but regularly occurring status keys. KNOWN_KEYS.add("stream"); KNOWN_KEYS.add("id"); KNOWN_KEYS.add("current"); } /** Test result status codes. */ private static class StatusCodes { private static final int FAILURE = -2; private static final int START = 1; private static final int ERROR = -1; private static final int OK = 0; private static final int IN_PROGRESS = 2; } /** Prefixes used to identify output. 
*/ private static class Prefixes { private static final String STATUS = "INSTRUMENTATION_STATUS: "; private static final String STATUS_CODE = "INSTRUMENTATION_STATUS_CODE: "; private static final String STATUS_FAILED = "INSTRUMENTATION_FAILED: "; private static final String CODE = "INSTRUMENTATION_CODE: "; private static final String RESULT = "INSTRUMENTATION_RESULT: "; private static final String TIME_REPORT = "Time: "; } private final Collection<ITestRunListener> mTestListeners; /** * Test result data */ private static class TestResult { private Integer mCode = null; private String mTestName = null; private String mTestClass = null; private String mStackTrace = null; private Integer mNumTests = null; /** Returns true if all expected values have been parsed */ boolean isComplete() { return mCode != null && mTestName != null && mTestClass != null; } /** Provides a more user readable string for TestResult, if possible */ @Override public String toString() { StringBuilder output = new StringBuilder(); if (mTestClass != null ) { output.append(mTestClass); output.append('#'); } if (mTestName != null) { output.append(mTestName); } if (output.length() > 0) { return output.toString(); } return "unknown result"; } } /** the name to provide to {@link ITestRunListener#testRunStarted(String, int)} */ private final String mTestRunName; /** Stores the status values for the test result currently being parsed */ private TestResult mCurrentTestResult = null; /** Stores the status values for the test result last parsed */ private TestResult mLastTestResult = null; /** Stores the current "key" portion of the status key-value being parsed. */ private String mCurrentKey = null; /** Stores the current "value" portion of the status key-value being parsed. */ private StringBuilder mCurrentValue = null; /** True if start of test has already been reported to listener. */ private boolean mTestStartReported = false; /** True if the completion of the test run has been detected. 
*/ private boolean mTestRunFinished = false; /** True if test run failure has already been reported to listener. */ private boolean mTestRunFailReported = false; /** The elapsed time of the test run, in milliseconds. */ private long mTestTime = 0; /** True if current test run has been canceled by user. */ private boolean mIsCancelled = false; /** The number of tests currently run */ private int mNumTestsRun = 0; /** The number of tests expected to run */ private int mNumTestsExpected = 0; /** True if the parser is parsing a line beginning with "INSTRUMENTATION_RESULT" */ private boolean mInInstrumentationResultKey = false; /** * Stores key-value pairs under INSTRUMENTATION_RESULT header, these are printed at the * end of a test run, if applicable */ private Map<String, String> mInstrumentationResultBundle = new HashMap<String, String>(); /** * Stores key-value pairs of metrics emitted during the execution of each test case. Note that * standard keys that are stored in the TestResults class are filtered out of this Map. */ private Map<String, String> mTestMetrics = new HashMap<String, String>(); private static final String LOG_TAG = "InstrumentationResultParser"; /** Error message supplied when no parseable test results are received from test run. */ static final String NO_TEST_RESULTS_MSG = "No test results"; /** Error message supplied when a test start bundle is parsed, but not the test end bundle. */ static final String INCOMPLETE_TEST_ERR_MSG_PREFIX = "Test failed to run to completion"; static final String INCOMPLETE_TEST_ERR_MSG_POSTFIX = "Check device logcat for details"; /** Error message supplied when the test run is incomplete. */ static final String INCOMPLETE_RUN_ERR_MSG_PREFIX = "Test run failed to complete"; /** * Creates the InstrumentationResultParser. 
*
     * @param runName the test run name to provide to
     *            {@link ITestRunListener#testRunStarted(String, int)}
     * @param listeners informed of test results as the tests are executing
     */
    public InstrumentationResultParser(String runName, Collection<ITestRunListener> listeners) {
        mTestRunName = runName;
        // defensive copy: later mutation of the caller's collection has no effect here
        mTestListeners = new ArrayList<ITestRunListener>(listeners);
    }

    /**
     * Creates the InstrumentationResultParser for a single listener.
     *
     * @param runName the test run name to provide to
     *            {@link ITestRunListener#testRunStarted(String, int)}
     * @param listener informed of test results as the tests are executing
     */
    public InstrumentationResultParser(String runName, ITestRunListener listener) {
        this(runName, Collections.singletonList(listener));
    }

    /**
     * Processes the instrumentation test output from shell.
     *
     * @see MultiLineReceiver#processNewLines
     */
    @Override
    public void processNewLines(String[] lines) {
        for (String line : lines) {
            parse(line);
            // in verbose mode, dump all adb output to log
            Log.v(LOG_TAG, line);
        }
    }

    /**
     * Parse an individual output line. Expects a line that is one of:
     * <ul>
     * <li>
     * The start of a new status line (starts with Prefixes.STATUS or Prefixes.STATUS_CODE),
     * and thus there is a new key=value pair to parse, and the previous key-value pair is
     * finished.
     * </li>
     * <li>
     * A continuation of the previous status (the "value" portion of the key has wrapped
     * to the next line).
     * </li>
     * <li> A line reporting a fatal error in the test run (Prefixes.STATUS_FAILED) </li>
     * <li> A line reporting the total elapsed time of the test run. (Prefixes.TIME_REPORT) </li>
     * </ul>
     *
     * @param line Text output line
     */
    private void parse(String line) {
        // NOTE: STATUS_CODE must be checked before STATUS — "INSTRUMENTATION_STATUS: "
        // is a prefix of neither, but both share the "INSTRUMENTATION_STATUS" stem, so
        // the more specific prefix has to win. Keep this dispatch order intact.
        if (line.startsWith(Prefixes.STATUS_CODE)) {
            // Previous status key-value has been collected. Store it.
            submitCurrentKeyValue();
            mInInstrumentationResultKey = false;
            parseStatusCode(line);
        } else if (line.startsWith(Prefixes.STATUS)) {
            // Previous status key-value has been collected. Store it.
            submitCurrentKeyValue();
            mInInstrumentationResultKey = false;
            parseKey(line, Prefixes.STATUS.length());
        } else if (line.startsWith(Prefixes.RESULT)) {
            // Previous status key-value has been collected. Store it.
            submitCurrentKeyValue();
            mInInstrumentationResultKey = true;
            parseKey(line, Prefixes.RESULT.length());
        } else if (line.startsWith(Prefixes.STATUS_FAILED) ||
                   line.startsWith(Prefixes.CODE)) {
            // Previous status key-value has been collected. Store it.
            submitCurrentKeyValue();
            mInInstrumentationResultKey = false;
            // these codes signal the end of the instrumentation run
            mTestRunFinished = true;
            // just ignore the remaining data on this line
        } else if (line.startsWith(Prefixes.TIME_REPORT)) {
            parseTime(line);
        } else {
            if (mCurrentValue != null) {
                // this is a value that has wrapped to next line.
                mCurrentValue.append("\r\n");
                mCurrentValue.append(line);
            } else if (line.trim().length() > 0){
                Log.d(LOG_TAG, "unrecognized line " + line);
            }
        }
    }

    /**
     * Stores the currently parsed key-value pair in the appropriate place.
*/ private void submitCurrentKeyValue() { if (mCurrentKey != null && mCurrentValue != null) { String statusValue = mCurrentValue.toString(); if (mInInstrumentationResultKey) { if (!KNOWN_KEYS.contains(mCurrentKey)) { mInstrumentationResultBundle.put(mCurrentKey, statusValue); } else if (mCurrentKey.equals(StatusKeys.SHORTMSG)) { // test run must have failed handleTestRunFailed(String.format("Instrumentation run failed due to '%1$s'", statusValue)); } } else { TestResult testInfo = getCurrentTestInfo(); if (mCurrentKey.equals(StatusKeys.CLASS)) { testInfo.mTestClass = statusValue.trim(); } else if (mCurrentKey.equals(StatusKeys.TEST)) { testInfo.mTestName = statusValue.trim(); } else if (mCurrentKey.equals(StatusKeys.NUMTESTS)) { try { testInfo.mNumTests = Integer.parseInt(statusValue); } catch (NumberFormatException e) { Log.w(LOG_TAG, "Unexpected integer number of tests, received " + statusValue); } } else if (mCurrentKey.equals(StatusKeys.ERROR)) { // test run must have failed handleTestRunFailed(statusValue); } else if (mCurrentKey.equals(StatusKeys.STACK)) { testInfo.mStackTrace = statusValue; } else if (!KNOWN_KEYS.contains(mCurrentKey)) { // Not one of the recognized key/value pairs, so dump it in mTestMetrics mTestMetrics.put(mCurrentKey, statusValue); } } mCurrentKey = null; mCurrentValue = null; } } /** * A utility method to return the test metrics from the current test case execution and get * ready for the next one. */ private Map<String, String> getAndResetTestMetrics() { Map<String, String> retVal = mTestMetrics; mTestMetrics = new HashMap<String, String>(); return retVal; } private TestResult getCurrentTestInfo() { if (mCurrentTestResult == null) { mCurrentTestResult = new TestResult(); } return mCurrentTestResult; } private void clearCurrentTestInfo() { mLastTestResult = mCurrentTestResult; mCurrentTestResult = null; } /** * Parses the key from the current line. * Expects format of "key=value". 
* * @param line full line of text to parse * @param keyStartPos the starting position of the key in the given line */ private void parseKey(String line, int keyStartPos) { int endKeyPos = line.indexOf('=', keyStartPos); if (endKeyPos != -1) { mCurrentKey = line.substring(keyStartPos, endKeyPos).trim(); parseValue(line, endKeyPos + 1); } } /** * Parses the start of a key=value pair. * * @param line - full line of text to parse * @param valueStartPos - the starting position of the value in the given line */ private void parseValue(String line, int valueStartPos) { mCurrentValue = new StringBuilder(); mCurrentValue.append(line.substring(valueStartPos)); } /** * Parses out a status code result. */ private void parseStatusCode(String line) { String value = line.substring(Prefixes.STATUS_CODE.length()).trim(); TestResult testInfo = getCurrentTestInfo(); testInfo.mCode = StatusCodes.ERROR; try { testInfo.mCode = Integer.parseInt(value); } catch (NumberFormatException e) { Log.w(LOG_TAG, "Expected integer status code, received: " + value); testInfo.mCode = StatusCodes.ERROR; } if (testInfo.mCode != StatusCodes.IN_PROGRESS) { // this means we're done with current test result bundle reportResult(testInfo); clearCurrentTestInfo(); } } /** * Returns true if test run canceled. * * @see IShellOutputReceiver#isCancelled() */ @Override public boolean isCancelled() { return mIsCancelled; } /** * Requests cancellation of test run. */ public void cancel() { mIsCancelled = true; } /** * Reports a test result to the test run listener. Must be called when a individual test * result has been fully parsed. 
* * @param statusMap key-value status pairs of test result */ private void reportResult(TestResult testInfo) { if (!testInfo.isComplete()) { Log.w(LOG_TAG, "invalid instrumentation status bundle " + testInfo.toString()); return; } reportTestRunStarted(testInfo); TestIdentifier testId = new TestIdentifier(testInfo.mTestClass, testInfo.mTestName); Map<String, String> metrics; switch (testInfo.mCode) { case StatusCodes.START: for (ITestRunListener listener : mTestListeners) { listener.testStarted(testId); } break; case StatusCodes.FAILURE: metrics = getAndResetTestMetrics(); for (ITestRunListener listener : mTestListeners) { listener.testFailed(ITestRunListener.TestFailure.FAILURE, testId, getTrace(testInfo)); listener.testEnded(testId, metrics); } mNumTestsRun++; break; case StatusCodes.ERROR: metrics = getAndResetTestMetrics(); for (ITestRunListener listener : mTestListeners) { listener.testFailed(ITestRunListener.TestFailure.ERROR, testId, getTrace(testInfo)); listener.testEnded(testId, metrics); } mNumTestsRun++; break; case StatusCodes.OK: metrics = getAndResetTestMetrics(); for (ITestRunListener listener : mTestListeners) { listener.testEnded(testId, metrics); } mNumTestsRun++; break; default: metrics = getAndResetTestMetrics(); Log.e(LOG_TAG, "Unknown status code received: " + testInfo.mCode); for (ITestRunListener listener : mTestListeners) { listener.testEnded(testId, metrics); } mNumTestsRun++; break; } } /** * Reports the start of a test run, and the total test count, if it has not been previously * reported. 
*
     * @param testInfo current test status values
     */
    private void reportTestRunStarted(TestResult testInfo) {
        // if start test run not reported yet
        if (!mTestStartReported && testInfo.mNumTests != null) {
            for (ITestRunListener listener : mTestListeners) {
                listener.testRunStarted(mTestRunName, testInfo.mNumTests);
            }
            mNumTestsExpected = testInfo.mNumTests;
            mTestStartReported = true;
        }
    }

    /**
     * Returns the stack trace of the current failed test, from the provided testInfo.
     */
    private String getTrace(TestResult testInfo) {
        if (testInfo.mStackTrace != null) {
            return testInfo.mStackTrace;
        } else {
            Log.e(LOG_TAG, "Could not find stack trace for failed test ");
            // create a "stub" stack trace, to match gtest behavior
            return new Throwable("Unknown failure").toString();
        }
    }

    /**
     * Parses out and store the elapsed time, e.g. from a "Time: 12.345" line,
     * converting seconds to milliseconds.
     */
    private void parseTime(String line) {
        final Pattern timePattern = Pattern.compile(String.format("%s\\s*([\\d\\.]+)",
                Prefixes.TIME_REPORT));
        Matcher timeMatcher = timePattern.matcher(line);
        if (timeMatcher.find()) {
            String timeString = timeMatcher.group(1);
            try {
                float timeSeconds = Float.parseFloat(timeString);
                mTestTime = (long) (timeSeconds * 1000);
            } catch (NumberFormatException e) {
                Log.w(LOG_TAG, String.format("Unexpected time format %1$s", line));
            }
        } else {
            Log.w(LOG_TAG, String.format("Unexpected time format %1$s", line));
        }
    }

    /**
     * Inform the parser of a instrumentation run failure. Should be called when the adb command
     * used to run the test fails.
     */
    public void handleTestRunFailed(String errorMsg) {
        errorMsg = (errorMsg == null ? "Unknown error" : errorMsg);
        Log.i(LOG_TAG, String.format("test run failed: '%1$s'", errorMsg));
        // If the last test started but never completed, attribute the crash to it.
        if (mLastTestResult != null &&
            mLastTestResult.isComplete() &&
            StatusCodes.START == mLastTestResult.mCode) {

            // received test start msg, but not test complete
            // assume test caused this, report as test failure
            TestIdentifier testId = new TestIdentifier(mLastTestResult.mTestClass,
                    mLastTestResult.mTestName);
            for (ITestRunListener listener : mTestListeners) {
                listener.testFailed(ITestRunListener.TestFailure.ERROR, testId,
                    String.format("%1$s. Reason: '%2$s'. %3$s", INCOMPLETE_TEST_ERR_MSG_PREFIX,
                            errorMsg, INCOMPLETE_TEST_ERR_MSG_POSTFIX));
                listener.testEnded(testId, getAndResetTestMetrics());
            }
        }
        for (ITestRunListener listener : mTestListeners) {
            if (!mTestStartReported) {
                // test run wasn't started - must have crashed before it started
                listener.testRunStarted(mTestRunName, 0);
            }
            listener.testRunFailed(errorMsg);
            listener.testRunEnded(mTestTime, mInstrumentationResultBundle);
        }
        mTestStartReported = true;
        mTestRunFailReported = true;
    }

    /**
     * Called by parent when adb session is complete.
     */
    @Override
    public void done() {
        super.done();
        if (!mTestRunFailReported) {
            handleOutputDone();
        }
    }

    /**
     * Handles the end of the adb session when a test run failure has not been reported yet
     */
    private void handleOutputDone() {
        if (!mTestStartReported && !mTestRunFinished) {
            // no results
            handleTestRunFailed(NO_TEST_RESULTS_MSG);
        } else if (mNumTestsExpected > mNumTestsRun) {
            final String message =
                String.format("%1$s. Expected %2$d tests, received %3$d",
                        INCOMPLETE_RUN_ERR_MSG_PREFIX, mNumTestsExpected, mNumTestsRun);
            handleTestRunFailed(message);
        } else {
            for (ITestRunListener listener : mTestListeners) {
                if (!mTestStartReported) {
                    // test run wasn't started, but it finished successfully. Must be a run with
                    // no tests
                    listener.testRunStarted(mTestRunName, 0);
                }
                listener.testRunEnded(mTestTime, mInstrumentationResultBundle);
            }
        }
    }
}
package redhat.jee_migration_example.management.itemStoreManager;

import static javax.ejb.ConcurrencyManagementType.BEAN;
import static javax.ejb.TransactionAttributeType.REQUIRED;
import static javax.ejb.TransactionManagementType.CONTAINER;

import java.util.List;
import java.util.Map;

import javax.ejb.ConcurrencyManagement;
import javax.ejb.LocalBean;
import javax.ejb.Stateless;
import javax.ejb.TransactionAttribute;
import javax.ejb.TransactionManagement;
import javax.inject.Inject;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.aries.Assert;
import org.aries.message.Message;
import org.aries.message.MessageInterceptor;

import redhat.jee_migration_example.Item;

/**
 * Stateless EJB interceptor exposing {@link ItemStoreManagerHandler} operations over
 * {@link Message} parts. Every operation follows the same convention: read input from
 * message parts, delegate to the handler, write the result back as a part. Any failure
 * (deliberately caught as {@code Throwable}) is logged and attached to the message under
 * the "exception" part instead of being propagated to the caller.
 */
@Stateless
@LocalBean
@ConcurrencyManagement(BEAN)
@TransactionManagement(CONTAINER)
public class ItemStoreManagerInterceptor extends MessageInterceptor<ItemStoreManager> {

    private static final Log log = LogFactory.getLog(ItemStoreManagerInterceptor.class);

    @Inject
    protected ItemStoreManagerHandler itemStoreManagerHandler;

    /** Fetches all items; result is attached as part "itemStore". */
    @TransactionAttribute(REQUIRED)
    public Message getAllItemStore(Message message) {
        try {
            List<Item> itemStore = itemStoreManagerHandler.getAllItemStore();
            Assert.notNull(itemStore, "ItemStore must exist");
            message.addPart("itemStore", itemStore);
        } catch (Throwable e) {
            log.error(e);
            //send error to event queue
            //forward message to invalid queue
            message.addPart("exception", e);
        }
        return message;
    }

    /** Fetches one item by the "itemId" part; result is attached as "itemStore". */
    @TransactionAttribute(REQUIRED)
    public Message getFromItemStoreById(Message message) {
        try {
            Long itemId = message.getPart("itemId");
            Item itemStore = itemStoreManagerHandler.getFromItemStore(itemId);
            Assert.notNull(itemStore, "ItemStore must exist");
            message.addPart("itemStore", itemStore);
        } catch (Throwable e) {
            log.error(e);
            //send error to event queue
            //forward message to invalid queue
            message.addPart("exception", e);
        }
        return message;
    }

    /** Fetches one item by the "itemKey" part; result is attached as "itemStore". */
    @TransactionAttribute(REQUIRED)
    public Message getFromItemStoreByKey(Message message) {
        try {
            String itemKey = message.getPart("itemKey");
            Item itemStore = itemStoreManagerHandler.getFromItemStore(itemKey);
            Assert.notNull(itemStore, "ItemStore must exist");
            message.addPart("itemStore", itemStore);
        } catch (Throwable e) {
            log.error(e);
            //send error to event queue
            //forward message to invalid queue
            message.addPart("exception", e);
        }
        return message;
    }

    /** Stores one item from parts "key"/"item"; generated id is attached as "itemId". */
    @TransactionAttribute(REQUIRED)
    public Message addToItemStore(Message message) {
        try {
            String key = message.getPart("key");
            Item item = message.getPart("item");
            Long itemId = itemStoreManagerHandler.addToItemStore(key, item);
            Assert.notNull(itemId, "ItemId must exist");
            message.addPart("itemId", itemId);
        } catch (Throwable e) {
            log.error(e);
            //send error to event queue
            //forward message to invalid queue
            message.addPart("exception", e);
        }
        return message;
    }

    /**
     * Stores several items from the "itemMap" part; generated ids are attached as "itemIds".
     * NOTE(review): this body is identical to {@link #addToItemStoreAsMap(Message)} — the
     * "AsList" variant presumably should accept a List part instead; confirm intent.
     */
    @TransactionAttribute(REQUIRED)
    public Message addToItemStoreAsList(Message message) {
        try {
            Map<String, Item> itemMap = message.getPart("itemMap");
            List<Long> itemIds = itemStoreManagerHandler.addToItemStore(itemMap);
            Assert.notNull(itemIds, "ItemIds must exist");
            message.addPart("itemIds", itemIds);
        } catch (Throwable e) {
            log.error(e);
            //send error to event queue
            //forward message to invalid queue
            message.addPart("exception", e);
        }
        return message;
    }

    /** Stores several items from the "itemMap" part; generated ids are attached as "itemIds". */
    @TransactionAttribute(REQUIRED)
    public Message addToItemStoreAsMap(Message message) {
        try {
            Map<String, Item> itemMap = message.getPart("itemMap");
            List<Long> itemIds = itemStoreManagerHandler.addToItemStore(itemMap);
            Assert.notNull(itemIds, "ItemIds must exist");
            message.addPart("itemIds", itemIds);
        } catch (Throwable e) {
            log.error(e);
            //send error to event queue
            //forward message to invalid queue
            message.addPart("exception", e);
        }
        return message;
    }

    /** Removes every stored item. */
    @TransactionAttribute(REQUIRED)
    public Message removeAllItemStore(Message message) {
        try {
            itemStoreManagerHandler.removeAllItemStore();
        } catch (Throwable e) {
            log.error(e);
            //send error to event queue
            //forward message to invalid queue
            message.addPart("exception", e);
        }
        return message;
    }

    /** Removes one item identified by parts "key" and "item". */
    @TransactionAttribute(REQUIRED)
    public Message removeFromItemStore(Message message) {
        try {
            String key = message.getPart("key");
            Item item = message.getPart("item");
            itemStoreManagerHandler.removeFromItemStore(key, item);
        } catch (Throwable e) {
            log.error(e);
            //send error to event queue
            //forward message to invalid queue
            message.addPart("exception", e);
        }
        return message;
    }

    /** Removes one item identified by the "itemId" part. */
    @TransactionAttribute(REQUIRED)
    public Message removeFromItemStoreById(Message message) {
        try {
            Long itemId = message.getPart("itemId");
            itemStoreManagerHandler.removeFromItemStore(itemId);
        } catch (Throwable e) {
            log.error(e);
            //send error to event queue
            //forward message to invalid queue
            message.addPart("exception", e);
        }
        return message;
    }

    /** Removes one item identified by the "itemKey" part. */
    @TransactionAttribute(REQUIRED)
    public Message removeFromItemStoreByKey(Message message) {
        try {
            String itemKey = message.getPart("itemKey");
            itemStoreManagerHandler.removeFromItemStore(itemKey);
        } catch (Throwable e) {
            log.error(e);
            //send error to event queue
            //forward message to invalid queue
            message.addPart("exception", e);
        }
        return message;
    }

    /** Removes several items identified by the "itemMap" part. */
    @TransactionAttribute(REQUIRED)
    public Message removeFromItemStoreAsList(Message message) {
        try {
            Map<String, Item> itemMap = message.getPart("itemMap");
            itemStoreManagerHandler.removeFromItemStore(itemMap);
        } catch (Throwable e) {
            log.error(e);
            //send error to event queue
            //forward message to invalid queue
            message.addPart("exception", e);
        }
        return message;
    }

}
/** * <a href="http://www.openolat.org"> * OpenOLAT - Online Learning and Training</a><br> * <p> * Licensed under the Apache License, Version 2.0 (the "License"); <br> * you may not use this file except in compliance with the License.<br> * You may obtain a copy of the License at the * <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a> * <p> * Unless required by applicable law or agreed to in writing,<br> * software distributed under the License is distributed on an "AS IS" BASIS, <br> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br> * See the License for the specific language governing permissions and <br> * limitations under the License. * <p> * Initial code contributed and copyrighted by<br> * frentix GmbH, http://www.frentix.com * <p> */ package org.olat.portfolio.manager; import java.util.ArrayList; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import org.olat.basesecurity.BaseSecurity; import org.olat.basesecurity.Constants; import org.olat.basesecurity.IdentityShort; import org.olat.basesecurity.Policy; import org.olat.basesecurity.SecurityGroup; import org.olat.core.commons.persistence.DB; import org.olat.core.commons.services.tagging.manager.TaggingManager; import org.olat.core.commons.services.tagging.model.Tag; import org.olat.core.id.Identity; import org.olat.core.id.IdentityEnvironment; import org.olat.core.id.OLATResourceable; import org.olat.core.id.Roles; import org.olat.core.logging.AssertException; import org.olat.core.manager.BasicManager; import org.olat.core.util.StringHelper; import org.olat.core.util.coordinate.Coordinator; import org.olat.core.util.coordinate.CoordinatorManager; import org.olat.core.util.coordinate.SyncerCallback; import org.olat.core.util.vfs.VFSContainer; import org.olat.course.CourseFactory; import org.olat.course.ICourse; import 
org.olat.course.assessment.AssessmentManager; import org.olat.course.assessment.AssessmentNotificationsHandler; import org.olat.course.nodes.CourseNode; import org.olat.course.run.userview.UserCourseEnvironment; import org.olat.course.run.userview.UserCourseEnvironmentImpl; import org.olat.group.BusinessGroup; import org.olat.modules.webFeed.portfolio.LiveBlogArtefactHandler; import org.olat.portfolio.PortfolioModule; import org.olat.portfolio.model.EPFilterSettings; import org.olat.portfolio.model.artefacts.AbstractArtefact; import org.olat.portfolio.model.structel.EPMapShort; import org.olat.portfolio.model.structel.EPPage; import org.olat.portfolio.model.structel.EPStructureElement; import org.olat.portfolio.model.structel.EPStructuredMap; import org.olat.portfolio.model.structel.EPTargetResource; import org.olat.portfolio.model.structel.ElementType; import org.olat.portfolio.model.structel.PortfolioStructure; import org.olat.portfolio.model.structel.PortfolioStructureMap; import org.olat.resource.OLATResource; import org.olat.search.SearchResults; import org.olat.search.model.AbstractOlatDocument; import org.olat.search.model.ResultDocument; import org.olat.search.service.indexer.identity.PortfolioArtefactIndexer; import org.olat.search.service.searcher.SearchClient; import org.olat.user.UserManager; /** * * Description:<br> * Manager for common used tasks for ePortfolio. Should be used for all calls * from controllers. will itself use all other managers to * manipulate artefacts or structureElements and policies. 
* * <P> * Initial Date: 11.06.2010 <br> * * @author Roman Haag, roman.haag@frentix.com, http://www.frentix.com */ public class EPFrontendManager extends BasicManager { private final Coordinator coordinator; private final BaseSecurity securityManager; private final EPArtefactManager artefactManager; private final EPStructureManager structureManager; private final TaggingManager taggingManager; private final AssessmentNotificationsHandler assessmentNotificationsHandler; private final DB dbInstance; private SearchClient searchClient; private final EPSettingsManager settingsManager; private final EPPolicyManager policyManager; private final UserManager userManager; private PortfolioModule portfolioModule; /** * [for Spring] */ public EPFrontendManager(EPArtefactManager artefactManager, EPStructureManager structureManager, EPSettingsManager settingsManager, EPPolicyManager policyManager, CoordinatorManager coordinatorManager, BaseSecurity securityManager, TaggingManager taggingManager, DB dbInstance, AssessmentNotificationsHandler assessmentNotificationsHandler, UserManager userManager) { this.artefactManager = artefactManager; this.structureManager = structureManager; this.securityManager = securityManager; this.coordinator = coordinatorManager.getCoordinator(); this.taggingManager = taggingManager; this.assessmentNotificationsHandler = assessmentNotificationsHandler; this.dbInstance = dbInstance; this.settingsManager = settingsManager; this.policyManager = policyManager; this.userManager = userManager; } /** * [used by Spring] * @param searchClient */ public void setSearchClient(SearchClient searchClient) { this.searchClient = searchClient; } /** * [used by Spring] * * @param portfolioModule */ public void setPortfolioModule(PortfolioModule portfolioModule) { this.portfolioModule = portfolioModule; } /** * Create and persist an artefact of the given type * * @param type * @return The persisted artefact */ public AbstractArtefact createAndPersistArtefact(Identity 
identity, String type) { return artefactManager.createAndPersistArtefact(identity, type); } /** * Persists the artefact and returns the new version * * @param artefact * @return The last version of the artefact */ public AbstractArtefact updateArtefact(AbstractArtefact artefact) { return artefactManager.updateArtefact(artefact); } /** * delete an artefact and also its vfs-artefactContainer * all used tags will also be deleted. * @param artefact */ public void deleteArtefact(AbstractArtefact artefact) { List<PortfolioStructure> linksToArtefact = structureManager.getAllReferencesForArtefact(artefact); for (PortfolioStructure portfolioStructure : linksToArtefact) { structureManager.removeArtefactFromStructure(artefact, portfolioStructure); } // load again as session might be closed between artefact = artefactManager.loadArtefactByKey(artefact.getKey()); artefactManager.deleteArtefact(artefact); } /** * delete all artefacts from this users including used tags for them * @param ident */ public void deleteUsersArtefacts(Identity ident){ List<AbstractArtefact> userArtefacts = artefactManager.getArtefactPoolForUser(ident); if (userArtefacts != null){ for (AbstractArtefact abstractArtefact : userArtefacts) { deleteArtefact(abstractArtefact); } } } public boolean isArtefactClosed(AbstractArtefact artefact) { return artefactManager.isArtefactClosed(artefact); } public PortfolioStructure removeArtefactFromStructure(AbstractArtefact artefact, PortfolioStructure structure) { return structureManager.removeArtefactFromStructure(artefact, structure); } /** * Create and persist a link between a structure element and an artefact. 
* * @param author The author of the link * @param artefact The artefact to link * @param structure The structure element * @return The link */ public boolean addArtefactToStructure(Identity author, AbstractArtefact artefact, PortfolioStructure structure) { return structureManager.addArtefactToStructure(author, artefact, structure); } /** * move artefact from old to new structure * do so by removing and re-adding to new target * @param artefact * @param oldParStruct * @param newParStruct * @return true if adding was successful */ public boolean moveArtefactFromStructToStruct(AbstractArtefact artefact, PortfolioStructure oldParStruct, PortfolioStructure newParStruct) { return structureManager.moveArtefactFromStructToStruct(artefact, oldParStruct, newParStruct); } public boolean moveArtefactInStruct(AbstractArtefact artefact, PortfolioStructure parStruct, int position) { return structureManager.moveArtefactInStruct(artefact, parStruct, position); } /** * move a structure to a new parent-structure and removes old link * @param structToBeMvd * @param oldParStruct * @param newParStruct * @return true if no exception occured */ public boolean moveStructureToNewParentStructure(PortfolioStructure structToBeMvd, PortfolioStructure oldParStruct, PortfolioStructure newParStruct, int destinationPos){ return structureManager.moveStructureToNewParentStructure(structToBeMvd, oldParStruct, newParStruct, destinationPos); } /** * move a structures order within the same parent, allows manual sorting. * @param structToBeMvd * @param destinationPos where it should be placed * @return true if it went ok, false otherwise */ public boolean moveStructureToPosition(PortfolioStructure structToBeMvd, int destinationPos){ return structureManager.reOrderStructures(loadStructureParent(structToBeMvd), structToBeMvd, destinationPos); } /** * set the reflexion for the link structureElement <-> artefact * this can be a different reflexion than the one of the artefact. 
* Reflects why the artefact was added to this structure! * @param artefact * @param structure * @param reflexion * @return */ public boolean setReflexionForArtefactToStructureLink(AbstractArtefact artefact, PortfolioStructure structure, String reflexion){ return structureManager.setReflexionForArtefactToStructureLink(artefact, structure, reflexion); } /** * get the reflexion set on the link structureElement <-> artefact * this can be a different reflexion than the one of the artefact. * Reflects why the artefact was added to this structure! * @param artefact * @param structure * @return String reflexion */ public String getReflexionForArtefactToStructureLink(AbstractArtefact artefact, PortfolioStructure structure){ return structureManager.getReflexionForArtefactToStructureLink(artefact, structure); } /** * counts amount of artefact in all structures and every child element * @param structure * @return */ public int countArtefactsRecursively(PortfolioStructure structure) { return structureManager.countArtefactsRecursively(structure, 0); } public int countArtefactsInMap(PortfolioStructureMap map) { return structureManager.countArtefactsRecursively(map); } /** * looks if the given artefact exists in the PortfolioStructure * @param artefact * @param structure * @return */ public boolean isArtefactInStructure(AbstractArtefact artefact, PortfolioStructure structure){ return structureManager.isArtefactInStructure(artefact, structure); } /** * load all artefacts with given businesspath. * setting an Identity to restrict to is optional. 
* this mostly is just to lookup for existence of already collected artefacts from same source * @param businessPath * @param author (optional) * @return */ public List<AbstractArtefact> loadArtefactsByBusinessPath(String businessPath, Identity author){ return artefactManager.loadArtefactsByBusinessPath(businessPath, author); } /** * List artefacts for indexing * @param author (optional) * @param firstResult (optional) * @param maxResults (optional) * @return */ public List<AbstractArtefact> getArtefacts(Identity author, int firstResult, int maxResults) { return artefactManager.getArtefacts(author, null, firstResult, maxResults); } /** * Load the artefact by its primary key * * @param key The primary key * @return The artefact or null if nothing found */ public AbstractArtefact loadArtefactByKey(Long key) { return artefactManager.loadArtefactByKey(key); } /** * get the users choice of attributes or a default * * @return */ public Map<String, Boolean> getArtefactAttributeConfig(Identity ident) { return settingsManager.getArtefactAttributeConfig(ident); } /** * persist the users chosen attributes to show as a property * * @param ident * @param artAttribConfig */ public void setArtefactAttributeConfig(Identity ident, Map<String, Boolean> artAttribConfig) { settingsManager.setArtefactAttributeConfig(ident, artAttribConfig); } /** * get all persisted filters from a given user * @param ident * @return filtersettings or list with an empty filter, if none were found */ public List<EPFilterSettings> getSavedFilterSettings(Identity ident){ return settingsManager.getSavedFilterSettings(ident); } /** * persist users filter settings as property, only save such with a name * @param ident * @param filterList */ public void setSavedFilterSettings(Identity ident, List<EPFilterSettings> filterList){ settingsManager.setSavedFilterSettings(ident, filterList); } /** * remove a given filter from users list * @param ident * @param filterName */ public void 
deleteFilterFromUsersList(Identity ident, String filterID){ settingsManager.deleteFilterFromUsersList(ident, filterID); } /** * get the last selected PortfolioStructure of this user * @param ident Identity * @return the loaded PortfolioStructure */ public PortfolioStructure getUsersLastUsedPortfolioStructure (Identity ident){ Long structKey = settingsManager.getUsersLastUsedPortfolioStructureKey(ident); if (structKey != null) { PortfolioStructure struct = structureManager.loadPortfolioStructureByKey(structKey); return struct; } return null; } /** * get the users prefered viewing mode for artefacts (either table / preview) * @param ident * @return */ public String getUsersPreferedArtefactViewMode(Identity ident, String context){ return settingsManager.getUsersPreferedArtefactViewMode(ident, context); } /** * persist the users prefered viewing mode for artefacts (either table / preview) * @param ident * @param preferedMode */ public void setUsersPreferedArtefactViewMode(Identity ident, String preferedMode, String context){ settingsManager.setUsersPreferedArtefactViewMode(ident, preferedMode, context); } /** * persist the last uses PortfolioStructure to use it later on * @param ident Identity * @param struct */ public void setUsersLastUsedPortfolioStructure(Identity ident, PortfolioStructure struct){ settingsManager.setUsersLastUsedPortfolioStructure(ident, struct); } /** * returns an array of tags for given artefact * * @param artefact * @return null if none are found */ public List<String> getArtefactTags(AbstractArtefact artefact) { return artefactManager.getArtefactTags(artefact); } /** * add a tag to an artefact (will save a tag pointing to this artefact) * * @param identity * @param artefact * @param tag */ public void setArtefactTag(Identity identity, AbstractArtefact artefact, String tag) { artefactManager.setArtefactTag(identity, artefact, tag); } /** * add a List of tags to an artefact * * @param identity * @param artefact * @param tags */ public void 
setArtefactTags(Identity identity, AbstractArtefact artefact, List<String> tags) { artefactManager.setArtefactTags(identity, artefact, tags); } /** * get all maps wherein (or in sub-structures) the given artefact is linked. * * @param artefact * @return */ public List<PortfolioStructure> getReferencedMapsForArtefact(AbstractArtefact artefact) { return structureManager.getReferencedMapsForArtefact(artefact); } /** * get all artefacts for the given identity this represents the artefact pool * * @param ident * @return */ public List<AbstractArtefact> getArtefactPoolForUser(Identity ident) { return artefactManager.getArtefactPoolForUser(ident); } public EPArtefactTagCloud getArtefactsAndTagCloud(Identity identity, List<String> tags) { return artefactManager.getArtefactsAndTagCloud(identity, tags); } /** * filter the provided list of artefacts with different filters * * @param allArtefacts the list to manipulate on * @param filterSettings Settings for the filter to work on * @return */ public List<AbstractArtefact> filterArtefactsByFilterSettings(EPFilterSettings filterSettings, Identity identity, Roles roles) { List<Long> artefactKeys = fulltextSearchAfterArtefacts(filterSettings, identity, roles); if(artefactKeys == null || artefactKeys.isEmpty()) { List<AbstractArtefact> allArtefacts = artefactManager.getArtefactPoolForUser(identity); return artefactManager.filterArtefactsByFilterSettings(allArtefacts, filterSettings); } List<AbstractArtefact> artefacts = artefactManager.getArtefacts(identity, artefactKeys, 0, 500); // remove the text-filter when the lucene-search got some results before EPFilterSettings settings = filterSettings.cloneAfterFullText(); return artefactManager.filterArtefactsByFilterSettings(artefacts, settings); } private List<Long> fulltextSearchAfterArtefacts(EPFilterSettings filterSettings, Identity identity, Roles roles) { String query = filterSettings.getTextFilter(); if (StringHelper.containsNonWhitespace(query)) { try { List<String> queries = 
new ArrayList<String>(); appendAnd(queries, AbstractOlatDocument.RESERVED_TO, ":\"", identity.getKey().toString(), "\""); appendAnd(queries, "(", AbstractOlatDocument.DOCUMENTTYPE_FIELD_NAME, ":(", PortfolioArtefactIndexer.TYPE, "*))"); SearchResults searchResults = searchClient.doSearch(query, queries, identity, roles, 0, 1000, false); List<Long> keys = new ArrayList<Long>(); if (searchResults != null) { String marker = AbstractArtefact.class.getSimpleName(); for (ResultDocument doc : searchResults.getList()) { String businessPath = doc.getResourceUrl(); int start = businessPath.indexOf(marker); if (start > 0) { start += marker.length() + 1; int stop = businessPath.indexOf(']', start); if (stop < businessPath.length()) { String keyStr = businessPath.substring(start, stop); try { keys.add(Long.parseLong(keyStr)); } catch (Exception e) { logError("Not a primary key: " + keyStr, e); } } } } } return keys; } catch (Exception e) { logError("", e); return Collections.emptyList(); } } else return Collections.emptyList(); } private void appendAnd(List<String> queries, String... strings) { StringBuilder query = new StringBuilder(); for(String string:strings) { query.append(string); } if(query.length() > 0) { queries.add(query.toString()); } } /** * returns defined amount of users mostly used tags, sorted by occurrence of tag * @param ident * @param amount nr of tags to return, if 0: the default (5) will be * returned, if -1: you will get all * @return a combined map with tags including occurrence and tag * format: "house (7), house" */ public Map<String, String> getUsersMostUsedTags(Identity ident, Integer amount) { amount = (amount == 0) ? 
5 : amount;
	List<String> outp = new ArrayList<String>();
	Map<String, String> res = new HashMap<String, String>();
	// each entry holds the tag text under "tag" and its frequency under "nr"
	List<Map<String, Integer>> bla = taggingManager.getUserTagsWithFrequency(ident);
	for (Map<String, Integer> map : bla) {
		String caption = map.get("tag") + " (" + map.get("nr") + ")";
		outp.add(caption);
		res.put(caption, String.valueOf(map.get("tag")));
		// stop once the requested amount is reached (-1 never matches -> return all)
		if (amount == res.size()) break;
	}
	return res;
}

/**
 * get all tags a user owns, ordered and without duplicates
 * @param ident
 * @return
 */
public List<String> getUsersTags(Identity ident) {
	return taggingManager.getUserTagsAsString(ident);
}

/**
 * get all tags restricted to Artefacts a user owns, ordered and without duplicates
 * @param ident
 * @return
 */
public List<String> getUsersTagsOfArtefactType(Identity ident) {
	return taggingManager.getUserTagsOfTypeAsString(ident, AbstractArtefact.class.getSimpleName());
}

/**
 * lookup resources for a given tags
 * @param tagList
 * @return
 */
public Set<OLATResourceable> getResourcesByTags(List<Tag> tagList) {
	return taggingManager.getResourcesByTags(tagList);
}

/**
 * get all tags for a given resource
 * @param ores
 * @return
 */
public List<Tag> loadTagsForResource(OLATResourceable ores) {
	return taggingManager.loadTagsForResource(ores, null, null);
}

/**
 * sync map with its former source (template)
 */
public boolean synchronizeStructuredMapToUserCopy(PortfolioStructureMap map) {
	final EPStructuredMap userMap = (EPStructuredMap) map;
	final EPStructureManager structMgr = structureManager; // only remove synthetic access warnings
	// run exclusively on the map's resource so concurrent syncs cannot interleave
	Boolean synched = coordinator.getSyncer().doInSync(map.getOlatResource(), new SyncerCallback<Boolean>() {
		public Boolean execute() {
			if (userMap.getStructuredMapSource() == null) {
				// this map was not created from a template; nothing to sync
				return Boolean.FALSE;
			}
			// need to reload it, I don't know why
			Long templateKey = userMap.getStructuredMapSource().getKey();
			userMap.setLastSynchedDate(new Date());
			PortfolioStructure template = structMgr.loadPortfolioStructureByKey(templateKey);
PortfolioStructureMap template = (PortfolioStructureMap) structMgr.loadPortfolioStructureByKey(tempKey);
			String title = template.getTitle();
			String description = template.getDescription();
			PortfolioStructureMap copy = structMgr.createPortfolioStructuredMap(template, author, title, description, ores, subPath, targetBusinessPath);
			if(copy instanceof EPStructuredMap) {
				((EPStructuredMap)copy).setDeadLine(deadline);
			}
			structMgr.copyStructureRecursively(template, copy, true);
			return copy;
		}
	});
	return map;
}

/**
 * Low level function to copy the structure of elements, with or without the artefacts
 * @param source
 * @param target
 * @param withArtefacts
 */
public void copyStructureRecursively(PortfolioStructure source, PortfolioStructure target, boolean withArtefacts) {
	structureManager.copyStructureRecursively(source, target, withArtefacts);
}

/**
 * Return the structure elements of the given type without permission control. Need this for indexing.
 * @param firstResult
 * @param maxResults
 * @param type
 * @return
 */
public List<PortfolioStructure> getStructureElements(int firstResult, int maxResults, ElementType... type) {
	return structureManager.getStructureElements(firstResult, maxResults, type);
}

/**
 * get all Structure-Elements linked to identity over a security group (owner)
 *
 * @param ident
 * @return
 */
public List<PortfolioStructure> getStructureElementsForUser(Identity identity, ElementType... type) {
	return structureManager.getStructureElementsForUser(identity, type);
}

/**
 * Get all Structure-Elements linked which the identity can see over a policy,
 *
 * @param ident The identity which what see maps
 * @param chosenOwner Limit maps from this identity
 * @param type Limit maps to this or these types
 * @return
 */
public List<PortfolioStructure> getStructureElementsFromOthers(final Identity ident, final Identity chosenOwner, final ElementType... type) {
	// 0/0 limits mean: no paging, return everything
	return structureManager.getStructureElementsFromOthersLimited(ident, chosenOwner, 0, 0, type);
}

/**
 * Get part of the Structure-Elements linked which the identity can see over a policy.
 * The range of elements returned is specified by limitFrom and limitTo (used for paging)
 *
 * @param ident The identity which what see maps
 * @param chosenOwner Limit maps from this identity
 * @param limitFrom Limit maps
 * @param limitTo Limit maps
 * @param type Limit maps to this or these types
 * @return
 */
public List<PortfolioStructure> getStructureElementsFromOthers(final Identity ident, final Identity chosenOwner, int limitFrom, int limitTo, final ElementType... type) {
	return structureManager.getStructureElementsFromOthersLimited(ident, chosenOwner, limitFrom, limitTo, type);
}

/**
 * Get the number of all Structure-Elements linked which the identity can see over a policy,
 *
 * @param ident The identity which what see maps
 * @param chosenOwner Limit maps from this identity
 * @param types Limit maps to this or these types
 * @return
 */
public int countStructureElementsFromOthers(final Identity ident, final Identity chosenOwner, final ElementType... types) {
	return structureManager.countStructureElementsFromOthers(ident, chosenOwner, types);
}

/**
 * Get all Structure-Elements linked which the identity can see over a policy,
 * WITHOUT those that are public to all OLAT users ( GROUP_OLATUSERS )
 * !! this should be used, to save performance when there are a lot of public shared maps!!
 * @param ident The identity which what see maps
 * @param choosenOwner Limit maps from this identity
 * @param types Limit maps to this or these types
 * @return
 */
public List<PortfolioStructure> getStructureElementsFromOthersWithoutPublic(Identity ident, Identity choosenOwner, ElementType... types){
	return structureManager.getStructureElementsFromOthersWithoutPublic(ident, choosenOwner, types);
}

/**
 * Return the list of artefacts glued to this structure element
 * @param structure
 * @return A list of artefacts
 */
public List<AbstractArtefact> getArtefacts(PortfolioStructure structure) {
	return structureManager.getArtefacts(structure);
}

/**
 * FXOLAT-431
 *
 * @param map
 * @return
 * public List<AbstractArtefact> getAllArtefactsInMap(EPAbstractMap map){
 * 	return structureManager.getAllArtefactsInMap(map);
 * }
 */

/**
 * get statistics about how much of the required (min, equal) collect-restrictions have been fulfilled.
 *
 * @param structure
 * @return array with "done" at 0 and "to be done" at 1, or "null" if no restrictions apply
 */
public String[] getRestrictionStatistics(PortfolioStructure structure) {
	Integer[] stats = structureManager.getRestrictionStatistics(structure);
	if(stats == null) {
		return null;
	} else {
		return new String[]{stats[0].toString(), stats[1].toString()};
	}
}

/**
 * same as getRestrictionStatistics(PortfolioStructure structure) but recursively for a map.
 * get statistics about how much of the required (min, equal) collect-restrictions have been fulfilled.
 *
 * @param structure
 * @return array with "done" at 0 and "to be done" at 1, or "null" if no restrictions apply
 */
public String[] getRestrictionStatisticsOfMap(final PortfolioStructureMap structure) {
	Integer[] stats = structureManager.getRestrictionStatisticsOfMap(structure, 0, 0);
	// NOTE(review): unlike getRestrictionStatistics(), a null result is not handled here — confirm the manager never returns null for maps
	return new String[]{stats[0].toString(), stats[1].toString()};
}

/**
 * Check the collect restriction against the structure element
 * @param structure
 * @return
 */
public boolean checkCollectRestriction(PortfolioStructure structure) {
	return structureManager.checkCollectRestriction(structure);
}

public boolean checkCollectRestrictionOfMap(PortfolioStructureMap structure) {
	return checkAllCollectRestrictionRec(structure);
}

// recursive check: the restriction must hold for this node and every child below it
protected boolean checkAllCollectRestrictionRec(PortfolioStructure structure) {
	boolean allOk = structureManager.checkCollectRestriction(structure);
	List<PortfolioStructure> children = structureManager.loadStructureChildren(structure);
	for(PortfolioStructure child:children) {
		allOk &= checkAllCollectRestrictionRec(child);
	}
	return allOk;
}

/**
 * Create a map for a user
 * @param identity
 * @param title
 * @param description
 * @return
 */
public PortfolioStructureMap createAndPersistPortfolioDefaultMap(Identity identity, String title, String description) {
	PortfolioStructureMap map = structureManager.createPortfolioDefaultMap(identity, title, description);
	structureManager.savePortfolioStructure(map);
	return map;
}

/**
 * Create a map for a group
 * @param group
 * @param title
 * @param description
 * @return
 */
public PortfolioStructureMap createAndPersistPortfolioDefaultMap(BusinessGroup group, String title, String description) {
	PortfolioStructureMap map = structureManager.createPortfolioDefaultMap(group, title, description);
	structureManager.savePortfolioStructure(map);
	return map;
}

/**
 * Create a structured map, based on template.
 *
 * @param template
 * @param identity The author/owner of the map
 * @param title
 * @param description
 * @return The structure element
 */
public PortfolioStructureMap createAndPersistPortfolioStructuredMap(PortfolioStructureMap template, Identity identity, String title, String description, OLATResourceable targetOres, String targetSubPath, String targetBusinessPath) {
	PortfolioStructureMap map = structureManager.createPortfolioStructuredMap(template, identity, title, description, targetOres, targetSubPath, targetBusinessPath);
	structureManager.savePortfolioStructure(map);
	return map;
}

/**
 * create a structure-element
 * @param root
 * @param title
 * @param description
 * @return
 */
public PortfolioStructure createAndPersistPortfolioStructureElement(PortfolioStructure root, String title, String description) {
	EPStructureElement newStruct = (EPStructureElement) structureManager.createPortfolioStructure(root, title, description);
	// -1 appends the new element at the end of the parent's children
	if (root != null) structureManager.addStructureToStructure(root, newStruct, -1);
	structureManager.savePortfolioStructure(newStruct);
	return newStruct;
}

/**
 * create a page
 * @param root
 * @param title
 * @param description
 * @return
 */
public PortfolioStructure createAndPersistPortfolioPage(PortfolioStructure root, String title, String description) {
	EPPage newPage = (EPPage) structureManager.createPortfolioPage(root, title, description);
	if (root != null) structureManager.addStructureToStructure(root, newPage, -1);
	structureManager.savePortfolioStructure(newPage);
	return newPage;
}

/**
 * This method is reserved to the repository. It removes the template
 * completely
 * @param res
 */
public void deletePortfolioMapTemplate(OLATResourceable res) {
	structureManager.deletePortfolioMapTemplate(res);
}

/**
 * delete a portfoliostructure recursively with its childs
 * @param pStruct
 */
public void deletePortfolioStructure(PortfolioStructure pStruct) {
	structureManager.removeStructureRecursively(pStruct);
}

/**
 * save or update a structure
 * @param pStruct
 */
public void savePortfolioStructure(PortfolioStructure pStruct) {
	structureManager.savePortfolioStructure(pStruct);
}

/**
 * Number of children
 */
public int countStructureChildren(PortfolioStructure structure) {
	return structureManager.countStructureChildren(structure);
}

/**
 * Load a protfolio structure by its resource
 * @param ores
 * @return
 */
public PortfolioStructure loadPortfolioStructure(OLATResourceable ores) {
	return structureManager.loadPortfolioStructure(ores);
}

/**
 * Load a protfolio structure by its resourceable id
 * @param resId
 * @return
 */
public EPMapShort loadMapShortByResourceId(Long resId) {
	return structureManager.loadMapShortByResourceId(resId);
}

/**
 * Load a portfolio structure by its primary key. DON'T USE THIS METHOD
 * TO RELOAD AN OBJECT.
If you want do this, use the method
 * reloadPortfolioStructure(PortfolioStructure structure)
 * @param key cannot be null
 * @return The structure element or null if not found
 */
public PortfolioStructure loadPortfolioStructureByKey(Long key){
	return structureManager.loadPortfolioStructureByKey(key);
}

/**
 * Reload a portfolio structure
 * @param structure cannot be null
 * @return The reloaded structure element
 */
public PortfolioStructure reloadPortfolioStructure(PortfolioStructure structure){
	return structureManager.reloadPortfolioStructure(structure);
}

/**
 * Load the OLAT resource with the primary of the structure element
 * @param key cannot be null
 * @return The resource or null if not found
 */
public OLATResource loadOlatResourceFromByKey(Long key) {
	return structureManager.loadOlatResourceFromStructureElByKey(key);
}

/**
 * Retrieve the parent of the structure
 * @param structure
 * @return
 */
public PortfolioStructure loadStructureParent(PortfolioStructure structure) {
	return structureManager.loadStructureParent(structure);
}

/**
 * Retrieve the children structures
 * @param structure
 * @return
 */
public List<PortfolioStructure> loadStructureChildren(PortfolioStructure structure) {
	return structureManager.loadStructureChildren(structure);
}

/**
 * Retrieve a page of the children structures
 * @param structure
 * @param firstResult
 * @param maxResults
 * @return
 */
public List<PortfolioStructure> loadStructureChildren(PortfolioStructure structure, int firstResult, int maxResults) {
	return structureManager.loadStructureChildren(structure, firstResult, maxResults);
}

public PortfolioStructureMap loadPortfolioStructureMap(Identity identity, PortfolioStructureMap template, OLATResourceable targetOres, String targetSubPath, String targetBusinessPath){
	//sync the map with the template on opening it in gui, not on loading!
	return structureManager.loadPortfolioStructuredMap(identity, template, targetOres, targetSubPath, targetBusinessPath);
}

/**
 *
 * @param identity Cannot be null
 * @param targetOres Cannot be null
 * @param targetSubPath
 * @param targetBusinessPath
 * @return
 */
public List<PortfolioStructureMap> loadPortfolioStructureMaps(Identity identity, OLATResourceable targetOres, String targetSubPath, String targetBusinessPath){
	//sync the map with the template on opening it in gui, not on loading!
	return structureManager.loadPortfolioStructuredMaps(identity, targetOres, targetSubPath, targetBusinessPath);
}

/**
 * get the "already in use" state of a structuredMapTemplate
 * @param template
 * @param targetOres
 * @param targetSubPath
 * @param targetBusinessPath
 * @return
 */
public boolean isTemplateInUse(PortfolioStructureMap template, OLATResourceable targetOres, String targetSubPath, String targetBusinessPath) {
	return structureManager.isTemplateInUse(template, targetOres, targetSubPath, targetBusinessPath);
}

/**
 * get root vfs-container where artefact file-system data is persisted
 * @return
 */
public VFSContainer getArtefactsRoot(){
	return artefactManager.getArtefactsRoot();
}

/**
 * get vfs-container of a specific artefact
 * @param artefact
 * @return
 */
public VFSContainer getArtefactContainer(AbstractArtefact artefact) {
	return artefactManager.getArtefactContainer(artefact);
}

/**
 * get a temporary folder to store files while in wizzard
 * @param ident
 * @return
 */
public VFSContainer getArtefactsTempContainer(Identity ident){
	return artefactManager.getArtefactsTempContainer(ident);
}

/**
 * as large fulltext-content of an artefact is persisted on filesystem, use this method to get fulltext
 *
 * @param artefact
 * @return
 */
public String getArtefactFullTextContent(AbstractArtefact artefact){
	return artefactManager.getArtefactFullTextContent(artefact);
}

/**
 * Check if the identity is the owner of this portfolio resource.
 * @param identity
 * @param ores
 * @return
 */
public boolean isMapOwner(Identity identity, OLATResourceable ores) {
	return structureManager.isMapOwner(identity, ores);
}

/**
 * Check if the identity is owner of the portfolio resource or
 * in a valid policy.
 * @param identity
 * @param ores
 * @return
 */
public boolean isMapVisible(Identity identity, OLATResourceable ores) {
	return structureManager.isMapVisible(identity, ores);
}

public boolean isMapShared(PortfolioStructureMap map) {
	OLATResource resource = map.getOlatResource();
	return isMapShared(resource);
}

public boolean isMapShared(OLATResource resource) {
	// shared = at least one policy on the resource grants read permission
	List<Policy> policies = securityManager.getPoliciesOfResource(resource, null);
	for(Policy policy:policies) {
		if(policy.getPermission().contains(Constants.PERMISSION_READ)) {
			return true;
		}
	}
	return false;
}

/**
 * Return a list of wrapper containing the read policies of the map
 * @param map
 */
public List<EPMapPolicy> getMapPolicies(PortfolioStructureMap map) {
	return policyManager.getMapPolicies(map);
}

/**
 * Update the map policies of a map. The missing policies are deleted!
 * @param map
 * @param policyWrappers
 */
public void updateMapPolicies(PortfolioStructureMap map, List<EPMapPolicy> policyWrappers) {
	policyManager.updateMapPolicies(map, policyWrappers);
}

/**
 * submit and close a structured map from a portfolio task
 * @param map
 */
public void submitMap(PortfolioStructureMap map) {
	submitMap(map, true);
}

// Submit a structured map: close it via the structure manager, increment the owners'
// course-node attempts and notify the assessment publishers. logActivity selects the
// foreground vs. background attempt increment.
private void submitMap(PortfolioStructureMap map, boolean logActivity) {
	if(!(map instanceof EPStructuredMap)) return;//add an exception
	EPStructuredMap submittedMap = (EPStructuredMap)map;
	structureManager.submitMap(submittedMap);
	// resolve the course and course node this portfolio task belongs to
	EPTargetResource resource = submittedMap.getTargetResource();
	OLATResourceable courseOres = resource.getOLATResourceable();
	ICourse course = CourseFactory.loadCourse(courseOres);
	AssessmentManager am = course.getCourseEnvironment().getAssessmentManager();
	CourseNode courseNode = course.getRunStructure().getNode(resource.getSubPath());
	List<Identity> owners = securityManager.getIdentitiesOfSecurityGroup(submittedMap.getOwnerGroup());
	for(Identity owner:owners) {
		if (courseNode != null) { // courseNode might have been deleted meanwhile
			IdentityEnvironment ienv = new IdentityEnvironment();
			ienv.setIdentity(owner);
			UserCourseEnvironment uce = new UserCourseEnvironmentImpl(ienv, course.getCourseEnvironment());
			if(logActivity) {
				am.incrementNodeAttempts(courseNode, owner, uce);
			} else {
				am.incrementNodeAttemptsInBackground(courseNode, owner, uce);
			}
		}
		// notifications and audit log run even when the course node is gone
		assessmentNotificationsHandler.markPublisherNews(owner, course.getResourceableId());
		logAudit("Map " + map + " from " + owner.getName() + " has been submitted.");
	}
}

/**
 * Close all maps after the deadline if there is a deadline. It can be a long running
 * process if a lot of maps are involved.
*/ public void closeMapAfterDeadline() { List<PortfolioStructureMap> mapsToClose = structureManager.getOpenStructuredMapAfterDeadline(); int count = 0; for(PortfolioStructureMap mapToClose:mapsToClose) { submitMap(mapToClose, false); if(count % 5 == 0) { // this possibly takes longer than connection timeout, so do intermediatecommits. dbInstance.intermediateCommit(); } } } /** * get a valid name of style for a given PortfolioStructure * if style is not enabled anymore, the default will be used. * @param struct * @return the set style or the default from config if nothing is set. */ public String getValidStyleName(PortfolioStructure struct){ // first style in list is the default, can be named default. List<String> allStyles = portfolioModule.getAvailableMapStyles(); if (allStyles == null || allStyles.size() == 0) throw new AssertException("at least one style (that also exists in brasato.css must be configured for maps."); String styleName = ((EPStructureElement)struct).getStyle(); if(StringHelper.containsNonWhitespace(styleName) && allStyles.contains(styleName)) { return styleName; } return allStyles.get(0); } /** * The structure will be without any check on the DB copied. All the * children structures MUST be loaded. This method is to use with the * output of XStream at examples. * @param root * @param identity * @return The persisted structure */ public PortfolioStructureMap importPortfolioMapTemplate(PortfolioStructure root, Identity identity) { return structureManager.importPortfolioMapTemplate(root, identity); } /** * check if given identity has access to this feed. 
 * reverse lookup feed -> artefact -> shared map
 * @param feed
 * @param identity
 * @return
 */
public boolean checkFeedAccess(OLATResourceable feed, Identity identity){
	String feedBP = LiveBlogArtefactHandler.LIVEBLOG + feed.getResourceableId() + "]";
	List<AbstractArtefact> artefact = loadArtefactsByBusinessPath(feedBP, null);
	if (artefact != null && artefact.size() == 1) {
		// accessible if the artefact is linked in at least one map visible to the identity
		List<PortfolioStructure> linkedMaps = getReferencedMapsForArtefact(artefact.get(0));
		for (PortfolioStructure map : linkedMaps) {
			if (isMapVisible(identity, map)){
				return true;
			}
		}
		// see OLAT-6282: allow the owner of the artefact to view the feed, even if its not any longer in any map.
		if (linkedMaps.size() == 0 && artefact.get(0).getAuthor().equalsByPersistableKey(identity)){
			return true;
		}
	}
	return false;
}

/**
 * returns all Owners of the given map as comma-separated list
 * @param map
 * @return
 */
public String getAllOwnersAsString(PortfolioStructureMap map){
	if(map.getOwnerGroup() == null) {
		return null;
	}
	List<SecurityGroup> ownerGroups = Collections.singletonList(map.getOwnerGroup());
	// 0/-1 limits: fetch all members of the owner group
	List<IdentityShort> ownerIdents = securityManager.getIdentitiesShortOfSecurityGroups(ownerGroups, 0, -1);
	List<String> identNames = new ArrayList<String>();
	for (IdentityShort identity : ownerIdents) {
		String fullName = identity.getFirstName() + " " + identity.getLastName();
		identNames.add(fullName);
	}
	return StringHelper.formatAsCSVString(identNames);
}

/**
 * returns the first Owner for the given Map.
 *
 * @param map
 * @return
 */
public String getFirstOwnerAsString(PortfolioStructureMap map){
	if(map.getOwnerGroup() == null) {
		return "n/a";
	}
	// only the first member is needed, so limit the query to one row
	List<Identity> ownerIdents = securityManager.getIdentitiesOfSecurityGroup(map.getOwnerGroup(), 0, 1);
	if(ownerIdents.size() > 0){
		Identity id = ownerIdents.get(0);
		return userManager.getUserDisplayName(id);
	}
	return "n/a";
}

public String getFirstOwnerAsString(EPMapShort map){
	if(map.getOwnerGroup() == null) {
		return "n/a";
	}
	List<SecurityGroup> secGroups = Collections.singletonList(map.getOwnerGroup());
	List<IdentityShort> ownerIdents = securityManager.getIdentitiesShortOfSecurityGroups(secGroups, 0, 1);
	if(ownerIdents.size() > 0){
		IdentityShort id = ownerIdents.get(0);
		return userManager.getUserDisplayName(id);
	}
	return "n/a";
}

/**
 * returns the first OwnerIdentity for the given Map.
 *
 * @param map
 * @return
 */
public Identity getFirstOwnerIdentity(PortfolioStructureMap map){
	if(map.getOwnerGroup() == null) {
		return null;
	}
	List<Identity> ownerIdents = securityManager.getIdentitiesOfSecurityGroup(map.getOwnerGroup(), 0, 1);
	if (ownerIdents.size() > 0) {
		Identity id = ownerIdents.get(0);
		return id;
	}
	return null;
}

// not yet available
public void archivePortfolio() {}

// not yet available
public void exportPortfolio() {}

// not yet available
public void importPortfolio() {}
}
package org.springframework.security.oauth2.provider.token.store.redis;

import org.springframework.data.redis.connection.RedisConnection;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.core.Cursor;
import org.springframework.data.redis.core.ScanOptions;
import org.springframework.security.oauth2.common.ExpiringOAuth2RefreshToken;
import org.springframework.security.oauth2.common.OAuth2AccessToken;
import org.springframework.security.oauth2.common.OAuth2RefreshToken;
import org.springframework.security.oauth2.provider.OAuth2Authentication;
import org.springframework.security.oauth2.provider.token.AuthenticationKeyGenerator;
import org.springframework.security.oauth2.provider.token.DefaultAuthenticationKeyGenerator;
import org.springframework.security.oauth2.provider.token.TokenStore;
import org.springframework.util.ClassUtils;
import org.springframework.util.ReflectionUtils;

import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.List;

/**
 * {@link TokenStore} implementation that keeps access tokens, refresh tokens and their
 * associated {@link OAuth2Authentication}s in Redis, under a set of key namespaces
 * (optionally prefixed via {@link #setPrefix(String)}).
 *
 * @author efenderbosch
 */
public class RedisTokenStore implements TokenStore {

	// key namespaces used to index the same token data from different directions
	private static final String ACCESS = "access:";
	private static final String AUTH_TO_ACCESS = "auth_to_access:";
	private static final String AUTH = "auth:";
	private static final String REFRESH_AUTH = "refresh_auth:";
	private static final String ACCESS_TO_REFRESH = "access_to_refresh:";
	private static final String REFRESH = "refresh:";
	private static final String REFRESH_TO_ACCESS = "refresh_to_access:";
	private static final String CLIENT_ID_TO_ACCESS = "client_id_to_access:";
	private static final String UNAME_TO_ACCESS = "uname_to_access:";

	// Spring Data Redis 2.0 is detected by a class only present in 2.x; when present,
	// RedisConnection.set is invoked reflectively (see loadRedisConnectionMethods_2_0),
	// presumably to bridge a signature change between 1.x and 2.x — see the 2.0 API docs.
	private static final boolean springDataRedis_2_0 = ClassUtils.isPresent(
			"org.springframework.data.redis.connection.RedisStandaloneConfiguration",
			RedisTokenStore.class.getClassLoader());

	private final RedisConnectionFactory connectionFactory;
	private AuthenticationKeyGenerator authenticationKeyGenerator = new DefaultAuthenticationKeyGenerator();
	private RedisTokenStoreSerializationStrategy serializationStrategy = new JdkSerializationStrategy();

	// optional namespace prepended to every Redis key
	private String prefix = "";

	// cached reflective handle for RedisConnection.set(byte[], byte[]) on 2.0+
	private Method redisConnectionSet_2_0;

	public RedisTokenStore(RedisConnectionFactory connectionFactory) {
		this.connectionFactory = connectionFactory;
		if (springDataRedis_2_0) {
			this.loadRedisConnectionMethods_2_0();
		}
	}

	public void setAuthenticationKeyGenerator(AuthenticationKeyGenerator authenticationKeyGenerator) {
		this.authenticationKeyGenerator = authenticationKeyGenerator;
	}

	public void setSerializationStrategy(RedisTokenStoreSerializationStrategy serializationStrategy) {
		this.serializationStrategy = serializationStrategy;
	}

	public void setPrefix(String prefix) {
		this.prefix = prefix;
	}

	private void loadRedisConnectionMethods_2_0() {
		this.redisConnectionSet_2_0 = ReflectionUtils.findMethod(
				RedisConnection.class, "set", byte[].class, byte[].class);
	}

	private RedisConnection getConnection() {
		return connectionFactory.getConnection();
	}

	private byte[] serialize(Object object) {
		return serializationStrategy.serialize(object);
	}

	// serialize a key WITH the configured prefix applied; all key lookups should go
	// through this method so the prefix is consistent
	private byte[] serializeKey(String object) {
		return serialize(prefix + object);
	}

	private OAuth2AccessToken deserializeAccessToken(byte[] bytes) {
		return serializationStrategy.deserialize(bytes, OAuth2AccessToken.class);
	}

	private OAuth2Authentication deserializeAuthentication(byte[] bytes) {
		return serializationStrategy.deserialize(bytes, OAuth2Authentication.class);
	}

	private OAuth2RefreshToken deserializeRefreshToken(byte[] bytes) {
		return serializationStrategy.deserialize(bytes, OAuth2RefreshToken.class);
	}

	private byte[] serialize(String string) {
		return serializationStrategy.serialize(string);
	}

	private String deserializeString(byte[] bytes) {
		return serializationStrategy.deserializeString(bytes);
	}

	@Override
	public OAuth2AccessToken getAccessToken(OAuth2Authentication authentication) {
		String key =
authenticationKeyGenerator.extractKey(authentication);
		byte[] serializedKey = serializeKey(AUTH_TO_ACCESS + key);
		byte[] bytes = null;
		RedisConnection conn = getConnection();
		try {
			bytes = conn.get(serializedKey);
		} finally {
			conn.close();
		}
		OAuth2AccessToken accessToken = deserializeAccessToken(bytes);
		if (accessToken != null) {
			OAuth2Authentication storedAuthentication = readAuthentication(accessToken.getValue());
			if ((storedAuthentication == null || !key.equals(authenticationKeyGenerator.extractKey(storedAuthentication)))) {
				// Keep the stores consistent (maybe the same user is
				// represented by this authentication but the details have
				// changed)
				storeAccessToken(accessToken, authentication);
			}
		}
		return accessToken;
	}

	@Override
	public OAuth2Authentication readAuthentication(OAuth2AccessToken token) {
		return readAuthentication(token.getValue());
	}

	// Look up the authentication stored under "auth:<token>"; returns null when the
	// entry is missing (deserialization of a null payload).
	@Override
	public OAuth2Authentication readAuthentication(String token) {
		byte[] bytes = null;
		RedisConnection conn = getConnection();
		try {
			bytes = conn.get(serializeKey(AUTH + token));
		} finally {
			conn.close();
		}
		OAuth2Authentication auth = deserializeAuthentication(bytes);
		return auth;
	}

	@Override
	public OAuth2Authentication readAuthenticationForRefreshToken(OAuth2RefreshToken token) {
		return readAuthenticationForRefreshToken(token.getValue());
	}

	public OAuth2Authentication readAuthenticationForRefreshToken(String token) {
		RedisConnection conn = getConnection();
		try {
			byte[] bytes = conn.get(serializeKey(REFRESH_AUTH + token));
			OAuth2Authentication auth = deserializeAuthentication(bytes);
			return auth;
		} finally {
			conn.close();
		}
	}

	// Store the token and its authentication under all of the class's key namespaces
	// so it can be found by token value, by authentication key, by client id and by
	// user name. Continues on L86-L87.
	@Override
	public void storeAccessToken(OAuth2AccessToken token, OAuth2Authentication authentication) {
		byte[] serializedAccessToken = serialize(token);
		byte[] serializedAuth = serialize(authentication);
		byte[] accessKey = serializeKey(ACCESS + token.getValue());
		byte[] authKey = serializeKey(AUTH + token.getValue());
		byte[] authToAccessKey = serializeKey(AUTH_TO_ACCESS +
authenticationKeyGenerator.extractKey(authentication));
		byte[] approvalKey = serializeKey(UNAME_TO_ACCESS + getApprovalKey(authentication));
		byte[] clientId = serializeKey(CLIENT_ID_TO_ACCESS + authentication.getOAuth2Request().getClientId());

		RedisConnection conn = getConnection();
		try {
			// batch all writes in a single pipeline round trip
			conn.openPipeline();
			if (springDataRedis_2_0) {
				// Spring Data Redis 2.0: call set(byte[], byte[]) reflectively
				try {
					this.redisConnectionSet_2_0.invoke(conn, accessKey, serializedAccessToken);
					this.redisConnectionSet_2_0.invoke(conn, authKey, serializedAuth);
					this.redisConnectionSet_2_0.invoke(conn, authToAccessKey, serializedAccessToken);
				} catch (Exception ex) {
					throw new RuntimeException(ex);
				}
			} else {
				conn.set(accessKey, serializedAccessToken);
				conn.set(authKey, serializedAuth);
				conn.set(authToAccessKey, serializedAccessToken);
			}
			if (!authentication.isClientOnly()) {
				// index the token by user name only when a user is involved
				conn.sAdd(approvalKey, serializedAccessToken);
			}
			conn.sAdd(clientId, serializedAccessToken);
			if (token.getExpiration() != null) {
				// expire every index together with the token itself
				int seconds = token.getExpiresIn();
				conn.expire(accessKey, seconds);
				conn.expire(authKey, seconds);
				conn.expire(authToAccessKey, seconds);
				conn.expire(clientId, seconds);
				conn.expire(approvalKey, seconds);
			}
			OAuth2RefreshToken refreshToken = token.getRefreshToken();
			if (refreshToken != null && refreshToken.getValue() != null) {
				// maintain the bidirectional refresh <-> access mapping
				byte[] refresh = serialize(token.getRefreshToken().getValue());
				byte[] auth = serialize(token.getValue());
				byte[] refreshToAccessKey = serializeKey(REFRESH_TO_ACCESS + token.getRefreshToken().getValue());
				byte[] accessToRefreshKey = serializeKey(ACCESS_TO_REFRESH + token.getValue());
				if (springDataRedis_2_0) {
					try {
						this.redisConnectionSet_2_0.invoke(conn, refreshToAccessKey, auth);
						this.redisConnectionSet_2_0.invoke(conn, accessToRefreshKey, refresh);
					} catch (Exception ex) {
						throw new RuntimeException(ex);
					}
				} else {
					conn.set(refreshToAccessKey, auth);
					conn.set(accessToRefreshKey, refresh);
				}
				if (refreshToken instanceof ExpiringOAuth2RefreshToken) {
					ExpiringOAuth2RefreshToken expiringRefreshToken = (ExpiringOAuth2RefreshToken) refreshToken;
					Date expiration = expiringRefreshToken.getExpiration();
					if (expiration != null) {
						int seconds = Long.valueOf((expiration.getTime() - System.currentTimeMillis()) / 1000L)
								.intValue();
						conn.expire(refreshToAccessKey, seconds);
						conn.expire(accessToRefreshKey, seconds);
					}
				}
			}
			conn.closePipeline();
		} finally {
			conn.close();
		}
	}

	// "clientId" or "clientId:userName" — the key under which per-user token sets are kept
	private static String getApprovalKey(OAuth2Authentication authentication) {
		String userName = authentication.getUserAuthentication() == null ? ""
				: authentication.getUserAuthentication().getName();
		return getApprovalKey(authentication.getOAuth2Request().getClientId(), userName);
	}

	private static String getApprovalKey(String clientId, String userName) {
		return clientId + (userName == null ? "" : ":" + userName);
	}

	@Override
	public void removeAccessToken(OAuth2AccessToken accessToken) {
		removeAccessToken(accessToken.getValue());
	}

	@Override
	public OAuth2AccessToken readAccessToken(String tokenValue) {
		byte[] key = serializeKey(ACCESS + tokenValue);
		byte[] bytes = null;
		RedisConnection conn = getConnection();
		try {
			bytes = conn.get(key);
		} finally {
			conn.close();
		}
		OAuth2AccessToken accessToken = deserializeAccessToken(bytes);
		return accessToken;
	}

	// Delete a token and all of its indexes. The pipeline first reads the stored token
	// and authentication (so the secondary indexes can be computed), then deletes.
	public void removeAccessToken(String tokenValue) {
		byte[] accessKey = serializeKey(ACCESS + tokenValue);
		byte[] authKey = serializeKey(AUTH + tokenValue);
		byte[] accessToRefreshKey = serializeKey(ACCESS_TO_REFRESH + tokenValue);
		RedisConnection conn = getConnection();
		try {
			conn.openPipeline();
			conn.get(accessKey);
			conn.get(authKey);
			conn.del(accessKey);
			conn.del(accessToRefreshKey);
			// Don't remove the refresh token - it's up to the caller to do that
			conn.del(authKey);
			List<Object> results = conn.closePipeline();
			// pipeline results arrive in submission order: [0] = access, [1] = auth
			byte[] access = (byte[]) results.get(0);
			byte[] auth = (byte[]) results.get(1);

			OAuth2Authentication authentication = deserializeAuthentication(auth);
			if (authentication != null) {
				String key = authenticationKeyGenerator.extractKey(authentication);
byte[] authToAccessKey = serializeKey(AUTH_TO_ACCESS + key); byte[] unameKey = serializeKey(UNAME_TO_ACCESS + getApprovalKey(authentication)); byte[] clientId = serializeKey(CLIENT_ID_TO_ACCESS + authentication.getOAuth2Request().getClientId()); conn.openPipeline(); conn.del(authToAccessKey); conn.sRem(unameKey, access); conn.sRem(clientId, access); conn.del(serialize(ACCESS + key)); conn.closePipeline(); } } finally { conn.close(); } } @Override public void storeRefreshToken(OAuth2RefreshToken refreshToken, OAuth2Authentication authentication) { byte[] refreshKey = serializeKey(REFRESH + refreshToken.getValue()); byte[] refreshAuthKey = serializeKey(REFRESH_AUTH + refreshToken.getValue()); byte[] serializedRefreshToken = serialize(refreshToken); RedisConnection conn = getConnection(); try { conn.openPipeline(); if (springDataRedis_2_0) { try { this.redisConnectionSet_2_0.invoke(conn, refreshKey, serializedRefreshToken); this.redisConnectionSet_2_0.invoke(conn, refreshAuthKey, serialize(authentication)); } catch (Exception ex) { throw new RuntimeException(ex); } } else { conn.set(refreshKey, serializedRefreshToken); conn.set(refreshAuthKey, serialize(authentication)); } if (refreshToken instanceof ExpiringOAuth2RefreshToken) { ExpiringOAuth2RefreshToken expiringRefreshToken = (ExpiringOAuth2RefreshToken) refreshToken; Date expiration = expiringRefreshToken.getExpiration(); if (expiration != null) { int seconds = Long.valueOf((expiration.getTime() - System.currentTimeMillis()) / 1000L) .intValue(); conn.expire(refreshKey, seconds); conn.expire(refreshAuthKey, seconds); } } conn.closePipeline(); } finally { conn.close(); } } @Override public OAuth2RefreshToken readRefreshToken(String tokenValue) { byte[] key = serializeKey(REFRESH + tokenValue); byte[] bytes = null; RedisConnection conn = getConnection(); try { bytes = conn.get(key); } finally { conn.close(); } OAuth2RefreshToken refreshToken = deserializeRefreshToken(bytes); return refreshToken; } @Override public 
void removeRefreshToken(OAuth2RefreshToken refreshToken) { removeRefreshToken(refreshToken.getValue()); } public void removeRefreshToken(String tokenValue) { byte[] refreshKey = serializeKey(REFRESH + tokenValue); byte[] refreshAuthKey = serializeKey(REFRESH_AUTH + tokenValue); byte[] refresh2AccessKey = serializeKey(REFRESH_TO_ACCESS + tokenValue); byte[] access2RefreshKey = serializeKey(ACCESS_TO_REFRESH + tokenValue); RedisConnection conn = getConnection(); try { conn.openPipeline(); conn.del(refreshKey); conn.del(refreshAuthKey); conn.del(refresh2AccessKey); conn.del(access2RefreshKey); conn.closePipeline(); } finally { conn.close(); } } @Override public void removeAccessTokenUsingRefreshToken(OAuth2RefreshToken refreshToken) { removeAccessTokenUsingRefreshToken(refreshToken.getValue()); } private void removeAccessTokenUsingRefreshToken(String refreshToken) { byte[] key = serializeKey(REFRESH_TO_ACCESS + refreshToken); List<Object> results = null; RedisConnection conn = getConnection(); try { conn.openPipeline(); conn.get(key); conn.del(key); results = conn.closePipeline(); } finally { conn.close(); } if (results == null) { return; } byte[] bytes = (byte[]) results.get(0); String accessToken = deserializeString(bytes); if (accessToken != null) { removeAccessToken(accessToken); } } private List<byte[]> getByteLists(byte[] approvalKey, RedisConnection conn) { List<byte[]> byteList; Long size = conn.sCard(approvalKey); byteList = new ArrayList<byte[]>(size.intValue()); Cursor<byte[]> cursor = conn.sScan(approvalKey, ScanOptions.NONE); while(cursor.hasNext()) { byteList.add(cursor.next()); } return byteList; } @Override public Collection<OAuth2AccessToken> findTokensByClientIdAndUserName(String clientId, String userName) { byte[] approvalKey = serializeKey(UNAME_TO_ACCESS + getApprovalKey(clientId, userName)); List<byte[]> byteList = null; RedisConnection conn = getConnection(); try { byteList = getByteLists(approvalKey, conn); } finally { conn.close(); } if 
(byteList == null || byteList.size() == 0) { return Collections.<OAuth2AccessToken> emptySet(); } List<OAuth2AccessToken> accessTokens = new ArrayList<OAuth2AccessToken>(byteList.size()); for (byte[] bytes : byteList) { OAuth2AccessToken accessToken = deserializeAccessToken(bytes); accessTokens.add(accessToken); } return Collections.<OAuth2AccessToken> unmodifiableCollection(accessTokens); } @Override public Collection<OAuth2AccessToken> findTokensByClientId(String clientId) { byte[] key = serializeKey(CLIENT_ID_TO_ACCESS + clientId); List<byte[]> byteList = null; RedisConnection conn = getConnection(); try { byteList = getByteLists(key, conn); } finally { conn.close(); } if (byteList == null || byteList.size() == 0) { return Collections.<OAuth2AccessToken> emptySet(); } List<OAuth2AccessToken> accessTokens = new ArrayList<OAuth2AccessToken>(byteList.size()); for (byte[] bytes : byteList) { OAuth2AccessToken accessToken = deserializeAccessToken(bytes); accessTokens.add(accessToken); } return Collections.<OAuth2AccessToken> unmodifiableCollection(accessTokens); } }
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.test;

import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.util.TestUtil;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.segments.IndexSegments;
import org.elasticsearch.action.admin.indices.segments.IndexShardSegments;
import org.elasticsearch.action.admin.indices.segments.IndicesSegmentResponse;
import org.elasticsearch.action.admin.indices.segments.ShardSegments;
import org.elasticsearch.action.admin.indices.upgrade.get.IndexUpgradeStatus;
import org.elasticsearch.action.admin.indices.upgrade.get.UpgradeStatusResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.engine.Segment;
import org.elasticsearch.index.shard.MergePolicyConfig;
import org.elasticsearch.indices.recovery.RecoverySettings;

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.DirectoryStream;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

import static junit.framework.TestCase.assertFalse;
import static junit.framework.TestCase.assertTrue;
import static org.elasticsearch.test.ESTestCase.randomInt;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.junit.Assert.assertEquals;

/**
 * Test helpers for backwards-compatibility ("bwc") index testing: locating
 * and unpacking zipped old-version index archives into node data paths, and
 * asserting index upgrade state via the upgrade-status and segments APIs.
 */
public class OldIndexUtils {

    /** Lists all {@code <prefix>-*.zip} archives under the bwc directory, sorted by file name. */
    public static List<String> loadIndexesList(String prefix, Path bwcIndicesPath) throws IOException {
        List<String> indexes = new ArrayList<>();
        try (DirectoryStream<Path> stream = Files.newDirectoryStream(bwcIndicesPath, prefix + "-*.zip")) {
            for (Path path : stream) {
                indexes.add(path.getFileName().toString());
            }
        }
        Collections.sort(indexes);
        return indexes;
    }

    /** Node settings used by upgrade tests (merging off so segments stay old). */
    public static Settings getSettings() {
        return Settings.builder()
            .put(MergePolicyConfig.INDEX_MERGE_ENABLED, false) // disable merging so no segments will be upgraded
            .put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS, 30) // increase recovery speed for small files
            .build();
    }

    /**
     * Unzips one bwc index archive into {@code unzipDir} and distributes the
     * single contained cluster's index directory over the given data paths.
     *
     * @throws IllegalStateException if the archive holds more than one cluster
     */
    public static void loadIndex(String indexName, String indexFile, Path unzipDir, Path bwcPath, ESLogger logger,
                                 Path... paths) throws Exception {
        Path unzipDataDir = unzipDir.resolve("data");

        Path backwardsIndex = bwcPath.resolve(indexFile);
        // decompress the index
        try (InputStream stream = Files.newInputStream(backwardsIndex)) {
            TestUtil.unzip(stream, unzipDir);
        }

        // check it is unique
        assertTrue(Files.exists(unzipDataDir));
        Path[] list = FileSystemUtils.files(unzipDataDir);
        if (list.length != 1) {
            throw new IllegalStateException("Backwards index must contain exactly one cluster");
        }

        // the bwc scripts packs the indices under this path
        Path src = list[0].resolve("nodes/0/indices/" + indexName);
        assertTrue("[" + indexFile + "] missing index dir: " + src.toString(), Files.exists(src));
        copyIndex(logger, src, indexName, paths);
    }

    /** Asserts, per index, that some bytes still require upgrading. */
    public static void assertNotUpgraded(Client client, String... index) throws Exception {
        for (IndexUpgradeStatus status : getUpgradeStatus(client, index)) {
            assertTrue("index " + status.getIndex() + " should not be zero sized", status.getTotalBytes() != 0);
            // TODO: it would be better for this to be strictly greater, but sometimes an extra flush
            // mysteriously happens after the second round of docs are indexed
            assertTrue("index " + status.getIndex() + " should have recovered some segments from transaction log",
                status.getTotalBytes() >= status.getToUpgradeBytes());
            assertTrue("index " + status.getIndex() + " should need upgrading", status.getToUpgradeBytes() != 0);
        }
    }

    /** Fetches upgrade status for the given indices, failing the test on shard failures. */
    @SuppressWarnings("unchecked")
    public static Collection<IndexUpgradeStatus> getUpgradeStatus(Client client, String... indices) throws Exception {
        UpgradeStatusResponse upgradeStatusResponse = client.admin().indices().prepareUpgradeStatus(indices).get();
        assertNoFailures(upgradeStatusResponse);
        return upgradeStatusResponse.getIndices().values();
    }

    // randomly distribute the files from src over dests paths
    public static void copyIndex(final ESLogger logger, final Path src, final String indexName,
                                 final Path... dests) throws IOException {
        for (Path dest : dests) {
            Path indexDir = dest.resolve(indexName);
            assertFalse(Files.exists(indexDir));
            Files.createDirectories(indexDir);
        }
        Files.walkFileTree(src, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
                // Mirror the directory structure into every destination path.
                Path relativeDir = src.relativize(dir);
                for (Path dest : dests) {
                    Path destDir = dest.resolve(indexName).resolve(relativeDir);
                    Files.createDirectories(destDir);
                }
                return FileVisitResult.CONTINUE;
            }

            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                if (file.getFileName().toString().equals(IndexWriter.WRITE_LOCK_NAME)) {
                    // skip lock file, we don't need it
                    logger.trace("Skipping lock file: " + file.toString());
                    return FileVisitResult.CONTINUE;
                }

                // Each file goes to exactly one randomly-chosen destination.
                Path relativeFile = src.relativize(file);
                Path destFile = dests[randomInt(dests.length - 1)].resolve(indexName).resolve(relativeFile);
                logger.trace("--> Moving " + relativeFile.toString() + " to " + destFile.toString());
                Files.move(file, destFile);
                assertFalse(Files.exists(file));
                assertTrue(Files.exists(destFile));
                return FileVisitResult.CONTINUE;
            }
        });
    }

    /**
     * Asserts that nothing is left to upgrade, then double-checks via the
     * segments API that every live segment is on the current Lucene version.
     */
    public static void assertUpgraded(Client client, String... index) throws Exception {
        for (IndexUpgradeStatus status : getUpgradeStatus(client, index)) {
            assertTrue("index " + status.getIndex() + " should not be zero sized", status.getTotalBytes() != 0);
            assertEquals("index " + status.getIndex() + " should be upgraded",
                0, status.getToUpgradeBytes());
        }

        // double check using the segments api that all segments are actually upgraded
        IndicesSegmentResponse segsRsp;
        if (index == null) {
            segsRsp = client.admin().indices().prepareSegments().execute().actionGet();
        } else {
            segsRsp = client.admin().indices().prepareSegments(index).execute().actionGet();
        }
        for (IndexSegments indexSegments : segsRsp.getIndices().values()) {
            for (IndexShardSegments shard : indexSegments) {
                for (ShardSegments segs : shard.getShards()) {
                    for (Segment seg : segs.getSegments()) {
                        assertEquals("Index " + indexSegments.getIndex() + " has unupgraded segment " + seg.toString(),
                            Version.CURRENT.luceneVersion.major, seg.version.major);
                        assertEquals("Index " + indexSegments.getIndex() + " has unupgraded segment " + seg.toString(),
                            Version.CURRENT.luceneVersion.minor, seg.version.minor);
                    }
                }
            }
        }
    }

    /** Runs the upgrade API on one index and asserts the before/after upgrade state. */
    public static void assertUpgradeWorks(Client client, String indexName, Version version) throws Exception {
        if (OldIndexUtils.isLatestLuceneVersion(version) == false) {
            OldIndexUtils.assertNotUpgraded(client, indexName);
        }
        assertNoFailures(client.admin().indices().prepareUpgrade(indexName).get());
        assertUpgraded(client, indexName);
    }

    /** Parses the ES version out of an archive name shaped like "prefix-X.Y.Z.zip". */
    public static Version extractVersion(String index) {
        return Version.fromString(index.substring(index.indexOf('-') + 1, index.lastIndexOf('.')));
    }

    /** True when the version's Lucene major.minor matches the current build. */
    public static boolean isLatestLuceneVersion(Version version) {
        return version.luceneVersion.major == Version.CURRENT.luceneVersion.major &&
            version.luceneVersion.minor == Version.CURRENT.luceneVersion.minor;
    }
}
package com.centzy.badger.api.impl.server;

import com.centzy.badger.api.*;
import com.centzy.badger.common.service.BaseBadgerHandler;
import com.centzy.badger.core.*;
import com.centzy.badger.persistence.BadgerDatabase;
import com.centzy.badger.persistence.BadgerTransaction;
import com.centzy.badger.validation.BadgerDatabaseValidates;
import com.google.common.base.Preconditions;
import com.google.common.collect.*;
import com.google.inject.Inject;
import com.google.protobuf.Message;

import javax.annotation.Nullable;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;

/**
 * Server-side {@link BadgerApi} implementation that wraps every operation in a
 * {@link BadgerTransaction} obtained from the injected {@link BadgerDatabase}.
 *
 * @author Peter Edge (peter@locality.com).
 */
class CallableBadgerApiImpl extends BaseBadgerHandler implements BadgerApi {

  // Transaction factory / id source; validated at injection time.
  private final BadgerDatabase badgerDatabase;
  // Converts freeform messages into persistable entity messages.
  private final BadgerFreeformConverterProvider badgerFreeformConverterProvider;

  @Inject
  CallableBadgerApiImpl(
      BadgerCore badgerCore,
      @BadgerDatabaseValidates BadgerDatabase badgerDatabase,
      BadgerFreeformConverterProvider badgerFreeformConverterProvider
  ) {
    super(badgerCore);
    this.badgerDatabase = Preconditions.checkNotNull(badgerDatabase);
    this.badgerFreeformConverterProvider = Preconditions.checkNotNull(badgerFreeformConverterProvider);
  }

  @Override
  public String getNamespace() {
    return badgerDatabase.getNamespace();
  }

  /** Single-id convenience over the batch get; throws if not exactly one result. */
  @Override
  public <M extends Message> BadgerEntity<M> get(BadgerEntityDescriptor<M> badgerEntityDescriptor, String entityId) {
    return Iterables.getOnlyElement(get(badgerEntityDescriptor, ImmutableList.of(entityId)));
  }

  @Override
  public <M extends Message> List<BadgerEntity<M>> get(BadgerEntityDescriptor<M> badgerEntityDescriptor, List<String> entityIds) {
    return transact(badgerTransaction ->
        badgerTransaction.get(badgerEntityDescriptor, entityIds)
    );
  }

  /** Returns the single entity matching the equals-expression, or null when none match. */
  @Nullable
  @Override
  public <M extends Message> BadgerEntity<M> getForUniqueQuery(
      BadgerEntityDescriptor<M> badgerEntityDescriptor,
      BadgerEqualsExpression<?> badgerEqualsExpression) {
    return transact(badgerTransaction -> {
          List<BadgerEntity<M>> badgerEntities = badgerTransaction.getForValues(
              badgerEntityDescriptor,
              badgerEqualsExpression.getBadgerTagDescriptor(),
              ImmutableSet.of(badgerEqualsExpression.getValue())
          );
          // getOnlyElement enforces the "unique" contract when non-empty.
          return badgerEntities.isEmpty() ? null : Iterables.getOnlyElement(badgerEntities);
        }
    );
  }

  @Override
  public <M extends Message> List<BadgerEntity<M>> getForQuery(BadgerEntityDescriptor<M> badgerEntityDescriptor, BadgerQuery badgerQuery) {
    return transact(badgerTransaction -> {
          List<String> entityIds = getIdsForQuery(badgerTransaction, badgerEntityDescriptor, badgerQuery);
          return badgerTransaction.get(badgerEntityDescriptor, entityIds);
        }
    );
  }

  /** Single-id convenience over the batch read-only get. */
  @Override
  public <M extends Message> BadgerReadOnlyEntity<M> getReadOnly(BadgerEntityDescriptor<M> badgerEntityDescriptor, String entityId) {
    return Iterables.getOnlyElement(getReadOnly(badgerEntityDescriptor, ImmutableList.of(entityId)));
  }

  @Override
  public <M extends Message> List<BadgerReadOnlyEntity<M>> getReadOnly(
      BadgerEntityDescriptor<M> badgerEntityDescriptor,
      List<String> entityIds) {
    return transact(badgerTransaction ->
        getReadOnlyForBadgerEntities(badgerTransaction, badgerTransaction.get(badgerEntityDescriptor, entityIds))
    );
  }

  /** Read-only variant of {@link #getForUniqueQuery}; null when no entity matches. */
  @Nullable
  @Override
  public <M extends Message> BadgerReadOnlyEntity<M> getReadOnlyForUniqueQuery(
      BadgerEntityDescriptor<M> badgerEntityDescriptor,
      BadgerEqualsExpression<?> badgerEqualsExpression) {
    return transact(badgerTransaction -> {
      List<BadgerEntity<M>> badgerEntities = badgerTransaction.getForValues(
          badgerEntityDescriptor,
          badgerEqualsExpression.getBadgerTagDescriptor(),
          ImmutableSet.of(badgerEqualsExpression.getValue())
      );
      return badgerEntities.isEmpty()
          ? null
          : Iterables.getOnlyElement(getReadOnlyForBadgerEntities(badgerTransaction, badgerEntities));
    });
  }

  @Override
  public <M extends Message> List<BadgerReadOnlyEntity<M>> getReadOnlyForQuery(
      BadgerEntityDescriptor<M> badgerEntityDescriptor,
      BadgerQuery badgerQuery) {
    return transact(badgerTransaction -> {
          List<String> entityIds = getIdsForQuery(badgerTransaction, badgerEntityDescriptor, badgerQuery);
          return getReadOnlyForBadgerEntities(badgerTransaction, badgerTransaction.get(badgerEntityDescriptor, entityIds));
        }
    );
  }

  // Wraps each entity with its dereferenced read-only tag -> entity map.
  private <M extends Message> List<BadgerReadOnlyEntity<M>> getReadOnlyForBadgerEntities(
      BadgerTransaction badgerTransaction,
      List<BadgerEntity<M>> badgerEntities) {
    ImmutableList.Builder<BadgerReadOnlyEntity<M>> builder = ImmutableList.builder();
    badgerEntities.forEach(badgerEntity -> {
      builder.add(
          new BadgerReadOnlyEntity<>(
              badgerEntity,
              getReadOnlyBadgerTagToBadgerEntityForBadgerEntity(badgerTransaction, badgerEntity)
          )
      );
    });
    return builder.build();
  }

  // For every read-only dereference tag on the entity, fetches the foreign
  // entity its value (assumed to be a foreign id string) points at.
  private ImmutableMap<BadgerTag, BadgerEntity<? extends Message>> getReadOnlyBadgerTagToBadgerEntityForBadgerEntity(
      BadgerTransaction badgerTransaction,
      BadgerEntity<? extends Message> badgerEntity) {
    ImmutableSet<? extends BadgerTagDescriptor> readOnlyBadgerTagDescriptors
        = badgerEntity.getBadgerMessageDescriptor().getReadOnlyDereferenceTagDescriptors();
    ImmutableMap.Builder<BadgerTag, BadgerEntity<? extends Message>> builder = ImmutableMap.builder();
    badgerEntity.getBadgerTagToValue().forEach((badgerTag, value) -> {
          if (readOnlyBadgerTagDescriptors.contains(badgerTag.getBadgerTagDescriptor())) {
            builder.put(
                badgerTag,
                Iterables.getOnlyElement(
                    badgerTransaction.get(
                        badgerTag.getBadgerTagDescriptor().getForeignIdBadgerEntityDescriptor(),
                        ImmutableList.of((String) value)
                    )
                )
            );
          }
        }
    );
    return builder.build();
  }

  // Resolves a query's condition tree to ids and applies the optional limit.
  private <M extends Message> List<String> getIdsForQuery(
      BadgerTransaction badgerTransaction,
      BadgerEntityDescriptor<M> badgerEntityDescriptor,
      BadgerQuery badgerQuery) {
    List<String> entityIds = ImmutableList.copyOf(getIdsForCondition(badgerTransaction, badgerEntityDescriptor, badgerQuery.getBadgerCondition()));
    if (badgerQuery.getLimit() != null && entityIds.size() > badgerQuery.getLimit()) {
      entityIds = entityIds.subList(0, badgerQuery.getLimit().intValue());
    }
    return entityIds;
  }

  // Recursively evaluates a condition tree: PARENT nodes combine child id sets
  // with AND (intersection) / OR (union); LEAF nodes evaluate one expression.
  private <M extends Message> Set<String> getIdsForCondition(
      BadgerTransaction badgerTransaction,
      BadgerEntityDescriptor<M> badgerEntityDescriptor,
      BadgerCondition badgerCondition) {
    switch (badgerCondition.getType()) {
      case PARENT:
        List<Set<String>> childrenEntityIds = badgerCondition.getChildren().stream().map(child ->
            getIdsForCondition(badgerTransaction, badgerEntityDescriptor, child)
        ).collect(Collectors.toList());
        switch (badgerCondition.getOperator()) {
          case AND:
            // NOTE(review): a PARENT/AND node with zero children throws
            // IndexOutOfBoundsException here — confirm upstream validation
            // guarantees at least one child.
            Set<String> andSet = childrenEntityIds.get(0);
            for (int i = 1; i < childrenEntityIds.size(); i++) {
              andSet = Sets.intersection(andSet, childrenEntityIds.get(i));
            }
            return andSet;
          case OR:
            Set<String> orSet = new HashSet<>();
            childrenEntityIds.forEach(orSet::addAll);
            return orSet;
          default:
            throw new IllegalArgumentException(badgerCondition.getOperator().toString());
        }
      case LEAF:
        return ImmutableSet.copyOf(getIdsforExpression(badgerTransaction, badgerEntityDescriptor, badgerCondition.getBadgerExpression()));
      default:
        throw new IllegalArgumentException(badgerCondition.getType().toString());
    }
  }

  // Evaluates one leaf expression (EQUALS / IN / RANGE) to a list of entity ids.
  private <M extends Message> List<String> getIdsforExpression(
      BadgerTransaction badgerTransaction,
      BadgerEntityDescriptor<M> badgerEntityDescriptor,
      BadgerExpression<?> badgerExpression) {
    switch (badgerExpression.getType()) {
      case EQUALS:
        return badgerTransaction.getIdsForValues(
            badgerEntityDescriptor,
            badgerExpression.getBadgerEqualsExpression().getBadgerTagDescriptor(),
            ImmutableSet.of(badgerExpression.getBadgerEqualsExpression().getValue())
        );
      case IN:
        return badgerTransaction.getIdsForValues(
            badgerEntityDescriptor,
            badgerExpression.getBadgerInExpression().getBadgerTagDescriptor(),
            badgerExpression.getBadgerInExpression().getValues()
        );
      case RANGE:
        return badgerTransaction.getIdsForBounds(
            badgerEntityDescriptor,
            badgerExpression.getBadgerRangeExpression().getBadgerTagDescriptor(),
            badgerExpression.getBadgerRangeExpression().getMinValue(),
            badgerExpression.getBadgerRangeExpression().getMaxValue()
        );
      default:
        throw new IllegalArgumentException(badgerExpression.getType().toString());
    }
  }

  /** Single-entity convenience wrappers: each delegates to the batch save(). */
  @Override
  public String create(BadgerCreateEntity<? extends Message> badgerCreateEntity) {
    return Iterables.getOnlyElement(save(ImmutableList.of(new BadgerSaveEntity<>(badgerCreateEntity))));
  }

  @Override
  public String createFreeform(BadgerCreateFreeformEntity<? extends Message, ? extends Message> badgerCreateFreeformEntity) {
    return Iterables.getOnlyElement(save(ImmutableList.of(new BadgerSaveEntity<>(badgerCreateFreeformEntity))));
  }

  @Override
  public String mutate(BadgerMutateEntity<? extends Message> badgerMutateEntity) {
    return Iterables.getOnlyElement(save(ImmutableList.of(new BadgerSaveEntity<>(badgerMutateEntity))));
  }

  @Override
  public String mutateFreeform(BadgerMutateFreeformEntity<? extends Message, ? extends Message> badgerMutateFreeformEntity) {
    return Iterables.getOnlyElement(save(ImmutableList.of(new BadgerSaveEntity<>(badgerMutateFreeformEntity))));
  }

  @Override
  public String archive(BadgerArchiveEntity<? extends Message> badgerArchiveEntity) {
    return Iterables.getOnlyElement(save(ImmutableList.of(new BadgerSaveEntity<>(badgerArchiveEntity))));
  }

  /** Applies all save operations inside a single transaction, returning ids in order. */
  @Override
  public List<String> save(List<BadgerSaveEntity<? extends Message>> badgerSaveEntities) {
    return transact(badgerTransaction ->
        badgerSaveEntities.stream().map(badgerSaveEntity -> save(badgerTransaction, badgerSaveEntity)).collect(Collectors.toList())
    );
  }

  // Dispatches one save operation by its declared type.
  private String save(BadgerTransaction badgerTransaction, BadgerSaveEntity<? extends Message> badgerSaveEntity) {
    switch (badgerSaveEntity.getType()) {
      case CREATE:
        return create(badgerTransaction, badgerSaveEntity.getBadgerCreateEntity());
      case CREATE_FREEFORM:
        return createFreeform(badgerTransaction, badgerSaveEntity.getBadgerCreateFreeformEntity());
      case MUTATE:
        return mutate(badgerTransaction, badgerSaveEntity.getBadgerMutateEntity());
      case MUTATE_FREEFORM:
        return mutateFreeform(badgerTransaction, badgerSaveEntity.getBadgerMutateFreeformEntity());
      case ARCHIVE:
        return archive(badgerTransaction, badgerSaveEntity.getBadgerArchiveEntity());
      default:
        throw new IllegalArgumentException(badgerSaveEntity.getType().toString());
    }
  }

  private String create(BadgerTransaction badgerTransaction, BadgerCreateEntity<? extends Message> badgerCreateEntity) {
    return Iterables.getOnlyElement(badgerTransaction.create(ImmutableList.of(badgerCreateEntity.getBadgerMessage())));
  }

  // Converts the freeform message to its entity form before creating.
  private String createFreeform(
      BadgerTransaction badgerTransaction,
      BadgerCreateFreeformEntity<? extends Message, ? extends Message> badgerCreateFreeformEntity) {
    return Iterables.getOnlyElement(badgerTransaction.create(
        ImmutableList.of(badgerCore.getBadgerEntity(convert(badgerTransaction, badgerCreateFreeformEntity.getBadgerMessage())))));
  }

  // Mutates an entity; when an equals-expression is supplied, the target id is
  // resolved by unique query and merged onto the entity's id tag first.
  private String mutate(BadgerTransaction badgerTransaction, BadgerMutateEntity<? extends Message> badgerMutateEntity) {
    BadgerEntity<? extends Message> badgerEntity = badgerMutateEntity.getBadgerMessage();
    if (badgerMutateEntity.getBadgerEqualsExpression() != null) {
      String entityId = getIdForBadgerUniqueEntityQuery(
          badgerTransaction,
          badgerMutateEntity.getBadgerMessage().getBadgerMessageDescriptor(),
          badgerMutateEntity.getBadgerEqualsExpression()
      );
      badgerEntity = badgerEntity.merge(
          ImmutableMap.of(
              badgerEntity.getBadgerMessageDescriptor().getIdTag(),
              entityId
          )
      );
    }
    badgerTransaction.mutate(ImmutableList.of(badgerEntity), badgerMutateEntity.getTagsToUpdate());
    return badgerEntity.getId();
  }

  // Freeform variant of mutate(): resolve id (optional), convert, then mutate.
  private String mutateFreeform(
      BadgerTransaction badgerTransaction,
      BadgerMutateFreeformEntity<? extends Message, ? extends Message> badgerMutateFreeformEntity) {
    BadgerFreeformEntity<? extends Message, ? extends Message> badgerFreeformEntity = badgerMutateFreeformEntity.getBadgerMessage();
    if (badgerMutateFreeformEntity.getBadgerEqualsExpression() != null) {
      String entityId = getIdForBadgerUniqueEntityQuery(
          badgerTransaction,
          badgerMutateFreeformEntity.getBadgerMessage().getBadgerMessageDescriptor().getBadgerEntityDescriptor(),
          badgerMutateFreeformEntity.getBadgerEqualsExpression()
      );
      badgerFreeformEntity = badgerFreeformEntity.merge(
          ImmutableMap.of(
              badgerFreeformEntity.getBadgerMessageDescriptor().getIdTag(),
              entityId
          )
      );
    }
    badgerTransaction.mutate(
        ImmutableList.of(badgerCore.getBadgerEntity(convert(badgerTransaction, badgerFreeformEntity))),
        badgerMutateFreeformEntity.getTagsToUpdate());
    return badgerFreeformEntity.getId();
  }

  // Archive == remove within the transaction; returns the removed id.
  private String archive(BadgerTransaction badgerTransaction, BadgerArchiveEntity<? extends Message> badgerArchiveEntity) {
    badgerTransaction.remove(badgerArchiveEntity.getBadgerEntityDescriptor(), ImmutableSet.of(badgerArchiveEntity.getEntityId()));
    return badgerArchiveEntity.getEntityId();
  }

  @Override
  public List<String> getAllIds(
      BadgerEntityDescriptor<? extends Message> badgerEntityDescriptor,
      @Nullable String lastEntityId,
      @Nullable Integer limit) {
    return badgerDatabase.getAllIds(badgerEntityDescriptor, lastEntityId, limit);
  }

  // PRIVATE

  /**
   * Runs {@code function} in a fresh transaction: cancel on RuntimeException,
   * otherwise commit and return the result.
   *
   * NOTE(review): commit() happens outside the try, so a failure during commit
   * (or any non-RuntimeException Throwable) leaves the transaction neither
   * committed nor cancelled — confirm whether BadgerTransaction tolerates that.
   */
  private <O> O transact(Function<BadgerTransaction, O> function) {
    BadgerTransaction badgerTransaction = badgerDatabase.createTransaction();
    O output = null;
    try {
      output = function.apply(badgerTransaction);
    } catch (RuntimeException e) {
      badgerTransaction.cancel();
      throw e;
    }
    badgerTransaction.commit();
    return output;
  }

  // Resolves a unique equals-expression to exactly one id; throws otherwise.
  private String getIdForBadgerUniqueEntityQuery(
      BadgerTransaction badgerTransaction,
      BadgerEntityDescriptor<? extends Message> badgerEntityDescriptor,
      BadgerEqualsExpression<?> badgerEqualsExpression
  ) {
    List<String> ids = badgerTransaction.getIdsForValues(
        badgerEntityDescriptor,
        badgerEqualsExpression.getBadgerTagDescriptor(),
        ImmutableSet.of(badgerEqualsExpression.getValue())
    );
    return Iterables.getOnlyElement(ids);
  }

  // Looks up the freeform->entity converter by message classes and applies it.
  private <F extends Message, E extends Message> E convert(
      BadgerTransaction badgerTransaction,
      BadgerFreeformEntity<F, E> badgerFreeformEntity) {
    return badgerFreeformConverterProvider.getBadgerFreeformConverter(
        badgerFreeformEntity.getBadgerMessageDescriptor().getMessageClass(),
        badgerFreeformEntity.getBadgerMessageDescriptor().getBadgerEntityDescriptor().getMessageClass()
    ).convert(badgerTransaction, badgerFreeformConverterProvider, badgerFreeformEntity.getMessage());
  }
}
/*
 * Copyright (c) OSGi Alliance (2004, 2011). All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.osgi.service.cm;

import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.ObjectStreamField;
import java.security.BasicPermission;
import java.security.Permission;
import java.security.PermissionCollection;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

/**
 * Indicates a bundle's authority to configure bundles or be updated by
 * Configuration Admin.
 *
 * @ThreadSafe
 * @version $Id: 0d700c494f2dc2bbe05165bf5c79fe185c9f0a4a $
 * @since 1.2
 */
public final class ConfigurationPermission extends BasicPermission {
	static final long			serialVersionUID	= 5716868734811965383L;
	/**
	 * Provides permission to create new configurations for other bundles as
	 * well as manipulate them. The action string {@value #CONFIGURE}.
	 */
	public final static String	CONFIGURE			= "configure";

	/**
	 * The permission to be updated, that is, act as a Managed Service or
	 * Managed Service Factory. The action string {@value #TARGET}.
	 *
	 * @since 1.4
	 */
	public final static String	TARGET				= "target";

	// Bit flags for the parsed action string; combined into action_mask.
	private final static int	ACTION_CONFIGURE	= 0x00000001;
	private final static int	ACTION_TARGET		= 0x00000002;
	private final static int	ACTION_ALL			= ACTION_CONFIGURE
															| ACTION_TARGET;
	final static int			ACTION_NONE			= 0;

	/**
	 * The actions mask.
	 */
	transient int				action_mask;

	/**
	 * The actions in canonical form.
	 *
	 * @serial
	 */
	private volatile String		actions				= null;

	/**
	 * Parsed name if it includes wildcards: "*"
	 */
	// Rebuilt on deserialization by setTransients(); null when the name has
	// no '*' wildcard, so implies() can take the fast equals() path.
	private transient List		substrings;

	/**
	 * Create a new ConfigurationPermission.
	 *
	 * @param name Name of the permission. Wildcards ({@code '*'}) are allowed
	 *        in the name. During {@link #implies(Permission)}, the name is
	 *        matched to the requested permission using the substring matching
	 *        rules used by {@link Filter}s.
	 * @param actions Comma separated list of {@link #CONFIGURE},
	 *        {@link #TARGET}.
	 */
	public ConfigurationPermission(String name, String actions) {
		this(name, parseActions(actions));
	}

	/**
	 * Package private constructor used by ConfigurationPermissionCollection.
	 *
	 * @param name location string
	 * @param mask action mask
	 */
	ConfigurationPermission(String name, int mask) {
		super(name);
		setTransients(mask);
	}

	/**
	 * Called by constructors and when deserialized.
	 *
	 * @param mask action mask; must be non-empty and contain only known
	 *        action bits.
	 */
	private void setTransients(int mask) {
		if ((mask == ACTION_NONE) || ((mask & ACTION_ALL) != mask)) {
			throw new IllegalArgumentException("invalid action string");
		}
		action_mask = mask;
		substrings = parseSubstring(getName());
	}

	/**
	 * Parse action string into action mask.
	 *
	 * <p>
	 * The string is scanned backwards from the end, matching the literal
	 * words "target" and "configure" case-insensitively, separated by commas
	 * and optional whitespace.
	 *
	 * @param actions Action string.
	 * @return action mask.
	 */
	private static int parseActions(String actions) {
		boolean seencomma = false;

		int mask = ACTION_NONE;

		if (actions == null) {
			return mask;
		}

		char[] a = actions.toCharArray();

		int i = a.length - 1;
		if (i < 0)
			return mask;

		while (i != -1) {
			char c;

			// skip whitespace
			while ((i != -1)
					&& ((c = a[i]) == ' ' || c == '\r' || c == '\n'
							|| c == '\f' || c == '\t'))
				i--;

			// check for the known strings
			int matchlen;

			// "target" spelled backwards from position i
			if (i >= 5 && (a[i - 5] == 't' || a[i - 5] == 'T')
					&& (a[i - 4] == 'a' || a[i - 4] == 'A')
					&& (a[i - 3] == 'r' || a[i - 3] == 'R')
					&& (a[i - 2] == 'g' || a[i - 2] == 'G')
					&& (a[i - 1] == 'e' || a[i - 1] == 'E')
					&& (a[i] == 't' || a[i] == 'T')) {
				matchlen = 6;
				mask |= ACTION_TARGET;
			}
			else
				// "configure" spelled backwards from position i
				if (i >= 8 && (a[i - 8] == 'c' || a[i - 8] == 'C')
						&& (a[i - 7] == 'o' || a[i - 7] == 'O')
						&& (a[i - 6] == 'n' || a[i - 6] == 'N')
						&& (a[i - 5] == 'f' || a[i - 5] == 'F')
						&& (a[i - 4] == 'i' || a[i - 4] == 'I')
						&& (a[i - 3] == 'g' || a[i - 3] == 'G')
						&& (a[i - 2] == 'u' || a[i - 2] == 'U')
						&& (a[i - 1] == 'r' || a[i - 1] == 'R')
						&& (a[i] == 'e' || a[i] == 'E')) {
					matchlen = 9;
					mask |= ACTION_CONFIGURE;
				}
				else {
					// parse error
					throw new IllegalArgumentException("invalid actions: "
							+ actions);
				}

			// make sure we didn't just match the tail of a word
			// like "ackbarftarget". Also, skip to the comma.
			seencomma = false;
			while (i >= matchlen && !seencomma) {
				switch (a[i - matchlen]) {
					case ',' :
						seencomma = true;
						/* FALLTHROUGH */
					case ' ' :
					case '\r' :
					case '\n' :
					case '\f' :
					case '\t' :
						break;
					default :
						throw new IllegalArgumentException(
								"invalid permission: " + actions);
				}
				i--;
			}

			// point i at the location of the comma minus one (or -1).
			i -= matchlen;
		}

		// a trailing comma (first comma seen on the backwards scan) is invalid
		if (seencomma) {
			throw new IllegalArgumentException("invalid actions: " + actions);
		}

		return mask;
	}

	/**
	 * Parse the name for wildcard processing.
	 *
	 * @param name The name of the permission.
	 * @return {@code null} is the name has no wildcards or a
	 *         {@code List<String>} where element is a substring to match or
	 *         null for {@code '*'}.
	 */
	private static List parseSubstring(String name) {
		// Fast path: names without '*' are matched by plain equality.
		if (name.indexOf('*') < 0) {
			return null;
		}
		char[] chars = name.toCharArray();
		StringBuffer sb = new StringBuffer(chars.length);

		List sub = new ArrayList(10);

		for (int pos = 0; pos < chars.length; pos++) {
			char c = chars[pos];

			switch (c) {
				case '*' : {
					if (sb.length() > 0) {
						sub.add(sb.toString());
					}

					sb.setLength(0);
					// null element represents a '*' wildcard
					sub.add(null);
					break;
				}

				case '\\' : {
					// backslash escapes the next character (e.g. "\*")
					pos++;
					if (pos < chars.length) {
						c = chars[pos];
					}
					/* fall through into default */
				}

				default : {
					sb.append(c);
					break;
				}
			}
		}
		if (sb.length() > 0) {
			sub.add(sb.toString());
		}

		int size = sub.size();

		if (size == 0) {
			return null;
		}

		if (size == 1) {
			// a single literal (no remaining wildcard) degrades to equality
			if (sub.get(0) != null) {
				return null;
			}
		}
		return sub;
	}

	/**
	 * Determines if a {@code ConfigurationPermission} object "implies" the
	 * specified permission.
	 *
	 * @param p The target permission to check.
	 * @return {@code true} if the specified permission is implied by this
	 *         object; {@code false} otherwise.
	 */
	public boolean implies(Permission p) {
		if (!(p instanceof ConfigurationPermission)) {
			return false;
		}
		ConfigurationPermission requested = (ConfigurationPermission) p;

		return implies0(requested, ACTION_NONE);
	}

	/**
	 * Internal implies method. Used by the implies and the permission
	 * collection implies methods.
	 *
	 * @param requested The requested ConfigurationPermission which has already
	 *        be validated as a proper argument.
	 * @param effective The effective actions with which to start.
	 * @return {@code true} if the specified permission is implied by this
	 *         object; {@code false} otherwise.
	 */
	boolean implies0(ConfigurationPermission requested, int effective) {
		/* check actions first - much faster */
		effective |= action_mask;
		final int desired = requested.action_mask;
		if ((effective & desired) != desired) {
			return false;
		}
		String requestedName = requested.getName();
		if (substrings == null) {
			// no wildcards in this permission's name: exact match required
			return getName().equals(requestedName);
		}
		// Greedy left-to-right match of the wildcard pattern against the
		// requested name; a null element in substrings matches any run of
		// characters.
		for (int i = 0, pos = 0, size = substrings.size(); i < size; i++) {
			String substr = (String) substrings.get(i);

			if (i + 1 < size) /* if this is not that last substr */{
				if (substr == null) /* * */{
					String substr2 = (String) substrings.get(i + 1);

					if (substr2 == null) /* ** */
						continue; /* ignore first star */
					/* xxx */
					int index = requestedName.indexOf(substr2, pos);
					if (index == -1) {
						return false;
					}

					pos = index + substr2.length();
					if (i + 2 < size) // if there are more
						// substrings, increment
						// over the string we just
						// matched; otherwise need
						// to do the last substr
						// check
						i++;
				}
				else /* xxx */{
					int len = substr.length();
					if (requestedName.regionMatches(pos, substr, 0, len)) {
						pos += len;
					}
					else {
						return false;
					}
				}
			}
			else /* last substr */{
				if (substr == null) /* * */{
					return true;
				}
				/* xxx */
				return requestedName.endsWith(substr);
			}
		}

		return false;
	}

	/**
	 * Determines the equality of two {@code ConfigurationPermission} objects.
	 * <p>
	 * Two {@code ConfigurationPermission} objects are equal if they have the
	 * same name and the same actions.
	 *
	 * @param obj The object being compared for equality with this object.
	 * @return {@code true} if {@code obj} is equivalent to this
	 *         {@code ConfigurationPermission}; {@code false} otherwise.
	 */
	public boolean equals(Object obj) {
		if (obj == this) {
			return true;
		}

		if (!(obj instanceof ConfigurationPermission)) {
			return false;
		}

		ConfigurationPermission cp = (ConfigurationPermission) obj;

		return (action_mask == cp.action_mask)
				&& getName().equals(cp.getName());
	}

	/**
	 * Returns the hash code value for this object.
	 *
	 * @return Hash code value for this object.
	 */
	public int hashCode() {
		// consistent with equals(): combines name and canonical actions
		int h = 31 * 17 + getName().hashCode();
		h = 31 * h + getActions().hashCode();
		return h;
	}

	/**
	 * Returns the canonical string representation of the
	 * {@code ConfigurationPermission} actions.
	 *
	 * <p>
	 * Always returns present {@code ConfigurationPermission} actions in the
	 * following order: {@value #CONFIGURE}, {@value #TARGET}
	 *
	 * @return Canonical string representation of the
	 *         {@code ConfigurationPermission} actions.
	 */
	public String getActions() {
		// lazily computed and cached in the volatile 'actions' field
		String result = actions;
		if (result == null) {
			StringBuffer sb = new StringBuffer();
			boolean comma = false;

			int mask = action_mask;
			if ((mask & ACTION_CONFIGURE) == ACTION_CONFIGURE) {
				sb.append(CONFIGURE);
				comma = true;
			}

			if ((mask & ACTION_TARGET) == ACTION_TARGET) {
				if (comma)
					sb.append(',');
				sb.append(TARGET);
			}

			actions = result = sb.toString();
		}

		return result;
	}

	/**
	 * Returns a new {@code PermissionCollection} object suitable for storing
	 * {@code ConfigurationPermission}s.
	 *
	 * @return A new {@code PermissionCollection} object.
	 */
	public PermissionCollection newPermissionCollection() {
		return new ConfigurationPermissionCollection();
	}

	/**
	 * WriteObject is called to save the state of this permission object to a
	 * stream. The actions are serialized, and the superclass takes care of the
	 * name.
	 */
	private synchronized void writeObject(java.io.ObjectOutputStream s)
			throws IOException {
		// Write out the actions. The superclass takes care of the name
		// call getActions to make sure actions field is initialized
		if (actions == null)
			getActions();
		s.defaultWriteObject();
	}

	/**
	 * readObject is called to restore the state of this permission from a
	 * stream.
	 */
	private synchronized void readObject(java.io.ObjectInputStream s)
			throws IOException, ClassNotFoundException {
		// Read in the data, then initialize the transients
		// (action_mask and substrings are rebuilt from the actions string)
		s.defaultReadObject();
		setTransients(parseActions(actions));
	}
}

/**
 * Stores a set of {@code ConfigurationPermission} permissions.
 *
 * @see java.security.Permission
 * @see java.security.Permissions
 * @see java.security.PermissionCollection
 */
final class ConfigurationPermissionCollection extends PermissionCollection {
	static final long	serialVersionUID	= -6917638867081695839L;
	/**
	 * Collection of permissions.
	 *
	 * @serial
	 * @GuardedBy this
	 */
	private Map			permissions;

	/**
	 * Boolean saying if "*" is in the collection.
	 *
	 * @serial
	 * @GuardedBy this
	 */
	private boolean		all_allowed;

	/**
	 * Creates an empty {@code ConfigurationPermissionCollection} object.
	 *
	 */
	public ConfigurationPermissionCollection() {
		permissions = new HashMap();
		all_allowed = false;
	}

	/**
	 * Adds the specified permission to the
	 * {@code ConfigurationPermissionCollection}. The key for the hash is the
	 * interface name of the service.
	 *
	 * @param permission The {@code Permission} object to add.
	 *
	 * @exception IllegalArgumentException If the permission is not an
	 *            {@code ConfigurationPermission}.
	 *
	 * @exception SecurityException If this ConfigurationPermissionCollection
	 *            object has been marked read-only.
	 */
	public void add(Permission permission) {
		if (!(permission instanceof ConfigurationPermission)) {
			throw new IllegalArgumentException("invalid permission: "
					+ permission);
		}
		if (isReadOnly())
			throw new SecurityException("attempt to add a Permission to a "
					+ "readonly PermissionCollection");
		final ConfigurationPermission cp = (ConfigurationPermission) permission;
		final String name = cp.getName();
		synchronized (this) {
			Map pc = permissions;
			final ConfigurationPermission existing = (ConfigurationPermission) pc
					.get(name);
			if (existing != null) {
				// same name already present: merge the action masks
				final int oldMask = existing.action_mask;
				final int newMask = cp.action_mask;

				if (oldMask != newMask) {
					pc.put(name, new ConfigurationPermission(name, oldMask
							| newMask));
				}
			}
			else {
				pc.put(name, cp);
			}
			if (!all_allowed) {
				if (name.equals("*")) {
					all_allowed = true;
				}
			}
		}
	}

	/**
	 * Determines if the specified permissions implies the permissions expressed
	 * in {@code permission}.
	 *
	 * @param permission The Permission object to compare with this
	 *        {@code ConfigurationPermission} object.
	 * @return {@code true} if {@code permission} is a proper subset of a
	 *         permission in the set; {@code false} otherwise.
	 */
	public boolean implies(Permission permission) {
		if (!(permission instanceof ConfigurationPermission)) {
			return false;
		}

		final ConfigurationPermission requested = (ConfigurationPermission) permission;
		int effective = ConfigurationPermission.ACTION_NONE;

		Collection perms;
		synchronized (this) {
			Map pc = permissions;
			/* short circuit if the "*" Permission was added */
			if (all_allowed) {
				ConfigurationPermission cp = (ConfigurationPermission) pc
						.get("*");
				if (cp != null) {
					effective |= cp.action_mask;
					final int desired = requested.action_mask;
					if ((effective & desired) == desired) {
						return true;
					}
				}
			}
			perms = pc.values();
		}
		/* iterate one by one over permissions */
		for (Iterator permI = perms.iterator(); permI.hasNext();) {
			ConfigurationPermission perm = (ConfigurationPermission) permI
					.next();
			if (perm.implies0(requested, effective)) {
				return true;
			}
		}
		return false;
	}

	/**
	 * Returns an enumeration of all {@code ConfigurationPermission} objects in
	 * the container.
	 *
	 * @return Enumeration of all {@code ConfigurationPermission} objects.
	 */
	public synchronized Enumeration elements() {
		// snapshot copy so callers iterate without holding this lock
		List all = new ArrayList(permissions.values());
		return Collections.enumeration(all);
	}

	/* serialization logic */
	private static final ObjectStreamField[]	serialPersistentFields	= {
			new ObjectStreamField("hasElement", Boolean.TYPE),
			new ObjectStreamField("permissions", HashMap.class),
			new ObjectStreamField("all_allowed", Boolean.TYPE)};

	private synchronized void writeObject(ObjectOutputStream out)
			throws IOException {
		// Always write the new format; "hasElement" is kept only for
		// compatibility with the old serialized form.
		ObjectOutputStream.PutField pfields = out.putFields();
		pfields.put("hasElement", false);
		pfields.put("permissions", permissions);
		pfields.put("all_allowed", all_allowed);
		out.writeFields();
	}

	private synchronized void readObject(java.io.ObjectInputStream in)
			throws IOException, ClassNotFoundException {
		ObjectInputStream.GetField gfields = in.readFields();
		boolean hasElement = gfields.get("hasElement", false);
		if (hasElement) { // old format
			// legacy streams stored only a single "*"/CONFIGURE permission
			permissions = new HashMap();
			permissions.put("*", new ConfigurationPermission("*",
					ConfigurationPermission.CONFIGURE));
			all_allowed = true;
		}
		else {
			permissions = (HashMap) gfields
					.get("permissions", new HashMap());
			all_allowed = gfields.get("all_allowed", false);
		}
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase;

import java.nio.ByteBuffer;
import java.util.Comparator;

import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hbase.thirdparty.com.google.common.primitives.Longs;

/**
 * Compare two HBase cells. Do not use this method comparing <code>-ROOT-</code> or
 * <code>hbase:meta</code> cells. Cells from these tables need a specialized comparator, one that
 * takes account of the special formatting of the row where we have commas to delimit table from
 * regionname, from row. See KeyValue for how it has a special comparator to do hbase:meta cells
 * and yet another for -ROOT-.
 * <p>While using this comparator for {{@link #compareRows(Cell, Cell)} et al, the hbase:meta cells
 * format should be taken into consideration, for which the instance of this comparator
 * should be used.  In all other cases the static APIs in this comparator would be enough
 * <p>HOT methods. We spend a good portion of CPU comparing. Anything that makes the compare
 * faster will likely manifest at the macro level. See also
 * {@link BBKVComparator}. Use it when mostly {@link ByteBufferKeyValue}s.
 * </p>
 */
@edu.umd.cs.findbugs.annotations.SuppressWarnings(
    value="UNKNOWN",
    justification="Findbugs doesn't like the way we are negating the result of a compare in below")
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class CellComparatorImpl implements CellComparator {
  static final Logger LOG = LoggerFactory.getLogger(CellComparatorImpl.class);

  /**
   * Comparator for plain key/values; i.e. non-catalog table key/values. Works on Key portion
   * of KeyValue only.
   */
  public static final CellComparatorImpl COMPARATOR = new CellComparatorImpl();

  /**
   * A {@link CellComparatorImpl} for <code>hbase:meta</code> catalog table
   * {@link KeyValue}s.
   */
  public static final CellComparatorImpl META_COMPARATOR = new MetaCellComparator();

  @Override
  public final int compare(final Cell a, final Cell b) {
    return compare(a, b, false);
  }

  /**
   * Compare two cells, optionally ignoring the sequence id (mvcc).
   * @param a left cell
   * @param b right cell
   * @param ignoreSequenceid if true, only the key portion is compared
   * @return negative, zero, or positive as {@code a} sorts before, same as, or after {@code b}
   */
  @Override
  public int compare(final Cell a, final Cell b, boolean ignoreSequenceid) {
    int diff = 0;
    // "Peel off" the most common path.
    if (a instanceof ByteBufferKeyValue && b instanceof ByteBufferKeyValue) {
      diff = BBKVComparator.compare((ByteBufferKeyValue)a, (ByteBufferKeyValue)b, ignoreSequenceid);
      if (diff != 0) {
        return diff;
      }
    } else {
      diff = compareRows(a, b);
      if (diff != 0) {
        return diff;
      }

      diff = compareWithoutRow(a, b);
      if (diff != 0) {
        return diff;
      }
    }

    // Negate following comparisons so later edits show up first mvccVersion: later sorts first
    return ignoreSequenceid? diff: Long.compare(b.getSequenceId(), a.getSequenceId());
  }

  /**
   * Compares the family and qualifier part of the cell
   * @return 0 if both cells are equal, 1 if left cell is bigger than right, -1 otherwise
   */
  public final int compareColumns(final Cell left, final Cell right) {
    int diff = compareFamilies(left, right);
    if (diff != 0) {
      return diff;
    }
    return compareQualifiers(left, right);
  }

  /**
   * Compare the families of left and right cell
   * @return 0 if both cells are equal, 1 if left cell is bigger than right, -1 otherwise
   */
  @Override
  public final int compareFamilies(Cell left, Cell right) {
    if (left instanceof ByteBufferExtendedCell && right instanceof ByteBufferExtendedCell) {
      return ByteBufferUtils.compareTo(((ByteBufferExtendedCell) left).getFamilyByteBuffer(),
          ((ByteBufferExtendedCell) left).getFamilyPosition(), left.getFamilyLength(),
          ((ByteBufferExtendedCell) right).getFamilyByteBuffer(),
          ((ByteBufferExtendedCell) right).getFamilyPosition(), right.getFamilyLength());
    }
    if (left instanceof ByteBufferExtendedCell) {
      return ByteBufferUtils.compareTo(((ByteBufferExtendedCell) left).getFamilyByteBuffer(),
          ((ByteBufferExtendedCell) left).getFamilyPosition(), left.getFamilyLength(),
          right.getFamilyArray(), right.getFamilyOffset(), right.getFamilyLength());
    }
    if (right instanceof ByteBufferExtendedCell) {
      // Notice how we flip the order of the compare here. We used to negate the return value but
      // see what FindBugs says
      // http://findbugs.sourceforge.net/bugDescriptions.html#RV_NEGATING_RESULT_OF_COMPARETO
      // It suggest flipping the order to get same effect and 'safer'.
      return ByteBufferUtils.compareTo(
          left.getFamilyArray(), left.getFamilyOffset(), left.getFamilyLength(),
          ((ByteBufferExtendedCell)right).getFamilyByteBuffer(),
          ((ByteBufferExtendedCell)right).getFamilyPosition(), right.getFamilyLength());
    }
    return Bytes.compareTo(left.getFamilyArray(), left.getFamilyOffset(), left.getFamilyLength(),
        right.getFamilyArray(), right.getFamilyOffset(), right.getFamilyLength());
  }

  /**
   * Compare the qualifiers part of the left and right cells.
   * @return 0 if both cells are equal, 1 if left cell is bigger than right, -1 otherwise
   */
  @Override
  public final int compareQualifiers(Cell left, Cell right) {
    if (left instanceof ByteBufferExtendedCell && right instanceof ByteBufferExtendedCell) {
      return ByteBufferUtils
          .compareTo(((ByteBufferExtendedCell) left).getQualifierByteBuffer(),
              ((ByteBufferExtendedCell) left).getQualifierPosition(),
              left.getQualifierLength(), ((ByteBufferExtendedCell) right).getQualifierByteBuffer(),
              ((ByteBufferExtendedCell) right).getQualifierPosition(),
              right.getQualifierLength());
    }
    if (left instanceof ByteBufferExtendedCell) {
      return ByteBufferUtils.compareTo(((ByteBufferExtendedCell) left).getQualifierByteBuffer(),
          ((ByteBufferExtendedCell) left).getQualifierPosition(), left.getQualifierLength(),
          right.getQualifierArray(), right.getQualifierOffset(), right.getQualifierLength());
    }
    if (right instanceof ByteBufferExtendedCell) {
      // Notice how we flip the order of the compare here. We used to negate the return value but
      // see what FindBugs says
      // http://findbugs.sourceforge.net/bugDescriptions.html#RV_NEGATING_RESULT_OF_COMPARETO
      // It suggest flipping the order to get same effect and 'safer'.
      return ByteBufferUtils.compareTo(left.getQualifierArray(), left.getQualifierOffset(),
          left.getQualifierLength(), ((ByteBufferExtendedCell)right).getQualifierByteBuffer(),
          ((ByteBufferExtendedCell)right).getQualifierPosition(), right.getQualifierLength());
    }
    return Bytes.compareTo(left.getQualifierArray(), left.getQualifierOffset(),
        left.getQualifierLength(), right.getQualifierArray(), right.getQualifierOffset(),
        right.getQualifierLength());
  }

  /**
   * Compares the rows of the left and right cell.
   * For the hbase:meta case this method is overridden such that it can handle hbase:meta cells.
   * The caller should ensure using the appropriate comparator for hbase:meta.
   * @return 0 if both cells are equal, 1 if left cell is bigger than right, -1 otherwise
   */
  @Override
  public int compareRows(final Cell left, final Cell right) {
    return compareRows(left, left.getRowLength(), right, right.getRowLength());
  }

  /**
   * Compares the row portion of two cells using caller-supplied row lengths.
   * @param left left cell
   * @param leftRowLength row length to use for the left cell
   * @param right right cell
   * @param rightRowLength row length to use for the right cell
   * @return 0 if both rows are equal, positive if left is bigger, negative otherwise
   */
  static int compareRows(final Cell left, int leftRowLength, final Cell right,
      int rightRowLength) {
    // left and right can be exactly the same at the beginning of a row
    if (left == right) {
      return 0;
    }
    if (left instanceof ByteBufferExtendedCell && right instanceof ByteBufferExtendedCell) {
      return ByteBufferUtils.compareTo(((ByteBufferExtendedCell) left).getRowByteBuffer(),
          ((ByteBufferExtendedCell) left).getRowPosition(), leftRowLength,
          ((ByteBufferExtendedCell) right).getRowByteBuffer(),
          ((ByteBufferExtendedCell) right).getRowPosition(), rightRowLength);
    }
    if (left instanceof ByteBufferExtendedCell) {
      return ByteBufferUtils.compareTo(((ByteBufferExtendedCell) left).getRowByteBuffer(),
          ((ByteBufferExtendedCell) left).getRowPosition(), leftRowLength,
          right.getRowArray(), right.getRowOffset(), rightRowLength);
    }
    if (right instanceof ByteBufferExtendedCell) {
      // Notice how we flip the order of the compare here. We used to negate the return value but
      // see what FindBugs says
      // http://findbugs.sourceforge.net/bugDescriptions.html#RV_NEGATING_RESULT_OF_COMPARETO
      // It suggest flipping the order to get same effect and 'safer'.
      return ByteBufferUtils.compareTo(left.getRowArray(), left.getRowOffset(), leftRowLength,
          ((ByteBufferExtendedCell)right).getRowByteBuffer(),
          ((ByteBufferExtendedCell)right).getRowPosition(), rightRowLength);
    }
    // FIX: use the caller-supplied lengths here too. The previous code re-read
    // left.getRowLength()/right.getRowLength(), silently ignoring the
    // leftRowLength/rightRowLength parameters on this (heap array) path while
    // honoring them on every ByteBuffer path above.
    return Bytes.compareTo(left.getRowArray(), left.getRowOffset(), leftRowLength,
        right.getRowArray(), right.getRowOffset(), rightRowLength);
  }

  /**
   * Compares the row part of the cell with a simple plain byte[] like the
   * stopRow in Scan. This should be used with context where for hbase:meta
   * cells the {{@link #META_COMPARATOR} should be used
   *
   * @param left
   *          the cell to be compared
   * @param right
   *          the kv serialized byte[] to be compared with
   * @param roffset
   *          the offset in the byte[]
   * @param rlength
   *          the length in the byte[]
   * @return 0 if both cell and the byte[] are equal, 1 if the cell is bigger
   *         than byte[], -1 otherwise
   */
  @Override
  public int compareRows(Cell left, byte[] right, int roffset, int rlength) {
    if (left instanceof ByteBufferExtendedCell) {
      return ByteBufferUtils.compareTo(((ByteBufferExtendedCell) left).getRowByteBuffer(),
          ((ByteBufferExtendedCell) left).getRowPosition(), left.getRowLength(), right,
          roffset, rlength);
    }
    return Bytes.compareTo(left.getRowArray(), left.getRowOffset(), left.getRowLength(), right,
        roffset, rlength);
  }

  @Override
  public final int compareWithoutRow(final Cell left, final Cell right) {
    // If the column is not specified, the "minimum" key type appears the
    // latest in the sorted order, regardless of the timestamp. This is used
    // for specifying the last key/value in a given row, because there is no
    // "lexicographically last column" (it would be infinitely long). The
    // "maximum" key type does not need this behavior.
    // Copied from KeyValue. This is bad in that we can't do memcmp w/ special rules like this.
    int lFamLength = left.getFamilyLength();
    int rFamLength = right.getFamilyLength();
    int lQualLength = left.getQualifierLength();
    int rQualLength = right.getQualifierLength();
    if (lFamLength + lQualLength == 0
          && left.getTypeByte() == Type.Minimum.getCode()) {
      // left is "bigger", i.e. it appears later in the sorted order
      return 1;
    }
    if (rFamLength + rQualLength == 0
        && right.getTypeByte() == Type.Minimum.getCode()) {
      return -1;
    }
    if (lFamLength != rFamLength) {
      // comparing column family is enough.
      return compareFamilies(left, right);
    }
    // Compare cf:qualifier
    int diff = compareColumns(left, right);
    if (diff != 0) {
      return diff;
    }

    diff = compareTimestamps(left.getTimestamp(), right.getTimestamp());
    if (diff != 0) {
      return diff;
    }

    // Compare types. Let the delete types sort ahead of puts; i.e. types
    // of higher numbers sort before those of lesser numbers. Maximum (255)
    // appears ahead of everything, and minimum (0) appears after
    // everything.
    return (0xff & right.getTypeByte()) - (0xff & left.getTypeByte());
  }

  @Override
  public int compareTimestamps(final Cell left, final Cell right) {
    return compareTimestamps(left.getTimestamp(), right.getTimestamp());
  }

  @Override
  public int compareTimestamps(final long ltimestamp, final long rtimestamp) {
    // Swap order we pass into compare so we get DESCENDING order.
    return Long.compare(rtimestamp, ltimestamp);
  }

  /**
   * A {@link CellComparatorImpl} for <code>hbase:meta</code> catalog table
   * {@link KeyValue}s.
   */
  public static class MetaCellComparator extends CellComparatorImpl {
    // TODO: Do we need a ByteBufferKeyValue version of this?
    @Override
    public int compareRows(final Cell left, final Cell right) {
      return compareRows(left.getRowArray(), left.getRowOffset(), left.getRowLength(),
          right.getRowArray(), right.getRowOffset(), right.getRowLength());
    }

    @Override
    public int compareRows(Cell left, byte[] right, int roffset, int rlength) {
      return compareRows(left.getRowArray(), left.getRowOffset(), left.getRowLength(), right,
          roffset, rlength);
    }

    @Override
    public int compare(final Cell a, final Cell b, boolean ignoreSequenceid) {
      int diff = compareRows(a, b);
      if (diff != 0) {
        return diff;
      }

      diff = compareWithoutRow(a, b);
      if (diff != 0) {
        return diff;
      }

      // Negate following comparisons so later edits show up first mvccVersion: later sorts first
      return ignoreSequenceid? diff: Longs.compare(b.getSequenceId(), a.getSequenceId());
    }

    /**
     * Meta rows look like {@code table,startrow,id}; compare the table part,
     * then the middle (startrow) part, then the trailing id, each delimited by
     * {@link HConstants#DELIMITER}.
     */
    private static int compareRows(byte[] left, int loffset, int llength, byte[] right,
        int roffset, int rlength) {
      int leftDelimiter = Bytes.searchDelimiterIndex(left, loffset, llength, HConstants.DELIMITER);
      int rightDelimiter = Bytes
          .searchDelimiterIndex(right, roffset, rlength, HConstants.DELIMITER);
      // Compare up to the delimiter
      int lpart = (leftDelimiter < 0 ? llength : leftDelimiter - loffset);
      int rpart = (rightDelimiter < 0 ? rlength : rightDelimiter - roffset);
      int result = Bytes.compareTo(left, loffset, lpart, right, roffset, rpart);
      if (result != 0) {
        return result;
      } else {
        if (leftDelimiter < 0 && rightDelimiter >= 0) {
          return -1;
        } else if (rightDelimiter < 0 && leftDelimiter >= 0) {
          return 1;
        } else if (leftDelimiter < 0) {
          return 0;
        }
      }
      // Compare middle bit of the row.
      // Move past delimiter
      leftDelimiter++;
      rightDelimiter++;
      int leftFarDelimiter = Bytes.searchDelimiterIndexInReverse(left, leftDelimiter,
          llength - (leftDelimiter - loffset), HConstants.DELIMITER);
      int rightFarDelimiter = Bytes.searchDelimiterIndexInReverse(right, rightDelimiter,
          rlength - (rightDelimiter - roffset), HConstants.DELIMITER);
      // Now compare middlesection of row.
      lpart = (leftFarDelimiter < 0 ? llength + loffset : leftFarDelimiter) - leftDelimiter;
      rpart = (rightFarDelimiter < 0 ? rlength + roffset : rightFarDelimiter) - rightDelimiter;
      result = Bytes.compareTo(left, leftDelimiter, lpart, right, rightDelimiter, rpart);
      if (result != 0) {
        return result;
      } else {
        if (leftDelimiter < 0 && rightDelimiter >= 0) {
          return -1;
        } else if (rightDelimiter < 0 && leftDelimiter >= 0) {
          return 1;
        } else if (leftDelimiter < 0) {
          return 0;
        }
      }
      // Compare last part of row, the rowid.
      leftFarDelimiter++;
      rightFarDelimiter++;
      result = Bytes.compareTo(left, leftFarDelimiter, llength - (leftFarDelimiter - loffset),
          right, rightFarDelimiter, rlength - (rightFarDelimiter - roffset));
      return result;
    }

    @Override
    public int compareRows(ByteBuffer row, Cell cell) {
      byte [] array;
      int offset;
      int len = row.remaining();
      if (row.hasArray()) {
        array = row.array();
        offset = row.position() + row.arrayOffset();
      } else {
        // We copy the row array if offheap just so we can do a compare. We do this elsewhere too
        // in BBUtils when Cell is backed by an offheap ByteBuffer. Needs fixing so no copy. TODO.
        array = new byte[len];
        offset = 0;
        ByteBufferUtils.copyFromBufferToArray(array, row, row.position(), 0, len);
      }
      // Reverse result since we swap the order of the params we pass below.
      return -compareRows(cell, array, offset, len);
    }

    @Override
    public Comparator getSimpleComparator() {
      return this;
    }
  }

  @Override
  public Comparator getSimpleComparator() {
    return new BBKVComparator(this);
  }

  /**
   * Utility method that makes a guess at comparator to use based off passed tableName.
   * Use in extreme when no comparator specified.
   * @return CellComparator to use going off the {@code tableName} passed.
   */
  public static CellComparator getCellComparator(TableName tableName) {
    return getCellComparator(tableName.toBytes());
  }

  /**
   * Utility method that makes a guess at comparator to use based off passed tableName.
   * Use in extreme when no comparator specified.
   * @return CellComparator to use going off the {@code tableName} passed.
   */
  public static CellComparator getCellComparator(byte [] tableName) {
    // FYI, TableName.toBytes does not create an array; just returns existing array pointer.
    return Bytes.equals(tableName, TableName.META_TABLE_NAME.toBytes())?
      CellComparatorImpl.META_COMPARATOR: CellComparatorImpl.COMPARATOR;
  }
}
package com.util;

import java.util.*;
import java.util.regex.Pattern;
import java.util.regex.Matcher;
import java.io.*;

/**
 * @author Chris Park @ Infinetix Corp
 * Date: 2-3-2021
 * Description: Singleton class used to configure the arduino-cli and upload
 * APM sketches from the dashboard using cmd call subprocesses.
 */
public class ACLIManager {

    private static ACLIManager aclimInstance = null;

    //Every arduino-cli invocation is routed through a Windows cmd.exe shell
    //so that the output redirection (">") used by GENERATE_BOARD_LIST works.
    private static final String CMD_EXEC = "cmd.exe";
    private static final String TERM_FLAG = "/c";
    private static final String ACLI_EXEC = "arduino-cli";
    private static final String ACLI_PATH = "";

    private static final String BOARD_LIST_FILE = "boardlist.txt";
    private static final String SKETCH_CONFIG_FILE = "sketch.json";

    //Compiled once (regex compilation is not cheap). The APM shows up as an
    //avr core. Here is a Regex target line example:
    //"COM4 Serial Port (USB) Arduino Mega or Mega 2560 arduino:avr:mega arduino:avr"
    private static final Pattern BOARD_LINE_PATTERN = Pattern.compile(
            "^COM\\d+\\s\\w{0,6}\\s\\w{0,4}\\s\\(?\\w{0,5}\\)?\\sArduino");

    private ProcessBuilder processBuilder;
    private List<String> params;
    private String port;        //e.g. "serial://COM4" once parseInfo() succeeds
    private String core;        //e.g. "arduino:avr" once parseInfo() succeeds
    private String errorString; //Most recent failure description, or "No error."

    /**
     * Description: Enum used to create, package, and contain all necessary information
     * to run an Arduino CLI command through the ACLIManager (Arduino CLI Manager).
     */
    public enum ACLICommand {
        //Installs required core for APM
        INSTALL_AVR_CORE (Arrays.asList("core", "install", "arduino:avr")),
        //Generates a list of attached Arduino Boards
        GENERATE_BOARD_LIST (Arrays.asList("board", "list", ">", BOARD_LIST_FILE)),
        //Parses the port and core of the APM as it appears in the board list file
        PARSE_BOARD_INFO (Arrays.asList(BOARD_LIST_FILE)),
        //Compiles and prepares a sketch for upload
        COMPILE_SKETCH (Arrays.asList("compile")),
        //Uploads a sketch to the APM
        UPLOAD_SKETCH (Arrays.asList("upload"));

        public final List<String> params;

        ACLICommand(List<String> params) {
            this.params = params;
        }
    };

    /**
     * Constructor (Private, accessed by getInstance)
     */
    private ACLIManager() {
        port = "";
        core = "";
        errorString = "No error.";
    }

    /**
     * Returns the singleton instance of this class to be used system wide.
     * @return The Arduino CLI Command manager instance.
     */
    public static ACLIManager getInstance() {
        if(aclimInstance == null) {
            aclimInstance = new ACLIManager();
        }
        return aclimInstance;
    }

    /**
     * Executes the given ACLICommand object as a command line operation using
     * the Java ProcessBuilder class.
     * @param command - The action/command to execute
     * @return - Whether or not the command completed successfully.
     */
    public boolean execute(ACLICommand command) {
        params = new ArrayList<String>();
        params.addAll(0, Arrays.asList(CMD_EXEC, TERM_FLAG, ACLI_EXEC, ACLI_PATH));
        params.addAll(command.params);

        switch(command) {
            case INSTALL_AVR_CORE:
                //Go straight to execution
                break;

            case GENERATE_BOARD_LIST:
                //Go straight to execution
                break;

            case PARSE_BOARD_INFO:
                if(parseInfo()) {
                    params.add(port);
                }
                else {
                    errorString = "Could not obtain port and core information.";
                    System.err.println("ACLI Manager parse error: "
                            + "parsing operation unsuccessful");
                    return false;
                }
                break;

            default:
        }

        return runProcess(command);
    }

    /**
     * Executes the given ACLICommand object as a command line operation using
     * the Java ProcessBuilder class. Overloaded from previous function to accept a
     * sketch path.
     * @param command - The action/command to execute
     * @param sketchPath - The user selected path to the sketch being used.
     * @return - Whether or not the command completed successfully.
     */
    public boolean execute(ACLICommand command, String sketchPath) {
        params = new ArrayList<String>();
        params.addAll(0, Arrays.asList(CMD_EXEC, TERM_FLAG, ACLI_EXEC, ACLI_PATH));
        params.addAll(command.params);

        switch(command) {
            case COMPILE_SKETCH:
                //Compilation requires both the board core and the port.
                if(core.isEmpty() || port.isEmpty()) {
                    errorString = "Necessary board information (port or core) missing. "
                            + "Unable to compile.";
                    return false;
                }
                params.add("-b " + core);
                params.add("-p " + port);
                params.add(sketchPath);
                break;

            case UPLOAD_SKETCH:
                //Upload only requires the port.
                if(port.isEmpty()) {
                    errorString = "port information missing. "
                            + "Unable to upload sketch.";
                    return false;
                }
                params.add("-p " + port);
                params.add(sketchPath);
                break;

            default:
        }

        return runProcess(command);
    }

    //Runs the previously assembled parameter list as a subprocess, blocking
    //until it exits; sets errorString and the return value accordingly.
    private boolean runProcess(ACLICommand command) {
        processBuilder = new ProcessBuilder();
        processBuilder.command(params);

        try {
            Process process = processBuilder.start();
            //FIX: a non-zero exit code means arduino-cli itself reported a
            //failure; previously this case was silently treated as success.
            if(process.waitFor() != 0) {
                errorString = failureMessage(command);
                System.err.println("ACLI Manager exec error: "
                        + "process exited with a non-zero status");
                return false;
            }
            errorString = "No error.";
        }
        catch (Exception e) {
            errorString = failureMessage(command);
            System.err.println("ACLI Manager exec error: " + e);
            return false;
        }
        return true;
    }

    //Maps a command to its user-facing failure description (previously
    //duplicated as two switch statements in the execute overloads).
    private String failureMessage(ACLICommand command) {
        switch(command) {
            case INSTALL_AVR_CORE:
                return "Unable to install avr core.";
            case GENERATE_BOARD_LIST:
                return "Unable to generate board list.";
            case PARSE_BOARD_INFO:
                return "Unable to parse board info.";
            case COMPILE_SKETCH:
                return "Unable to compile sketch.";
            case UPLOAD_SKETCH:
                return "Unable to upload sketch.";
            default:
                return "Unknown error";
        }
    }

    /**
     * Reads the board list file of attached arduino devices and parses
     * out the required port name and core type for an APM module. These are
     * requirements needed for arduino-cli to compile and upload a sketch.
     * @return - Whether or not the operation was successful.
     */
    public boolean parseInfo() {
        Matcher matcher;
        File file;
        boolean matchFound = false;

        //Checks for board list
        file = new File(BOARD_LIST_FILE);
        if(!file.exists()) {
            System.err.println(
                    "ACLI Manager file error: Board list not found.");
            return false;
        }

        //FIX: try-with-resources closes the reader even when a read throws
        //(the original leaked the streams on the exception path).
        try(BufferedReader bufferedReader =
                new BufferedReader(new FileReader(BOARD_LIST_FILE))) {
            String temp;

            while((temp = bufferedReader.readLine()) != null) {
                matcher = BOARD_LINE_PATTERN.matcher(temp);
                //If a match was found
                matchFound = matcher.find();

                if(matchFound) {
                    //Token 0 is the COM port; the last token is the core type.
                    String[] substrings = temp.split(" ");
                    port = "serial://" + substrings[0];
                    core = substrings[(substrings.length - 1)];
                    break;
                }
            }

            //If no match was found
            if(!matchFound) {
                System.err.println(
                        "ACLI Manager file error: Failed to find port.");
                return false;
            }
        }
        catch(Exception e) {
            System.err.println("ACLI Manager file error: " + e);
            return false;
        }
        return true;
    }

    /**
     * Gets the most recent error string resulting from command executions.
     * @return - The currently set error string or "No error" if there isn't one.
     */
    public String getErrorStr() {
        return errorString;
    }
}
package gluu.scim2.client.multipleresource;

import gluu.scim2.client.UserBaseTest;

import org.gluu.oxtrust.model.scim2.BaseScimResource;
import org.gluu.oxtrust.model.scim2.ListResponse;
import org.gluu.oxtrust.model.scim2.SearchRequest;
import org.gluu.oxtrust.model.scim2.group.GroupResource;
import org.gluu.oxtrust.model.scim2.group.Member;
import org.gluu.oxtrust.model.scim2.user.Group;
import org.gluu.oxtrust.model.scim2.user.UserResource;
import org.testng.annotations.Test;

import javax.ws.rs.core.Response;
import java.util.*;
import java.util.stream.Collectors;

import static javax.ws.rs.core.Response.Status.*;
import static org.testng.Assert.*;

/**
 * This test is quite representative of SCIM functionalities: it covers a good amount of operations in different flavors,
 * and at the same time showcases how User's group assignment works
 * Created by jgomer on 2017-12-28.
 *
 * NOTE(review): the methods below form one TestNG dependency chain
 * (createUsers -> ... -> deleteUsers) and share state through the fields;
 * they must execute in that order against a live SCIM service.
 */
public class GroupAssignUserTest extends UserBaseTest {

    //Users created by createUsers(); later assigned to groups, finally deleted.
    private List<UserResource> friends=new ArrayList<>();
    //Primary group holding all 3 users, and a secondary group holding just one.
    private GroupResource group, group2;
    //The one user who ends up belonging to both groups.
    private UserResource user;

    /** Creates 3 dummy users via the /Users endpoint and keeps the server copies (with ids). */
    @Test
    public void createUsers(){
        logger.info("Creating 3 users...");
        List<UserResource> mentals =new ArrayList<>();

        //Hugo, Paco, and Luis; or Curly, and the other 2 crazy men
        Arrays.asList(1,2,3).forEach( who -> mentals.add(getDummyPatient()));

        for (UserResource user : mentals){
            Response response=client.createUser(user, null, null);
            assertEquals(response.getStatus(), CREATED.getStatusCode());
            friends.add(response.readEntity(usrClass));
        }
    }

    /** Creates a group containing all 3 users and verifies its member set matches. */
    @Test(dependsOnMethods = "createUsers")
    public void assignToGroup(){
        //Build one Member entry per created user.
        Set<Member> buddies=friends.stream().map(buddy -> {
            Member m=new Member();
            m.setValue(buddy.getId());
            m.setDisplay(buddy.getDisplayName());
            return m;
        }).collect(Collectors.toSet());

        GroupResource gr=new GroupResource();
        gr.setDisplayName("3 best demented buddies");
        gr.setMembers(buddies);

        logger.info("Assigning users to new group...");
        Response response=client.createGroup(gr, null, null);
        assertEquals(response.getStatus(), CREATED.getStatusCode());
        group=response.readEntity(GroupResource.class);

        //Verify the sanitarium is completely booked
        assertTrue( group.getMembers().stream().map(Member::getValue).collect(Collectors.toSet()).equals(
            friends.stream().map(UserResource::getId).collect(Collectors.toSet())
        ));
    }

    /** Creates a second group containing only the first user. */
    @Test(dependsOnMethods = "assignToGroup")
    public void assignToSecondGroup(){
        //Creates a new group with only the first patient on it
        Member m=new Member();
        m.setValue(friends.get(0).getId());

        group2=new GroupResource();
        group2.setDisplayName("Auxiliary asylum");
        group2.setMembers(Collections.singleton(m));

        logger.info("Creating a secondary group...");
        Response response=client.createGroup(group2, null, null);
        assertEquals(response.getStatus(), CREATED.getStatusCode());
        group2=response.readEntity(GroupResource.class);
    }

    /**
     * Re-reads the users and checks the server-maintained "groups" attribute:
     * every user belongs to {@code group}; exactly one also belongs to {@code group2}.
     */
    @Test(dependsOnMethods = "assignToSecondGroup")
    public void verifyGroupsAttribute(){
        //Refresh the user instances so getGroups() can be called
        //builds a filter string
        StringBuilder filter=new StringBuilder();
        friends.forEach(buddy -> filter.append(String.format(" or id eq \"%s\"", buddy.getId())));

        //builds a search request; substring(4) drops the leading " or "
        SearchRequest sr=new SearchRequest();
        sr.setFilter(filter.substring(4));
        sr.setCount(3);  //Retrieve only the first 3

        //Performs the query
        logger.info("Issuing query with filter: {}", sr.getFilter());
        Response response=client.searchUsersPost(sr);
        assertEquals(response.getStatus(), OK.getStatusCode());

        logger.info("Verifying groups and users consistency...");
        List<BaseScimResource> buddies=response.readEntity(ListResponse.class).getResources();
        assertEquals(buddies.size(),3);

        //Verify all mad belong to group, and one of them, additionally to group2
        buddies.stream().map(usrClass::cast).forEach(buddy -> {
            Set<String> groupIds=buddy.getGroups().stream().map(Group::getValue).collect(Collectors.toSet());
            assertTrue(groupIds.contains(group.getId()));
        });

        Optional<UserResource> usrOpt=buddies.stream().map(usrClass::cast)
                .filter(buddy -> buddy.getGroups().size()>1).findFirst();
        assertTrue(usrOpt.isPresent());

        //Remember the dual-membership user for the next test.
        user=usrOpt.get();
        assertTrue(user.getGroups().stream().map(Group::getValue).collect(Collectors.toSet()).contains(group2.getId()));
    }

    /**
     * Attempts to drop a membership through the /Users endpoint; "groups" is a
     * read-only attribute, so both memberships must survive the update.
     */
    @Test(dependsOnMethods = "verifyGroupsAttribute")
    public void modifyGroupFromUser(){
        //Try to modify read-only "groups" attribute of User Resource (must not change)
        user.getGroups().remove(0);
        Response response=client.updateUser(user, user.getId(), null, null);
        assertEquals(response.getStatus(), OK.getStatusCode());

        logger.info("Attempting to modify group membership using /Users endpoint...");
        user=response.readEntity(usrClass);

        Set<String> groupIds=user.getGroups().stream().map(Group::getValue).collect(Collectors.toSet());
        assertTrue(groupIds.contains(group.getId()));
        assertTrue(groupIds.contains(group2.getId()));
    }

    /**
     * Removes one member from the main group, adds the admin user, and checks
     * both the group's member list and the affected users' "groups" attributes.
     */
    @Test(dependsOnMethods = "modifyGroupFromUser")
    public void alterMemberships(){
        //Effectively remove one member and add admin
        Member aMental=group.getMembers().stream().findAny().get();
        Member admin=new Member();
        admin.setValue(getAdminId());

        group.getMembers().remove(aMental);
        group.getMembers().add(admin);

        logger.info("Removing one and adding one member...");
        Response response=client.updateGroup(group, group.getId(), null, null);
        assertEquals(response.getStatus(), OK.getStatusCode());
        group=response.readEntity(GroupResource.class);

        assertFalse(group.getMembers().contains(aMental));
        //Here we don't use contains because equality in Member object inspects all fields (not only value)
        assertTrue(group.getMembers().stream().anyMatch(m -> admin.getValue().equals(m.getValue())));
        logger.info("Group has correct members");

        //Verify groups attribute in users reflected changes
        response=client.getUserById(aMental.getValue(), "groups", null);
        assertEquals(response.getStatus(), OK.getStatusCode());
        UserResource patient=response.readEntity(usrClass);
        //Removed user: either no groups at all, or at least not this group.
        assertTrue(patient.getGroups()==null ||
                patient.getGroups().stream().noneMatch(gr -> gr.getValue().equals(group.getId())));

        response=client.getUserById(admin.getValue(), "groups", null);
        assertEquals(response.getStatus(), OK.getStatusCode());
        patient=response.readEntity(usrClass);
        assertTrue(patient.getGroups().stream().anyMatch(gr -> gr.getValue().equals(group.getId())));
        logger.info("Users have correct memberships");
    }

    /** Cleanup: deletes both groups. alwaysRun so it fires even after earlier failures. */
    @Test(dependsOnMethods = "alterMemberships", alwaysRun = true)
    public void deleteGroups(){
        //Dismantle sanitarium...
        for (GroupResource gr : Arrays.asList(group, group2))
            if (gr!=null){
                Response response=client.deleteGroup(gr.getId());

                if (response.getStatus()==NO_CONTENT.getStatusCode())
                    logger.info("Group '{}' removed", gr.getDisplayName());
                else
                    logger.error("Error removing group '{}'", gr.getDisplayName());
            }
    }

    /** Cleanup: deletes the users created by createUsers(). */
    @Test(dependsOnMethods = "deleteGroups", alwaysRun = true)
    public void deleteUsers(){
        if (friends!=null){
            //Considered sane now
            for (UserResource usr : friends){
                Response response=client.deleteUser(usr.getId());

                if (response.getStatus()==NO_CONTENT.getStatusCode())
                    logger.info("User '{}' removed", usr.getDisplayName());
                else
                    logger.error("Error removing user '{}'", usr.getDisplayName());
            }
        }
    }

    /** Builds a throw-away user whose random userName doubles as its display name. */
    private UserResource getDummyPatient() {
        UserResource user = new UserResource();
        user.setUserName("test-" + Math.random());
        user.setDisplayName(user.getUserName());
        return user;
    }

    /** Looks up the id of the "admin" user via a filtered search. */
    private String getAdminId(){
        //Search the id of the admin user
        SearchRequest sr=new SearchRequest();
        sr.setFilter("userName eq \"admin\"");

        Response response=client.searchUsersPost(sr);
        assertEquals(response.getStatus(), OK.getStatusCode());

        ListResponse lr=response.readEntity(ListResponse.class);
        assertTrue(lr.getResources().size()>0);
        return lr.getResources().get(0).getId();
    }

}
import java.awt.*;
import java.util.*;
import java.util.Iterator;
import java.util.List;
import javax.imageio.ImageIO;
import java.io.*;

//edit moves
/**
 * Queen chess piece: its legal moves are the union of the bishop's (diagonals)
 * and the rook's (rows/columns), so Moves() computes both sets and concatenates.
 *
 * NOTE(review): all coordinate math assumes 50-pixel board squares with the
 * piece drawn at a 5-pixel inset, i.e. (x-5)/50 is the column index -- confirm
 * against BoardParts/ChessPieceClass.
 */
public class QueenPiece extends ChessPieceClass {

    /**
     * Creates a queen at pixel position (x, y); isPo selects player one and
     * with it which texture file is loaded.
     */
    public QueenPiece(double x,double y,double width,double height,boolean isPo){
        this.x=x;
        this.y=y;
        this.width=width;
        this.height=height;
        playerOne=isPo;

        try{
            if(playerOne){
                pieceImage=ImageIO.read(new File("FilteredTextures//queen.png"));
            }else{
                pieceImage=ImageIO.read(new File("FilteredTextures//queenTwo.png"));
            }
        }catch(IOException e){
            //Texture failed to load; the piece keeps a null image.
            e.printStackTrace();
        }
    }

    //combine rook and bishop ones.
    /**
     * Populates pieceMoves with every reachable square: first the diagonal
     * (bishop-like) squares, then the horizontal/vertical (rook-like) ones.
     *
     * @param open              squares not occupied by any piece
     * @param enemyPieceSquares squares occupied by opposing pieces (capturable)
     */
    public void Moves(ArrayList<BoardParts> open,ArrayList<BoardParts> enemyPieceSquares){
        //Collect every enemy square on one of this piece's diagonals
        //(equal |dx| and |dy| measured in 50-pixel squares).
        for(int i=0;i<enemyPieceSquares.size();i++){
            if(Math.abs((enemyPieceSquares.get(i).getX()/50.0)-((this.x-5.0)/50.0))==Math.abs((enemyPieceSquares.get(i).getY()/50.0)-((this.y-5.0)/50.0))){
                pieceMoves.add(enemyPieceSquares.get(i));
            }
        }
        //...and every open square on a diagonal.
        for(int index=0;index<open.size();index++){
            if(Math.abs(((open.get(index).getX()/50.0)-(this.x-5)/50.0))==(Math.abs(((open.get(index).getY())/50.0)-(this.y-5)/50.0))){
                pieceMoves.add(open.get(index));
            }
        }
        //Keep only diagonal squares actually reachable (no jumping over pieces).
        List<BoardParts> diagonal=sortListDia(pieceMoves,enemyPieceSquares);
        pieceMoves.removeAll(pieceMoves); //clear and reuse the list for the rook pass

        //Collect enemy and open squares sharing this piece's column or row.
        for(int index=0;index<enemyPieceSquares.size();index++){
            if(((enemyPieceSquares.get(index).getX()/50.0)==(this.x-5)/50.0) || (((enemyPieceSquares.get(index).getY())/50.0)==(this.y-5)/50.0)){
                pieceMoves.add(enemyPieceSquares.get(index));
            }
        }
        for(int index=0;index<open.size();index++){
            if(((open.get(index).getX()/50.0)==(this.x-5)/50.0) || (((open.get(index).getY())/50.0)==(this.y-5)/50.0)){
                pieceMoves.add(open.get(index));
            }
        }
        List<BoardParts> hv=sortListHorVer(pieceMoves,enemyPieceSquares);

        //Final move set = reachable diagonals + reachable rows/columns.
        pieceMoves.removeAll(pieceMoves);
        pieceMoves.addAll(diagonal);
        pieceMoves.addAll(hv);
        canDraw=true;
    }

    /**
     * Walks outward from the piece along each diagonal (NW/NE/SE/SW) in
     * 50-pixel steps, keeping contiguous squares from {@code li} and ending a
     * ray once an enemy square is taken (a capture stops further travel).
     *
     * The scans use {@code i=-1} to restart from the top of {@code li} after
     * every accepted square, because {@code li} is unordered.
     *
     * @param li    candidate squares already known to lie on a diagonal
     * @param enemy enemy-occupied squares (a ray stops after including one)
     * @return the reachable diagonal squares
     */
    public List<BoardParts> sortListDia(List<BoardParts> li,List<BoardParts> enemy){
        //Bucket candidates by quadrant relative to the piece; the buckets are
        //only used to skip a direction entirely when it has no candidates.
        List<BoardParts> northWest=new ArrayList<BoardParts>();
        List<BoardParts> northEast=new ArrayList<BoardParts>();
        List<BoardParts> southEast=new ArrayList<BoardParts>();
        List<BoardParts> southWest=new ArrayList<BoardParts>();

        for(int i=0;i<li.size();i++){
            if(li.get(i).getX()<=this.x && li.get(i).getY()<=this.y-5.0){
                northWest.add(li.get(i));
            }
            else if(li.get(i).getX()>=this.x && li.get(i).getY()<=this.y-5.0){
                northEast.add(li.get(i));
            }
            else if(li.get(i).getX()>=this.x && li.get(i).getY()>=this.y-5.0){
                southEast.add(li.get(i));
            }
            else if(li.get(i).getX()<=this.x && li.get(i).getY()>=this.y-5.0){
                southWest.add(li.get(i));
            }
        }

        //(tx, ty) tracks the last reached square, starting at the piece itself.
        double tx=this.x-5.0;
        double ty=this.y-5.0;
        List<BoardParts> tli=new ArrayList<BoardParts>();

        //North-west ray: step (-50, -50) while a contiguous square exists.
        if(!(northWest.isEmpty())){
            for(int i=0;i<li.size();i++){
                if(tx-50.0==li.get(i).getX() && ty-50.0==li.get(i).getY()){
                    tx-=50.0;
                    ty-=50;
                    tli.add(li.get(i));
                    if(enemy.contains(li.get(i))){
                        break; //captured square ends the ray
                    }
                    i=-1; //restart scan from the new (tx, ty)
                }
            }
            tx=this.x-5.0;
            ty=this.y-5.0;
        }
        //North-east ray: step (+50, -50).
        if(!(northEast.isEmpty())){
            for(int i=0;i<li.size();i++){
                if(tx+50.0==li.get(i).getX() && ty-50.0==li.get(i).getY()){
                    tx+=50.0;
                    ty-=50.0;
                    tli.add(li.get(i));
                    if(enemy.contains(li.get(i))){
                        break;
                    }
                    i=-1;
                }
            }
            tx=this.x-5.0;
            ty=this.y-5.0;
        }
        //South-east ray: step (+50, +50).
        if(!(southEast.isEmpty())){
            for(int i=0;i<li.size();i++){
                if(tx+50.0==li.get(i).getX() && ty+50.0==li.get(i).getY()){
                    tx+=50.0;
                    ty+=50.0;
                    tli.add(li.get(i));
                    if(enemy.contains(li.get(i))){
                        break;
                    }
                    i=-1;
                }
            }
            tx=this.x-5.0;
            ty=this.y-5.0;
        }
        //South-west ray: step (-50, +50).
        if(!(southWest.isEmpty())){
            for(int i=0;i<li.size();i++){
                if(tx-50.0==li.get(i).getX() && ty+50.0==li.get(i).getY()){
                    tx-=50.0;
                    ty+=50.0;
                    tli.add(li.get(i));
                    if(enemy.contains(li.get(i))){
                        break;
                    }
                    i=-1;
                }
            }
            tx=this.x-5.0;
            ty=this.y-5.0;
        }
        return tli;
    }

    /**
     * Rook-style counterpart of sortListDia(): walks outward along the four
     * straight directions (west/east/north/south) in 50-pixel steps, again
     * ending each ray when an enemy square is consumed.
     *
     * NOTE(review): the west/east scans match on X only (and north/south on Y
     * only) -- this presumably relies on {@code li} having been pre-filtered
     * to this piece's row/column by Moves(); confirm. Also, {@code ty} is
     * reset after the horizontal scans although they never modify it.
     *
     * @param li    candidate squares on this piece's row or column
     * @param enemy enemy-occupied squares (a ray stops after including one)
     * @return the reachable horizontal/vertical squares
     */
    public List<BoardParts> sortListHorVer(List<BoardParts> li,List<BoardParts> enemy){
        //Bucket candidates by direction; used only to skip empty directions.
        List<BoardParts> west=new ArrayList<BoardParts>();
        List<BoardParts> east=new ArrayList<BoardParts>();
        List<BoardParts> north=new ArrayList<BoardParts>();
        List<BoardParts> south=new ArrayList<BoardParts>();

        for(int i=0;i<li.size();i++){
            if(li.get(i).getX()<=this.x && li.get(i).getY()==this.y-5.0){
                west.add(li.get(i));
            }
            else if(li.get(i).getX()>=this.x && li.get(i).getY()==this.y-5.0){
                east.add(li.get(i));
            }
            else if(li.get(i).getY()<=this.y && li.get(i).getX()==this.x-5.0){
                north.add(li.get(i));
            }
            else if(li.get(i).getY()>=this.y && li.get(i).getX()==this.x-5.0){
                south.add(li.get(i));
            }
        }

        //(tx, ty) tracks the last reached square, starting at the piece itself.
        double tx=this.x-5.0;
        double ty=this.y-5.0;
        List<BoardParts> tli=new ArrayList<BoardParts>();

        //West ray: step -50 in X.
        if(!(west.isEmpty())){
            for(int i=0;i<li.size();i++){
                if(tx-50.0==li.get(i).getX()){
                    tx-=50.0;
                    tli.add(li.get(i));
                    if(enemy.contains(li.get(i))){
                        break; //captured square ends the ray
                    }
                    i=-1; //restart scan from the new tx
                }
            }
            tx=this.x-5.0;
            ty=this.y-5.0;
        }
        //East ray: step +50 in X.
        if(!(east.isEmpty())){
            for(int i=0;i<li.size();i++){
                if(tx+50.0==li.get(i).getX()){
                    tx+=50.0;
                    tli.add(li.get(i));
                    if(enemy.contains(li.get(i))){
                        break;
                    }
                    i=-1;
                }
            }
            tx=this.x-5.0;
            ty=this.y-5.0;
        }
        //North ray: step -50 in Y.
        if(!(north.isEmpty())){
            for(int i=0;i<li.size();i++){
                if(ty-50.0==li.get(i).getY()){
                    ty-=50.0;
                    tli.add(li.get(i));
                    if(enemy.contains(li.get(i))){
                        break;
                    }
                    i=-1;
                }
            }
            tx=this.x-5.0;
            ty=this.y-5.0;
        }
        //South ray: step +50 in Y.
        if(!(south.isEmpty())){
            for(int i=0;i<li.size();i++){
                if(ty+50.0==li.get(i).getY()){
                    ty+=50.0;
                    tli.add(li.get(i));
                    if(enemy.contains(li.get(i))){
                        break;
                    }
                    i=-1;
                }
            }
            tx=this.x-5.0;
            ty=this.y-5.0;
        }
        return tli;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hp.hpl.jena.shared.uuid;

import com.hp.hpl.jena.shared.uuid.Bits;

import junit.framework.TestCase;

/**
 * Unit tests for the {@link Bits} bit-manipulation helpers. All bit ranges
 * follow the convention [start, finish) over a 64-bit long.
 */
public class TestBits extends TestCase
{
    // ---- Bits.mask(start, finish): ones inside the range, zeros outside.
    public void testMask1() { long v = Bits.mask(0, 1); check(0x1L, v); }
    public void testMask2() { long v = Bits.mask(0, 2); check(0x3L, v); }
    public void testMask3() { long v = Bits.mask(1, 2); check(0x2L, v); }
    public void testMask4() { long v = Bits.mask(0, 64); check(-1L, v); }
    public void testMask5() { long v = Bits.mask(16, 48); check(0x0000FFFFFFFF0000L, v); }
    public void testMask6() { long v = Bits.mask(16, 64); check(0xFFFFFFFFFFFF0000L, v); }
    public void testMask7() { long v = Bits.mask(0, 0); check(0L, v); }

    // ---- Bits.maskZero(start, finish): complement of mask().
    public void testMaskZero1() { long v = Bits.maskZero(0, 1); check(~0x1L, v); }
    public void testMaskZero2() { long v = Bits.maskZero(0, 2); check(~0x3L, v); }
    public void testMaskZero3() { long v = Bits.maskZero(1, 2); check(0xFFFFFFFFFFFFFFFDL, v); }
    public void testMaskZero4() { long v = Bits.maskZero(0, 64); check(0, v); }
    public void testMaskZero5() { long v = Bits.maskZero(16, 48); check(0xFFFF00000000FFFFL, v); }
    public void testMaskZero6() { long v = Bits.maskZero(16, 64); check(0xFFFFL, v); }
    public void testMaskZero7() { long v = Bits.maskZero(0, 0); check(-1L, v); }

    // ---- Bits.clear(v, start, finish): zeros the bits in the range.
    // (Unused "String s = Long.toHexString(v)" debug locals removed.)
    public void testClear1() { long v = 0xF0F0; v = Bits.clear(v, 4, 8); check(0xF000L, v); }
    public void testClear2() { long v = 0x8000000000000000L; v = Bits.clear(v, 63, 64); check(0x0L, v); }
    public void testClear3() { long v = 0xC000000000000000L; v = Bits.clear(v, 63, 64); check(0x4000000000000000L, v); }
    public void testClear4() { long v = -1; v = Bits.clear(v, 63, 64); check(0x7FFFFFFFFFFFFFFFL, v); }
    public void testClear5() { long v = -1; v = Bits.clear(v, 32, 64); check(0x00000000FFFFFFFFL, v); }
    public void testClear6() { long v = -1; v = Bits.clear(v, 0, 32); check(0xFFFFFFFF00000000L, v); }
    public void testClear7() { long v = -1L; v = Bits.clear(v, 0, 0); check(-1L, v); }

    // ---- Bits.set(v, start, finish): sets the bits in the range.
    public void testSet1() { long v = 0x0; v = Bits.set(v, 0, 1); check(1, v); }
    public void testSet2() { long v = 0x1; v = Bits.set(v, 0, 1); check(1, v); }
    public void testSet3() { long v = 0xF0; v = Bits.set(v, 0, 1); check(0xF1, v); }
    public void testSet4() { long v = 0xF0F0F0F0F0F0F0F0L; v = Bits.set(v, 0, 8); check(0xF0F0F0F0F0F0F0FFL, v); }
    public void testSet5() { long v = 0; v = Bits.set(v, 16, 48); check(0x0000FFFFFFFF0000L, v); }
    public void testSet6() { long v = 0; v = Bits.set(v, 63, 64); check(0x8000000000000000L, v); }
    public void testSet7() { long v = 0; v = Bits.set(v, 62, 64); check(0xC000000000000000L, v); }
    public void testSet8() { long v = 0; v = Bits.set(v, 0, 64); check(-1L, v); }
    public void testSet9() { long v = 0; v = Bits.set(v, 10, 10); check(0, v); }

    // ---- Bits.set(v, bitIndex): sets a single bit.
    public void testSetBit1() { long v = 0; v = Bits.set(v, 0); check(1, v); }
    public void testSetBit2() { long v = 0; v = Bits.set(v, 1); check(2, v); }
    public void testSetBit3() { long v = 1; v = Bits.set(v, 0); check(1, v); }
    public void testSetBit4() { long v = -1; v = Bits.set(v, 0); check(-1, v); }
    public void testSetBit5() { long v = 0; v = Bits.set(v, 62); check(0x4000000000000000L, v); }
    public void testSetBit6() { long v = 0; v = Bits.set(v, 63); check(0x8000000000000000L, v); }

    // ---- Bits.test(v, expectedBit, bitIndex): single-bit test.
    public void testBitTest1() { long v = 0; assertTrue(Bits.test(v, false, 0)); }
    public void testBitTest2() { long v = 1; assertTrue(Bits.test(v, true, 0)); }
    public void testBitTest3() { long v = -1; assertTrue(Bits.test(v, true, 63)); }
    public void testBitTest4() { long v = 0x7FFFFFFFFFFFFFFFL; assertTrue(Bits.test(v, false, 63)); }

    // ---- Bits.test(v, expected, start, finish): range test.
    public void testBitsTest1() { long v = 0xFEDCBA9876543210L; assertTrue(Bits.test(v, 0x0, 0, 4)); }
    public void testBitsTest2() { long v = 0xFEDCBA9876543210L; assertTrue(Bits.test(v, 0x10, 0, 8)); }
    public void testBitsTest3() { long v = 0xFEDCBA9876543210L; assertTrue(Bits.test(v, v, 0, 64)); }
    public void testBitsTest4() { long v = 0xFEDCBA9876543210L; assertFalse(Bits.test(v, 0, 0, 64)); }
    public void testBitsTest5() { long v = 0xFEDCBA9876543210L; assertTrue(Bits.test(v, 0x0000BA9876540000L, 16, 48)); }

    // ---- Bits.access(v, start, finish): extracts the range in place (not shifted).
    public void testAccess1() { long v = -1; v = Bits.access(v, 4, 8); check(0xF0L, v); }
    public void testAccess2() { long v = 0xFEDCBA9876543210L; v = Bits.access(v, 0, 8); check(0x10L, v); }
    public void testAccess3() { long v = 0xFEDCBA9876543210L; v = Bits.access(v, 0, 64); check(0xFEDCBA9876543210L, v); }
    public void testAccess4() { long v = 0xFEDCBA9876543210L; v = Bits.access(v, 62, 64); check(0xC000000000000000L, v); }
    public void testAccess5() { long v = 0xFEDCBA9876543210L; v = Bits.access(v, 0, 2); check(0L, v); }

    // ---- Bits.pack(v, value, start, finish): writes value into the range.
    public void testPack1() { long v = 0; v = Bits.pack(v, 0xFL, 0, 4); check(0xFL, v); }
    public void testPack2() { long v = 0xF0; v = Bits.pack(v, 0x2, 0, 4); check(0xF2L, v); }
    public void testPack3() { long v = -1; v = Bits.pack(v, 0x2, 0, 8); check(0xFFFFFFFFFFFFFF02L, v); }
    public void testPack4() { long v = 0xFFFFFFFF00000000L; v = Bits.pack(v, 0x2, 16, 32); check(0xFFFFFFFF00020000L, v); }
    public void testPack5() { long v = 0xFFFFFFFF00000000L; v = Bits.pack(v, 0xFFFF, 16, 32); check(0xFFFFFFFFFFFF0000L, v); }

    // ---- Bits.unpack(v, start, finish): extracts the range shifted to bit 0.
    public void testUnpack1() { long v = 0xABCDABCDABCDABCDL; v = Bits.unpack(v, 0, 4); check(0xDL, v); }
    public void testUnpack2() { long v = 0xABCDABCDABCDABCDL; v = Bits.unpack(v, 63, 64); check(1L, v); }
    public void testUnpack3() { long v = 0xABCDABCDABCDABCDL; v = Bits.unpack(v, 56, 64); check(0xABL, v); }
    public void testUnpack4() { long v = 0xABCD12345678ABCDL; v = Bits.unpack(v, 32, 48); check(0x1234L, v); }

    // ---- Bits.unpack(str, start, finish): parses hex digits [start, finish) of a string.
    public void testUnpackStr1() { String s = "ABCD"; long v = Bits.unpack(s, 0, 4); check(0xABCDL, v); }
    public void testUnpackStr2() { String s = "ABCD"; long v = Bits.unpack(s, 2, 4); check(0xCDL, v); }
    public void testUnpackStr3() { String s = "ABCD"; long v = Bits.unpack(s, 0, 2); check(0xABL, v); }

    // Compares expected and actual, failing with both values rendered in hex.
    private void check(long expected, long actual) {
        check(null, expected, actual);
    }

    private void check(String msg, long expected, long actual) {
        if (expected == actual)
            return;
        String s = "Expected: " + Long.toHexString(expected) + " : Got: " + Long.toHexString(actual);
        if (msg != null)
            s = msg + ": " + s;
        // fail(s) states the intent directly; the original used the
        // always-failing idiom assertFalse(s, true) just for its message.
        fail(s);
    }
}
/* Copyright 2009 - 2010 The Stajistics Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.stajistics.management; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; import static org.stajistics.TestUtil.buildStatsKeyExpectations; import static org.stajistics.management.StatsMXBeanUtil.MANAGER_NAME_CONFIG; import static org.stajistics.management.StatsMXBeanUtil.MANAGER_NAME_SESSION; import static org.stajistics.management.StatsMXBeanUtil.MANAGER_NAME_STATS; import static org.stajistics.management.StatsMXBeanUtil.SUBTYPE_CONFIG; import static org.stajistics.management.StatsMXBeanUtil.SUBTYPE_SESSION; import static org.stajistics.management.StatsMXBeanUtil.TYPE_KEYS; import static org.stajistics.management.StatsMXBeanUtil.buildKeyName; import static org.stajistics.management.StatsMXBeanUtil.buildManagerName; import java.lang.management.ManagementFactory; import javax.management.MBeanServer; import javax.management.MBeanServerFactory; import javax.management.ObjectName; import org.jmock.Expectations; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.stajistics.AbstractStajisticsTestCase; import org.stajistics.StatsKey; import org.stajistics.StatsManager; import org.stajistics.configuration.StatsConfig; import org.stajistics.configuration.StatsConfigBuilderFactory; import org.stajistics.configuration.StatsConfigManager; import 
org.stajistics.management.beans.StatsConfigMXBean;
import org.stajistics.management.beans.StatsConfigManagerMXBean;
import org.stajistics.management.beans.StatsManagerMXBean;
import org.stajistics.management.beans.StatsSessionMXBean;
import org.stajistics.management.beans.StatsSessionManagerMXBean;
import org.stajistics.session.StatsSession;
import org.stajistics.session.StatsSessionManager;

/**
 * Unit tests for {@link DefaultStatsMXBeanRegistrar}.
 * <p>
 * Each test runs against a private, throw-away {@link MBeanServer} created in
 * {@link #setUp()}, so registrations never touch the platform MBean server;
 * {@link #tearDown()} restores the platform server as the registrar's default.
 *
 * @author The Stajistics Project
 */
public class DefaultStatsMXBeanRegistrarTest extends AbstractStajisticsTestCase {

    private static final String NAMESPACE = "ns";
    private static final String NORMAL = "normal";

    private StatsKey mockKey;
    private StatsMXBeanFactory mockMBeanFactory;
    private StatsManager mockStatsManager;
    private StatsSessionManager mockSessionManager;
    private StatsConfigManager mockConfigManager;

    private MBeanServer mBeanServer;
    private DefaultStatsMXBeanRegistrar mxBeanRegistrar;

    @Before
    public void setUp() {
        mockKey = mockery.mock(StatsKey.class);
        mockStatsManager = mockery.mock(StatsManager.class);
        mockSessionManager = mockery.mock(StatsSessionManager.class);
        mockConfigManager = mockery.mock(StatsConfigManager.class);
        mockMBeanFactory = mockery.mock(StatsMXBeanFactory.class);

        // Isolated MBean server: registrations are visible only to this test.
        mBeanServer = MBeanServerFactory.newMBeanServer();
        DefaultStatsMXBeanRegistrar.setMBeanServer(mBeanServer);

        mxBeanRegistrar = new DefaultStatsMXBeanRegistrar(NAMESPACE, mockMBeanFactory);

        mockery.checking(new Expectations() {{
            allowing(mockStatsManager).getNamespace();
            will(returnValue(NAMESPACE));
        }});
    }

    @After
    public void tearDown() {
        mxBeanRegistrar = null;
        // Restore the global default so subsequent tests see the platform server.
        DefaultStatsMXBeanRegistrar.setMBeanServer(ManagementFactory.getPlatformMBeanServer());
    }

    @Test
    public void testConstructWithNullNamespace() {
        try {
            new DefaultStatsMXBeanRegistrar(null, mockMBeanFactory);
            // FIX: previously absent — the test passed vacuously when no
            // exception was thrown by the constructor.
            org.junit.Assert.fail("expected NullPointerException for null namespace");
        } catch (NullPointerException npe) {
            assertEquals("namespace", npe.getMessage());
        }
    }

    @Test
    public void testConstructWithEmptyNamespace() {
        try {
            new DefaultStatsMXBeanRegistrar("", mockMBeanFactory);
            // FIX: previously absent — see testConstructWithNullNamespace.
            org.junit.Assert.fail("expected IllegalArgumentException for empty namespace");
        } catch (IllegalArgumentException e) {
            assertEquals("empty namespace", e.getMessage());
        }
    }

    @Test
    public void testConstructWithNullMXBeanFactory() {
        try {
            new DefaultStatsMXBeanRegistrar(NAMESPACE, null);
            // FIX: previously absent — see testConstructWithNullNamespace.
            org.junit.Assert.fail("expected NullPointerException for null mxBeanFactory");
        } catch (NullPointerException npe) {
            assertEquals("mxBeanFactory", npe.getMessage());
        }
    }

    @Test
    public void testGetMBeanServer() {
        assertSame(mBeanServer, DefaultStatsMXBeanRegistrar.getMBeanServer());
    }

    @Test
    public void testRegisterManagerMBean() throws Exception {
        final StatsManagerMXBean mockManagerMBean = mockery.mock(StatsManagerMXBean.class);

        mockery.checking(new Expectations() {{
            one(mockMBeanFactory).createManagerMXBean(mockStatsManager);
            will(returnValue(mockManagerMBean));
        }});

        ObjectName objectName =
            new ObjectName(buildManagerName(NAMESPACE, MANAGER_NAME_STATS, true));

        assertTrue(mBeanServer.queryMBeans(objectName, null).isEmpty());
        mxBeanRegistrar.registerStatsManagerMXBean(mockStatsManager);
        assertEquals(1, mBeanServer.queryMBeans(objectName, null).size());
    }

    @Test
    public void testUnregisterManagerMBean() throws Exception {
        ObjectName objectName =
            new ObjectName(buildManagerName(NAMESPACE, MANAGER_NAME_STATS, true));

        final StatsManagerMXBean mockManagerMBean = mockery.mock(StatsManagerMXBean.class);
        mBeanServer.registerMBean(mockManagerMBean, objectName);

        mxBeanRegistrar.unregisterStatsManagerMXBean();
        assertTrue(mBeanServer.queryMBeans(objectName, null).isEmpty());
    }

    @Test
    public void testRegisterSessionManagerMBean() throws Exception {
        final StatsSessionManagerMXBean mockSessionManagerMBean =
            mockery.mock(StatsSessionManagerMXBean.class);

        mockery.checking(new Expectations() {{
            one(mockMBeanFactory).createSessionManagerMXBean(with(mockSessionManager));
            will(returnValue(mockSessionManagerMBean));
        }});

        ObjectName objectName =
            new ObjectName(buildManagerName(NAMESPACE, MANAGER_NAME_SESSION, true));

        assertTrue(mBeanServer.queryMBeans(objectName, null).isEmpty());
        mxBeanRegistrar.registerSessionManagerMXBean(mockSessionManager);
        assertEquals(1, mBeanServer.queryMBeans(objectName, null).size());
    }

    @Test
    public void testUnregisterSessionManagerMBean() throws Exception {
        ObjectName objectName =
            new ObjectName(buildManagerName(NAMESPACE, MANAGER_NAME_SESSION, true));

        final StatsSessionManagerMXBean mockSessionManagerMBean =
            mockery.mock(StatsSessionManagerMXBean.class);
        mBeanServer.registerMBean(mockSessionManagerMBean, objectName);

        mxBeanRegistrar.unregisterSessionManagerMXBean();
        assertTrue(mBeanServer.queryMBeans(objectName, null).isEmpty());
    }

    @Test
    public void testRegisterConfigManagerMBean() throws Exception {
        final StatsConfigManagerMXBean mockConfigManagerMBean =
            mockery.mock(StatsConfigManagerMXBean.class);

        mockery.checking(new Expectations() {{
            one(mockMBeanFactory).createConfigManagerMXBean(mockConfigManager);
            will(returnValue(mockConfigManagerMBean));
        }});

        ObjectName objectName =
            new ObjectName(buildManagerName(NAMESPACE, MANAGER_NAME_CONFIG, true));

        assertTrue(mBeanServer.queryMBeans(objectName, null).isEmpty());
        mxBeanRegistrar.registerConfigManagerMXBean(mockConfigManager);
        assertEquals(1, mBeanServer.queryMBeans(objectName, null).size());
    }

    @Test
    public void testUnregisterConfigManagerMBean() throws Exception {
        ObjectName objectName =
            new ObjectName(buildManagerName(NAMESPACE, MANAGER_NAME_CONFIG, true));

        final StatsConfigManagerMXBean mockConfigManagerMBean =
            mockery.mock(StatsConfigManagerMXBean.class);
        mBeanServer.registerMBean(mockConfigManagerMBean, objectName);

        mxBeanRegistrar.unregisterConfigManagerMXBean();
        assertTrue(mBeanServer.queryMBeans(objectName, null).isEmpty());
    }

    @Test
    public void testRegisterConfigMBean() throws Exception {
        buildStatsKeyExpectations(mockery, mockKey, NORMAL);

        final StatsConfig mockConfig = mockery.mock(StatsConfig.class);
        final StatsConfigBuilderFactory mockConfigBuilderFactory =
            mockery.mock(StatsConfigBuilderFactory.class);
        final StatsConfigMXBean mockConfigMBean = mockery.mock(StatsConfigMXBean.class);

        mockery.checking(new Expectations() {{
            allowing(mockStatsManager).getConfigBuilderFactory();
            will(returnValue(mockConfigBuilderFactory));
            one(mockMBeanFactory).createConfigMXBean(NAMESPACE, mockKey, mockConfig);
            will(returnValue(mockConfigMBean));
            ignoring(mockConfig);
        }});

        ObjectName name =
            new ObjectName(buildKeyName(NAMESPACE, mockKey, TYPE_KEYS, SUBTYPE_CONFIG, true));

        assertTrue(mBeanServer.queryMBeans(name, null).isEmpty());
        mxBeanRegistrar.registerConfigMXBean(mockKey, mockConfig);
        assertEquals(1, mBeanServer.queryMBeans(name, null).size());
    }

    @Test
    public void testUnregisterConfigMBeanIfNecessary() throws Exception {
        buildStatsKeyExpectations(mockery, mockKey, NORMAL);

        final StatsConfig mockConfig = mockery.mock(StatsConfig.class);
        final StatsConfigMXBean mockConfigMbean = mockery.mock(StatsConfigMXBean.class);

        mockery.checking(new Expectations() {{
            ignoring(mockConfig);
        }});

        ObjectName name =
            new ObjectName(buildKeyName(NAMESPACE, mockKey, TYPE_KEYS, SUBTYPE_CONFIG, true));
        mBeanServer.registerMBean(mockConfigMbean, name);

        mxBeanRegistrar.unregisterConfigMXBeanIfNecessary(mockKey);
        assertTrue(mBeanServer.queryMBeans(name, null).isEmpty());
    }

    @Test
    public void testUnregisterConfigMBeanIfNotNecessary() throws Exception {
        buildStatsKeyExpectations(mockery, mockKey, NORMAL);

        // Nothing registered under this name: the call must be a harmless no-op.
        ObjectName name =
            new ObjectName(buildKeyName(NAMESPACE, mockKey, TYPE_KEYS, SUBTYPE_SESSION, true));

        mxBeanRegistrar.unregisterConfigMXBeanIfNecessary(mockKey);
        assertTrue(mBeanServer.queryMBeans(name, null).isEmpty());
    }

    @Test
    public void testRegisterSessionMBean() throws Exception {
        buildStatsKeyExpectations(mockery, mockKey, NORMAL);

        final StatsSession mockSession = mockery.mock(StatsSession.class);
        final StatsSessionMXBean mockSessionMBean = mockery.mock(StatsSessionMXBean.class);

        mockery.checking(new Expectations() {{
            allowing(mockStatsManager).getSessionManager();
            will(returnValue(mockSessionManager));
            one(mockMBeanFactory).createSessionMXBean(NAMESPACE, mockSession);
            will(returnValue(mockSessionMBean));
            allowing(mockSession).getKey();
            will(returnValue(mockKey));
            ignoring(mockSession);
        }});

        ObjectName name =
            new ObjectName(buildKeyName(NAMESPACE, mockKey, TYPE_KEYS, SUBTYPE_SESSION, true));

        assertTrue(mBeanServer.queryMBeans(name, null).isEmpty());
        mxBeanRegistrar.registerSessionMXBean(mockSession);
        assertEquals(1, mBeanServer.queryMBeans(name, null).size());
    }

    @Test
    public void testUnregisterSessionMBeanIfNecessary() throws Exception {
        buildStatsKeyExpectations(mockery, mockKey, NORMAL);

        final StatsSession mockSession = mockery.mock(StatsSession.class);
        final StatsSessionMXBean mockSessionMBean = mockery.mock(StatsSessionMXBean.class);

        mockery.checking(new Expectations() {{
            allowing(mockSession).getKey();
            will(returnValue(mockKey));
            ignoring(mockSession);
        }});

        ObjectName name =
            new ObjectName(buildKeyName(NAMESPACE, mockKey, TYPE_KEYS, SUBTYPE_SESSION, true));
        mBeanServer.registerMBean(mockSessionMBean, name);

        mxBeanRegistrar.unregisterSessionMXBeanIfNecessary(mockKey);
        assertTrue(mBeanServer.queryMBeans(name, null).isEmpty());
    }

    @Test
    public void testUnregisterSessionMBeanIfNotNecessary() throws Exception {
        buildStatsKeyExpectations(mockery, mockKey, NORMAL);

        // Nothing registered under this name: the call must be a harmless no-op.
        ObjectName name =
            new ObjectName(buildKeyName(NAMESPACE, mockKey, TYPE_KEYS, SUBTYPE_SESSION, true));

        mxBeanRegistrar.unregisterSessionMXBeanIfNecessary(mockKey);
        assertTrue(mBeanServer.queryMBeans(name, null).isEmpty());
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.naming;

import java.util.Enumeration;
import java.util.HashMap;
import java.util.Hashtable;

import javax.naming.Binding;
import javax.naming.CompositeName;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.naming.LinkRef;
import javax.naming.Name;
import javax.naming.NameAlreadyBoundException;
import javax.naming.NameClassPair;
import javax.naming.NameNotFoundException;
import javax.naming.NameParser;
import javax.naming.NamingEnumeration;
import javax.naming.NamingException;
import javax.naming.NotContextException;
import javax.naming.OperationNotSupportedException;
import javax.naming.Reference;
import javax.naming.Referenceable;
import javax.naming.spi.NamingManager;

/**
 * Catalina JNDI Context implementation.
 *
 * @author Remy Maucherat
 */
public class NamingContext implements Context {


    // -------------------------------------------------------------- Constants


    /**
     * Name parser for this context.
     */
    protected static final NameParser nameParser = new NameParserImpl();


    private static final org.apache.juli.logging.Log log =
        org.apache.juli.logging.LogFactory.getLog(NamingContext.class);


    // ----------------------------------------------------------- Constructors


    /**
     * Builds a naming context using the given environment.
     */
    public NamingContext(Hashtable<String,Object> env, String name)
        throws NamingException {
        this.bindings = new HashMap<String,NamingEntry>();
        this.env = new Hashtable<String,Object>();
        // FIXME ? Could be put in the environment ?
        this.name = name;
        // Populating the environment hashtable
        if (env != null ) {
            Enumeration<String> envEntries = env.keys();
            while (envEntries.hasMoreElements()) {
                String entryName = envEntries.nextElement();
                addToEnvironment(entryName, env.get(entryName));
            }
        }
    }


    /**
     * Builds a naming context using the given environment.
     */
    public NamingContext(Hashtable<String,Object> env, String name,
            HashMap<String,NamingEntry> bindings) throws NamingException {
        this(env, name);
        this.bindings = bindings;
    }


    // ----------------------------------------------------- Instance Variables


    /**
     * Environment.
     */
    protected Hashtable<String,Object> env;


    /**
     * The string manager for this package.
     */
    protected static final StringManager sm =
        StringManager.getManager(Constants.Package);


    /**
     * Bindings in this Context.
     */
    protected HashMap<String,NamingEntry> bindings;


    /**
     * Name of the associated Catalina Context.
     */
    protected String name;


    /**
     * Determines if an attempt to write to a read-only context results in an
     * exception or if the request is ignored.
     */
    private boolean exceptionOnFailedWrite = true;
    public boolean getExceptionOnFailedWrite() {
        return exceptionOnFailedWrite;
    }
    public void setExceptionOnFailedWrite(boolean exceptionOnFailedWrite) {
        this.exceptionOnFailedWrite = exceptionOnFailedWrite;
    }


    // -------------------------------------------------------- Context Methods


    /**
     * Retrieves the named object. If name is empty, returns a new instance
     * of this context (which represents the same naming context as this
     * context, but its environment may be modified independently and it may
     * be accessed concurrently).
     *
     * @param name the name of the object to look up
     * @return the object bound to name
     * @exception NamingException if a naming exception is encountered
     */
    @Override
    public Object lookup(Name name)
        throws NamingException {
        return lookup(name, true);
    }


    /**
     * Retrieves the named object.
     *
     * @param name the name of the object to look up
     * @return the object bound to name
     * @exception NamingException if a naming exception is encountered
     */
    @Override
    public Object lookup(String name)
        throws NamingException {
        return lookup(new CompositeName(name), true);
    }


    /**
     * Binds a name to an object. All intermediate contexts and the target
     * context (that named by all but terminal atomic component of the name)
     * must already exist.
     *
     * @param name the name to bind; may not be empty
     * @param obj the object to bind; possibly null
     * @exception NameAlreadyBoundException if name is already bound
     * @exception javax.naming.directory.InvalidAttributesException if object
     * did not supply all mandatory attributes
     * @exception NamingException if a naming exception is encountered
     */
    @Override
    public void bind(Name name, Object obj)
        throws NamingException {
        bind(name, obj, false);
    }


    /**
     * Binds a name to an object.
     *
     * @param name the name to bind; may not be empty
     * @param obj the object to bind; possibly null
     * @exception NameAlreadyBoundException if name is already bound
     * @exception javax.naming.directory.InvalidAttributesException if object
     * did not supply all mandatory attributes
     * @exception NamingException if a naming exception is encountered
     */
    @Override
    public void bind(String name, Object obj)
        throws NamingException {
        bind(new CompositeName(name), obj);
    }


    /**
     * Binds a name to an object, overwriting any existing binding. All
     * intermediate contexts and the target context (that named by all but
     * terminal atomic component of the name) must already exist.
     * <p>
     * If the object is a DirContext, any existing attributes associated with
     * the name are replaced with those of the object. Otherwise, any
     * existing attributes associated with the name remain unchanged.
     *
     * @param name the name to bind; may not be empty
     * @param obj the object to bind; possibly null
     * @exception javax.naming.directory.InvalidAttributesException if object
     * did not supply all mandatory attributes
     * @exception NamingException if a naming exception is encountered
     */
    @Override
    public void rebind(Name name, Object obj)
        throws NamingException {
        bind(name, obj, true);
    }


    /**
     * Binds a name to an object, overwriting any existing binding.
     *
     * @param name the name to bind; may not be empty
     * @param obj the object to bind; possibly null
     * @exception javax.naming.directory.InvalidAttributesException if object
     * did not supply all mandatory attributes
     * @exception NamingException if a naming exception is encountered
     */
    @Override
    public void rebind(String name, Object obj)
        throws NamingException {
        rebind(new CompositeName(name), obj);
    }


    /**
     * Unbinds the named object. Removes the terminal atomic name in name
     * from the target context--that named by all but the terminal atomic
     * part of name.
     * <p>
     * This method is idempotent. It succeeds even if the terminal atomic
     * name is not bound in the target context, but throws
     * NameNotFoundException if any of the intermediate contexts do not exist.
     *
     * @param name the name to bind; may not be empty
     * @exception NameNotFoundException if an intermediate context does not
     * exist
     * @exception NamingException if a naming exception is encountered
     */
    @Override
    public void unbind(Name name) throws NamingException {

        if (!checkWritable()) {
            return;
        }

        while ((!name.isEmpty()) && (name.get(0).length() == 0))
            name = name.getSuffix(1);
        if (name.isEmpty())
            throw new NamingException
                (sm.getString("namingContext.invalidName"));

        NamingEntry entry = bindings.get(name.get(0));

        if (entry == null) {
            throw new NameNotFoundException
                (sm.getString("namingContext.nameNotBound", name, name.get(0)));
        }

        if (name.size() > 1) {
            if (entry.type == NamingEntry.CONTEXT) {
                ((Context) entry.value).unbind(name.getSuffix(1));
            } else {
                throw new NamingException
                    (sm.getString("namingContext.contextExpected"));
            }
        } else {
            bindings.remove(name.get(0));
        }

    }


    /**
     * Unbinds the named object.
     *
     * @param name the name to bind; may not be empty
     * @exception NameNotFoundException if an intermediate context does not
     * exist
     * @exception NamingException if a naming exception is encountered
     */
    @Override
    public void unbind(String name)
        throws NamingException {
        unbind(new CompositeName(name));
    }


    /**
     * Binds a new name to the object bound to an old name, and unbinds the
     * old name. Both names are relative to this context. Any attributes
     * associated with the old name become associated with the new name.
     * Intermediate contexts of the old name are not changed.
     *
     * @param oldName the name of the existing binding; may not be empty
     * @param newName the name of the new binding; may not be empty
     * @exception NameAlreadyBoundException if newName is already bound
     * @exception NamingException if a naming exception is encountered
     */
    @Override
    public void rename(Name oldName, Name newName)
        throws NamingException {
        Object value = lookup(oldName);
        bind(newName, value);
        unbind(oldName);
    }


    /**
     * Binds a new name to the object bound to an old name, and unbinds the
     * old name.
     *
     * @param oldName the name of the existing binding; may not be empty
     * @param newName the name of the new binding; may not be empty
     * @exception NameAlreadyBoundException if newName is already bound
     * @exception NamingException if a naming exception is encountered
     */
    @Override
    public void rename(String oldName, String newName)
        throws NamingException {
        rename(new CompositeName(oldName), new CompositeName(newName));
    }


    /**
     * Enumerates the names bound in the named context, along with the class
     * names of objects bound to them. The contents of any subcontexts are
     * not included.
     * <p>
     * If a binding is added to or removed from this context, its effect on
     * an enumeration previously returned is undefined.
     *
     * @param name the name of the context to list
     * @return an enumeration of the names and class names of the bindings in
     * this context. Each element of the enumeration is of type NameClassPair.
     * @exception NamingException if a naming exception is encountered
     */
    @Override
    public NamingEnumeration<NameClassPair> list(Name name)
        throws NamingException {
        // Removing empty parts
        while ((!name.isEmpty()) && (name.get(0).length() == 0))
            name = name.getSuffix(1);
        if (name.isEmpty()) {
            return new NamingContextEnumeration(bindings.values().iterator());
        }

        NamingEntry entry = bindings.get(name.get(0));

        if (entry == null) {
            throw new NameNotFoundException
                (sm.getString("namingContext.nameNotBound", name, name.get(0)));
        }

        if (entry.type != NamingEntry.CONTEXT) {
            throw new NamingException
                (sm.getString("namingContext.contextExpected"));
        }
        return ((Context) entry.value).list(name.getSuffix(1));
    }


    /**
     * Enumerates the names bound in the named context, along with the class
     * names of objects bound to them.
     *
     * @param name the name of the context to list
     * @return an enumeration of the names and class names of the bindings in
     * this context. Each element of the enumeration is of type NameClassPair.
     * @exception NamingException if a naming exception is encountered
     */
    @Override
    public NamingEnumeration<NameClassPair> list(String name)
        throws NamingException {
        return list(new CompositeName(name));
    }


    /**
     * Enumerates the names bound in the named context, along with the
     * objects bound to them. The contents of any subcontexts are not
     * included.
     * <p>
     * If a binding is added to or removed from this context, its effect on
     * an enumeration previously returned is undefined.
     *
     * @param name the name of the context to list
     * @return an enumeration of the bindings in this context.
     * Each element of the enumeration is of type Binding.
     * @exception NamingException if a naming exception is encountered
     */
    @Override
    public NamingEnumeration<Binding> listBindings(Name name)
        throws NamingException {
        // Removing empty parts
        while ((!name.isEmpty()) && (name.get(0).length() == 0))
            name = name.getSuffix(1);
        if (name.isEmpty()) {
            return new NamingContextBindingsEnumeration(bindings.values().iterator(),
                    this);
        }

        NamingEntry entry = bindings.get(name.get(0));

        if (entry == null) {
            throw new NameNotFoundException
                (sm.getString("namingContext.nameNotBound", name, name.get(0)));
        }

        if (entry.type != NamingEntry.CONTEXT) {
            throw new NamingException
                (sm.getString("namingContext.contextExpected"));
        }
        return ((Context) entry.value).listBindings(name.getSuffix(1));
    }


    /**
     * Enumerates the names bound in the named context, along with the
     * objects bound to them.
     *
     * @param name the name of the context to list
     * @return an enumeration of the bindings in this context.
     * Each element of the enumeration is of type Binding.
     * @exception NamingException if a naming exception is encountered
     */
    @Override
    public NamingEnumeration<Binding> listBindings(String name)
        throws NamingException {
        return listBindings(new CompositeName(name));
    }


    /**
     * Destroys the named context and removes it from the namespace. Any
     * attributes associated with the name are also removed. Intermediate
     * contexts are not destroyed.
     * <p>
     * This method is idempotent. It succeeds even if the terminal atomic
     * name is not bound in the target context, but throws
     * NameNotFoundException if any of the intermediate contexts do not exist.
     *
     * In a federated naming system, a context from one naming system may be
     * bound to a name in another. One can subsequently look up and perform
     * operations on the foreign context using a composite name. However, an
     * attempt destroy the context using this composite name will fail with
     * NotContextException, because the foreign context is not a "subcontext"
     * of the context in which it is bound. Instead, use unbind() to remove
     * the binding of the foreign context. Destroying the foreign context
     * requires that the destroySubcontext() be performed on a context from
     * the foreign context's "native" naming system.
     *
     * @param name the name of the context to be destroyed; may not be empty
     * @exception NameNotFoundException if an intermediate context does not
     * exist
     * @exception NotContextException if the name is bound but does not name
     * a context, or does not name a context of the appropriate type
     */
    @Override
    public void destroySubcontext(Name name) throws NamingException {

        if (!checkWritable()) {
            return;
        }

        while ((!name.isEmpty()) && (name.get(0).length() == 0))
            name = name.getSuffix(1);
        if (name.isEmpty())
            throw new NamingException
                (sm.getString("namingContext.invalidName"));

        NamingEntry entry = bindings.get(name.get(0));

        if (entry == null) {
            throw new NameNotFoundException
                (sm.getString("namingContext.nameNotBound", name, name.get(0)));
        }

        if (name.size() > 1) {
            if (entry.type == NamingEntry.CONTEXT) {
                ((Context) entry.value).destroySubcontext(name.getSuffix(1));
            } else {
                throw new NamingException
                    (sm.getString("namingContext.contextExpected"));
            }
        } else {
            if (entry.type == NamingEntry.CONTEXT) {
                ((Context) entry.value).close();
                bindings.remove(name.get(0));
            } else {
                throw new NotContextException
                    (sm.getString("namingContext.contextExpected"));
            }
        }

    }


    /**
     * Destroys the named context and removes it from the namespace.
     *
     * @param name the name of the context to be destroyed; may not be empty
     * @exception NameNotFoundException if an intermediate context does not
     * exist
     * @exception NotContextException if the name is bound but does not name
     * a context, or does not name a context of the appropriate type
     */
    @Override
    public void destroySubcontext(String name)
        throws NamingException {
        destroySubcontext(new CompositeName(name));
    }


    /**
     * Creates and binds a new context. Creates a new context with the given
     * name and binds it in the target context (that named by all but
     * terminal atomic component of the name). All intermediate contexts and
     * the target context must already exist.
     *
     * @param name the name of the context to create; may not be empty
     * @return the newly created context
     * @exception NameAlreadyBoundException if name is already bound
     * @exception javax.naming.directory.InvalidAttributesException if creation
     * of the sub-context requires specification of mandatory attributes
     * @exception NamingException if a naming exception is encountered
     */
    @Override
    public Context createSubcontext(Name name) throws NamingException {
        if (!checkWritable()) {
            return null;
        }

        NamingContext newContext = new NamingContext(env, this.name);
        bind(name, newContext);

        newContext.setExceptionOnFailedWrite(getExceptionOnFailedWrite());

        return newContext;
    }


    /**
     * Creates and binds a new context.
     *
     * @param name the name of the context to create; may not be empty
     * @return the newly created context
     * @exception NameAlreadyBoundException if name is already bound
     * @exception javax.naming.directory.InvalidAttributesException if creation
     * of the sub-context requires specification of mandatory attributes
     * @exception NamingException if a naming exception is encountered
     */
    @Override
    public Context createSubcontext(String name)
        throws NamingException {
        return createSubcontext(new CompositeName(name));
    }


    /**
     * Retrieves the named object, following links except for the terminal
     * atomic component of the name. If the object bound to name is not a
     * link, returns the object itself.
     *
     * @param name the name of the object to look up
     * @return the object bound to name, not following the terminal link
     * (if any).
     * @exception NamingException if a naming exception is encountered
     */
    @Override
    public Object lookupLink(Name name)
        throws NamingException {
        return lookup(name, false);
    }


    /**
     * Retrieves the named object, following links except for the terminal
     * atomic component of the name.
     *
     * @param name the name of the object to look up
     * @return the object bound to name, not following the terminal link
     * (if any).
     * @exception NamingException if a naming exception is encountered
     */
    @Override
    public Object lookupLink(String name)
        throws NamingException {
        return lookup(new CompositeName(name), false);
    }


    /**
     * Retrieves the parser associated with the named context. In a
     * federation of namespaces, different naming systems will parse names
     * differently. This method allows an application to get a parser for
     * parsing names into their atomic components using the naming convention
     * of a particular naming system. Within any single naming system,
     * NameParser objects returned by this method must be equal (using the
     * equals() test).
     *
     * @param name the name of the context from which to get the parser
     * @return a name parser that can parse compound names into their atomic
     * components
     * @exception NamingException if a naming exception is encountered
     */
    @Override
    public NameParser getNameParser(Name name)
        throws NamingException {

        while ((!name.isEmpty()) && (name.get(0).length() == 0))
            name = name.getSuffix(1);
        if (name.isEmpty())
            return nameParser;

        if (name.size() > 1) {
            // FIX: bindings maps names to NamingEntry wrappers, never directly
            // to Context instances, so the previous "instanceof Context" test
            // on the raw map value could never succeed and every compound name
            // threw NotContextException. Unwrap the entry instead.
            NamingEntry entry = bindings.get(name.get(0));
            if ((entry != null) && (entry.type == NamingEntry.CONTEXT)) {
                return ((Context) entry.value).getNameParser(name.getSuffix(1));
            } else {
                throw new NotContextException
                    (sm.getString("namingContext.contextExpected"));
            }
        }

        return nameParser;

    }


    /**
     * Retrieves the parser associated with the named context.
     *
     * @param name the name of the context from which to get the parser
     * @return a name parser that can parse compound names into their atomic
     * components
     * @exception NamingException if a naming exception is encountered
     */
    @Override
    public NameParser getNameParser(String name)
        throws NamingException {
        return getNameParser(new CompositeName(name));
    }


    /**
     * Composes the name of this context with a name relative to this context.
     * <p>
     * Given a name (name) relative to this context, and the name (prefix)
     * of this context relative to one of its ancestors, this method returns
     * the composition of the two names using the syntax appropriate for the
     * naming system(s) involved. That is, if name names an object relative
     * to this context, the result is the name of the same object, but
     * relative to the ancestor context. None of the names may be null.
     *
     * @param name a name relative to this context
     * @param prefix the name of this context relative to one of its ancestors
     * @return the composition of prefix and name
     * @exception NamingException if a naming exception is encountered
     */
    @Override
    public Name composeName(Name name, Name prefix) throws NamingException {
        prefix = (Name) prefix.clone();
        return prefix.addAll(name);
    }


    /**
     * Composes the name of this context with a name relative to this context.
     *
     * @param name a name relative to this context
     * @param prefix the name of this context relative to one of its ancestors
     * @return the composition of prefix and name
     * @exception NamingException if a naming exception is encountered
     */
    @Override
    public String composeName(String name, String prefix)
        throws NamingException {
        return prefix + "/" + name;
    }


    /**
     * Adds a new environment property to the environment of this context. If
     * the property already exists, its value is overwritten.
     *
     * @param propName the name of the environment property to add; may not
     * be null
     * @param propVal the value of the property to add; may not be null
     * @exception NamingException if a naming exception is encountered
     */
    @Override
    public Object addToEnvironment(String propName, Object propVal)
        throws NamingException {
        return env.put(propName, propVal);
    }


    /**
     * Removes an environment property from the environment of this context.
     *
     * @param propName the name of the environment property to remove;
     * may not be null
     * @exception NamingException if a naming exception is encountered
     */
    @Override
    public Object removeFromEnvironment(String propName)
        throws NamingException {
        return env.remove(propName);
    }


    /**
     * Retrieves the environment in effect for this context. See class
     * description for more details on environment properties.
     * The caller should not make any changes to the object returned: their
     * effect on the context is undefined. The environment of this context
     * may be changed using addToEnvironment() and removeFromEnvironment().
     *
     * @return the environment of this context; never null
     * @exception NamingException if a naming exception is encountered
     */
    @Override
    public Hashtable<?,?> getEnvironment()
        throws NamingException {
        return env;
    }


    /**
     * Closes this context. This method releases this context's resources
     * immediately, instead of waiting for them to be released automatically
     * by the garbage collector.
     * This method is idempotent: invoking it on a context that has already
     * been closed has no effect. Invoking any other method on a closed
     * context is not allowed, and results in undefined behaviour.
     *
     * @exception NamingException if a naming exception is encountered
     */
    @Override
    public void close() throws NamingException {
        if (!checkWritable()) {
            return;
        }
        env.clear();
    }


    /**
     * Retrieves the full name of this context within its own namespace.
     * <p>
     * Many naming services have a notion of a "full name" for objects in
     * their respective namespaces. For example, an LDAP entry has a
     * distinguished name, and a DNS record has a fully qualified name. This
     * method allows the client application to retrieve this name. The string
     * returned by this method is not a JNDI composite name and should not be
     * passed directly to context methods. In naming systems for which the
     * notion of full name does not make sense,
     * OperationNotSupportedException is thrown.
     *
     * @return this context's name in its own namespace; never null
     * @exception OperationNotSupportedException if the naming system does
     * not have the notion of a full name
     * @exception NamingException if a naming exception is encountered
     */
    @Override
    public String getNameInNamespace()
        throws NamingException {
        throw new OperationNotSupportedException
            (sm.getString("namingContext.noAbsoluteName"));
        //FIXME ?
    }


    // ------------------------------------------------------ Protected Methods


    /**
     * Retrieves the named object.
     *
     * @param name the name of the object to look up
     * @param resolveLinks If true, the links will be resolved
     * @return the object bound to name
     * @exception NamingException if a naming exception is encountered
     */
    protected Object lookup(Name name, boolean resolveLinks)
        throws NamingException {

        // Removing empty parts
        while ((!name.isEmpty()) && (name.get(0).length() == 0))
            name = name.getSuffix(1);
        if (name.isEmpty()) {
            // If name is empty, a newly allocated naming context is returned
            return new NamingContext(env, this.name, bindings);
        }

        NamingEntry entry = bindings.get(name.get(0));

        if (entry == null) {
            throw new NameNotFoundException
                (sm.getString("namingContext.nameNotBound", name, name.get(0)));
        }

        if (name.size() > 1) {
            // If the size of the name is greater that 1, then we go through a
            // number of subcontexts.
            if (entry.type != NamingEntry.CONTEXT) {
                throw new NamingException
                    (sm.getString("namingContext.contextExpected"));
            }
            return ((Context) entry.value).lookup(name.getSuffix(1));
        } else {
            if ((resolveLinks) && (entry.type == NamingEntry.LINK_REF)) {
                String link = ((LinkRef) entry.value).getLinkName();
                if (link.startsWith(".")) {
                    // Link relative to this context
                    return lookup(link.substring(1));
                } else {
                    return (new InitialContext(env)).lookup(link);
                }
            } else if (entry.type == NamingEntry.REFERENCE) {
                try {
                    Object obj = NamingManager.getObjectInstance
                        (entry.value, name, this, env);
                    if(entry.value instanceof ResourceRef) {
                        // FIX: a missing "singleton" RefAddr used to trigger a
                        // NullPointerException, which the catch block below
                        // converted into a spurious NamingException. Treat an
                        // absent attribute as singleton (cache the resolved
                        // object) instead of failing the lookup.
                        boolean singleton = true;
                        javax.naming.RefAddr addr =
                                ((ResourceRef) entry.value).get("singleton");
                        if (addr != null) {
                            singleton = Boolean.parseBoolean(
                                    (String) addr.getContent());
                        }
                        if (singleton) {
                            entry.type = NamingEntry.ENTRY;
                            entry.value = obj;
                        }
                    }
                    return obj;
                } catch (NamingException e) {
                    throw e;
                } catch (Exception e) {
                    log.warn(sm.getString
                             ("namingContext.failResolvingReference"), e);
                    throw new NamingException(e.getMessage());
                }
            } else {
                return entry.value;
            }
        }

    }


    /**
     * Binds a name to an object. All intermediate contexts and the target
     * context (that named by all but terminal atomic component of the name)
     * must already exist.
     *
     * @param name the name to bind; may not be empty
     * @param obj the object to bind; possibly null
     * @param rebind if true, then perform a rebind (ie, overwrite)
     * @exception NameAlreadyBoundException if name is already bound
     * @exception javax.naming.directory.InvalidAttributesException if object
     * did not supply all mandatory attributes
     * @exception NamingException if a naming exception is encountered
     */
    protected void bind(Name name, Object obj, boolean rebind)
        throws NamingException {

        if (!checkWritable()) {
            return;
        }

        while ((!name.isEmpty()) && (name.get(0).length() == 0))
            name = name.getSuffix(1);
        if (name.isEmpty())
            throw new NamingException
                (sm.getString("namingContext.invalidName"));

        NamingEntry entry = bindings.get(name.get(0));

        if (name.size() > 1) {
            if (entry == null) {
                throw new NameNotFoundException(sm.getString(
                        "namingContext.nameNotBound", name, name.get(0)));
            }
            if (entry.type == NamingEntry.CONTEXT) {
                if (rebind) {
                    ((Context) entry.value).rebind(name.getSuffix(1), obj);
                } else {
                    ((Context) entry.value).bind(name.getSuffix(1), obj);
                }
            } else {
                throw new NamingException
                    (sm.getString("namingContext.contextExpected"));
            }
        } else {
            if ((!rebind) && (entry != null)) {
                throw new NameAlreadyBoundException
                    (sm.getString("namingContext.alreadyBound", name.get(0)));
            } else {
                // Getting the type of the object and wrapping it within a new
                // NamingEntry
                Object toBind =
                    NamingManager.getStateToBind(obj, name, this, env);
                if (toBind instanceof Context) {
                    entry = new NamingEntry(name.get(0), toBind,
                                            NamingEntry.CONTEXT);
                } else if (toBind instanceof LinkRef) {
                    entry = new NamingEntry(name.get(0), toBind,
                                            NamingEntry.LINK_REF);
                } else if (toBind instanceof Reference) {
                    entry = new NamingEntry(name.get(0), toBind,
                                            NamingEntry.REFERENCE);
                } else if (toBind instanceof Referenceable) {
                    toBind = ((Referenceable) toBind).getReference();
                    entry = new NamingEntry(name.get(0), toBind,
                                            NamingEntry.REFERENCE);
                } else {
                    entry = new NamingEntry(name.get(0), toBind,
                                            NamingEntry.ENTRY);
                }
                bindings.put(name.get(0), entry);
            }
        }

    }


    /**
     * Returns true if writing is allowed on this context.
     */
    protected boolean isWritable() {
        return ContextAccessController.isWritable(name);
    }


    /**
     * Throws a naming exception is Context is not writable.
     */
    protected boolean checkWritable() throws NamingException {
        if (isWritable()) {
            return true;
        } else {
            if (exceptionOnFailedWrite) {
                throw new javax.naming.OperationNotSupportedException(
                        sm.getString("namingContext.readOnly"));
            }
        }
        return false;
    }

}
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * JFlex 1.4.1 * * Copyright (C) 1998-2004 Gerwin Klein <lsf@jflex.de> * * All rights reserved. * * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License. See the file * * COPYRIGHT for more information. * * * * This program is distributed in the hope that it will be useful, * * but WITHOUT ANY WARRANTY; without even the implied warranty of * * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * * GNU General Public License for more details. * * * * You should have received a copy of the GNU General Public License along * * with this program; if not, write to the Free Software Foundation, Inc., * * 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ package JFlex.gui; import java.awt.*; import java.awt.event.*; import java.io.File; import JFlex.GeneratorException; import JFlex.Options; import JFlex.Skeleton; /** * A dialog for setting JFlex options * * @author Gerwin Klein * @version $Revision: 1.1.1.1 $, $Date: 2005/04/26 16:34:05 $ */ public class OptionsDialog extends Dialog { private Frame owner; private Button skelBrowse; private TextField skelFile; private Button ok; private Button defaults; private Checkbox dump; private Checkbox verbose; private Checkbox jlex; private Checkbox no_minimize; private Checkbox no_backup; private Checkbox time; private Checkbox dot; private Checkbox tableG; private Checkbox switchG; private Checkbox packG; /** * Create a new options dialog * * @param owner */ public OptionsDialog(Frame owner) { super(owner, "Options"); this.owner = owner; setup(); pack(); addWindowListener( new WindowAdapter() { public void windowClosing(WindowEvent e) { close(); } }); } public void setup() { // create components ok = new Button("Ok"); defaults = new Button("Defaults"); skelBrowse = new Button(" 
Browse"); skelFile = new TextField(); skelFile.setEditable(false); dump = new Checkbox(" dump"); verbose = new Checkbox(" verbose"); jlex = new Checkbox(" JLex compatibility"); no_minimize = new Checkbox(" skip minimization"); no_backup = new Checkbox(" no backup file"); time = new Checkbox(" time statistics"); dot = new Checkbox(" dot graph files"); CheckboxGroup codeG = new CheckboxGroup(); tableG = new Checkbox(" table",Options.gen_method == Options.TABLE, codeG); switchG = new Checkbox(" switch",Options.gen_method == Options.SWITCH, codeG); packG = new Checkbox(" pack",Options.gen_method == Options.PACK, codeG); // setup interaction ok.addActionListener( new ActionListener() { public void actionPerformed(ActionEvent e) { close(); } } ); defaults.addActionListener( new ActionListener() { public void actionPerformed(ActionEvent e) { setDefaults(); } } ); skelBrowse.addActionListener( new ActionListener() { public void actionPerformed(ActionEvent e) { skelBrowse(); } } ); tableG.addItemListener( new ItemListener() { public void itemStateChanged(ItemEvent e) { setGenMethod(); } } ); verbose.addItemListener( new ItemListener() { public void itemStateChanged(ItemEvent e) { Options.verbose = verbose.getState(); } } ); dump.addItemListener( new ItemListener() { public void itemStateChanged(ItemEvent e) { Options.dump = dump.getState(); } } ); jlex.addItemListener( new ItemListener() { public void itemStateChanged(ItemEvent e) { Options.jlex = jlex.getState(); } } ); no_minimize.addItemListener( new ItemListener() { public void itemStateChanged(ItemEvent e) { Options.no_minimize = no_minimize.getState(); } } ); no_backup.addItemListener( new ItemListener() { public void itemStateChanged(ItemEvent e) { Options.no_backup = no_backup.getState(); } } ); dot.addItemListener( new ItemListener() { public void itemStateChanged(ItemEvent e) { Options.dot = dot.getState(); } } ); time.addItemListener( new ItemListener() { public void itemStateChanged(ItemEvent e) { Options.time = 
time.getState(); } } ); // setup layout GridPanel panel = new GridPanel(4,7,10,10); panel.setInsets( new Insets(10,5,5,10) ); panel.add(3,0,ok); panel.add(3,1,defaults); panel.add(0,0,2,1,Handles.BOTTOM,new Label("skeleton file:")); panel.add(0,1,2,1,skelFile); panel.add(2,1,1,1,Handles.TOP, skelBrowse); panel.add(0,2,1,1,Handles.BOTTOM,new Label("code:")); panel.add(0,3,1,1,tableG); panel.add(0,4,1,1,switchG); panel.add(0,5,1,1,packG); panel.add(1,3,1,1,dump); panel.add(1,4,1,1,verbose); panel.add(1,5,1,1,time); panel.add(2,3,1,1,no_minimize); panel.add(2,4,1,1,no_backup); panel.add(3,3,1,1,jlex); panel.add(3,4,1,1,dot); add("Center",panel); updateState(); } private void skelBrowse() { FileDialog d = new FileDialog(owner , "Choose file", FileDialog.LOAD); d.show(); if (d.getFile() != null) { File skel = new File(d.getDirectory()+d.getFile()); try { Skeleton.readSkelFile(skel); skelFile.setText(skel.toString()); } catch (GeneratorException e) { // do nothing } } } private void setGenMethod() { if ( tableG.getState() ) { Options.gen_method = Options.TABLE; return; } if ( switchG.getState() ) { Options.gen_method = Options.SWITCH; return; } if ( packG.getState() ) { Options.gen_method = Options.PACK; return; } } private void updateState() { dump.setState(Options.dump); verbose.setState(Options.verbose); jlex.setState(Options.jlex); no_minimize.setState(Options.no_minimize); no_backup.setState(Options.no_backup); time.setState(Options.time); dot.setState(Options.dot); tableG.setState(Options.gen_method == Options.TABLE); switchG.setState(Options.gen_method == Options.SWITCH); packG.setState(Options.gen_method == Options.PACK); } private void setDefaults() { Options.setDefaults(); Skeleton.readDefault(); skelFile.setText(""); updateState(); } public void close() { hide(); } }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.action.fieldstats; import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.StringHelper; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; import java.net.InetAddress; public abstract class FieldStats<T> implements Writeable, ToXContent { private final byte type; private long maxDoc; private long docCount; private long sumDocFreq; private long sumTotalTermFreq; private boolean isSearchable; private boolean isAggregatable; protected T minValue; protected T maxValue; FieldStats(byte type, long maxDoc, boolean isSearchable, boolean isAggregatable) { this(type, maxDoc, 0, 0, 0, isSearchable, isAggregatable, null, null); } FieldStats(byte type, long maxDoc, long 
docCount, long sumDocFreq, long sumTotalTermFreq, boolean isSearchable, boolean isAggregatable, T minValue, T maxValue) { this.type = type; this.maxDoc = maxDoc; this.docCount = docCount; this.sumDocFreq = sumDocFreq; this.sumTotalTermFreq = sumTotalTermFreq; this.isSearchable = isSearchable; this.isAggregatable = isAggregatable; this.minValue = minValue; this.maxValue = maxValue; } byte getType() { return this.type; } /** * @return the total number of documents. * * Note that, documents marked as deleted that haven't yet been merged way aren't taken into account. */ public long getMaxDoc() { return maxDoc; } /** * @return the number of documents that have at least one term for this field, * or -1 if this measurement isn't available. * * Note that, documents marked as deleted that haven't yet been merged way aren't taken into account. */ public long getDocCount() { return docCount; } /** * @return The percentage of documents that have at least one value for this field. * * This is a derived statistic and is based on: 'doc_count / max_doc' */ public int getDensity() { if (docCount < 0 || maxDoc <= 0) { return -1; } return (int) (docCount * 100 / maxDoc); } /** * @return the sum of each term's document frequency in this field, or -1 if this measurement isn't available. * Document frequency is the number of documents containing a particular term. * * Note that, documents marked as deleted that haven't yet been merged way aren't taken into account. */ public long getSumDocFreq() { return sumDocFreq; } /** * @return the sum of the term frequencies of all terms in this field across all documents, * or -1 if this measurement * isn't available. Term frequency is the total number of occurrences of a term in a particular document and field. * * Note that, documents marked as deleted that haven't yet been merged way aren't taken into account. 
*/ public long getSumTotalTermFreq() { return sumTotalTermFreq; } /** * @return <code>true</code> if any of the instances of the field name is searchable. */ public boolean isSearchable() { return isSearchable; } /** * @return <code>true</code> if any of the instances of the field name is aggregatable. */ public boolean isAggregatable() { return isAggregatable; } /** * @return the lowest value in the field. * * Note that, documents marked as deleted that haven't yet been merged way aren't taken into account. */ public T getMinValue() { return minValue; } /** * @return the highest value in the field. * * Note that, documents marked as deleted that haven't yet been merged way aren't taken into account. */ public T getMaxValue() { return maxValue; } /** * @return the lowest value in the field represented as a string. * * Note that, documents marked as deleted that haven't yet been merged way aren't taken into account. */ public abstract String getMinValueAsString(); /** * @return the highest value in the field represented as a string. * * Note that, documents marked as deleted that haven't yet been merged way aren't taken into account. */ public abstract String getMaxValueAsString(); /** * @param value The string to be parsed * @param optionalFormat A string describing how to parse the specified value. Whether this parameter is supported * depends on the implementation. If optionalFormat is specified and the implementation * doesn't support it an {@link UnsupportedOperationException} is thrown */ protected abstract T valueOf(String value, String optionalFormat); /** * Accumulates the provided stats into this stats instance. 
*/ public final void accumulate(FieldStats other) { this.maxDoc += other.maxDoc; if (other.docCount == -1) { this.docCount = -1; } else if (this.docCount != -1) { this.docCount += other.docCount; } if (other.sumDocFreq == -1) { this.sumDocFreq = -1; } else if (this.sumDocFreq != -1) { this.sumDocFreq += other.sumDocFreq; } if (other.sumTotalTermFreq == -1) { this.sumTotalTermFreq = -1; } else if (this.sumTotalTermFreq != -1) { this.sumTotalTermFreq += other.sumTotalTermFreq; } isSearchable |= other.isSearchable; isAggregatable |= other.isAggregatable; assert type == other.getType(); updateMinMax((T) other.minValue, (T) other.maxValue); } private void updateMinMax(T min, T max) { if (minValue == null) { minValue = min; } else if (min != null && compare(minValue, min) > 0) { minValue = min; } if (maxValue == null) { maxValue = max; } else if (max != null && compare(maxValue, max) < 0) { maxValue = max; } } protected abstract int compare(T o1, T o2); @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(Fields.MAX_DOC, maxDoc); builder.field(Fields.DOC_COUNT, docCount); builder.field(Fields.DENSITY, getDensity()); builder.field(Fields.SUM_DOC_FREQ, sumDocFreq); builder.field(Fields.SUM_TOTAL_TERM_FREQ, sumTotalTermFreq); builder.field(Fields.SEARCHABLE, isSearchable); builder.field(Fields.AGGREGATABLE, isAggregatable); toInnerXContent(builder); builder.endObject(); return builder; } protected void toInnerXContent(XContentBuilder builder) throws IOException { builder.field(Fields.MIN_VALUE, getMinValue()); builder.field(Fields.MIN_VALUE_AS_STRING, getMinValueAsString()); builder.field(Fields.MAX_VALUE, getMaxValue()); builder.field(Fields.MAX_VALUE_AS_STRING, getMaxValueAsString()); } @Override public final void writeTo(StreamOutput out) throws IOException { out.writeByte(type); out.writeLong(maxDoc); out.writeLong(docCount); out.writeLong(sumDocFreq); 
out.writeLong(sumTotalTermFreq); out.writeBoolean(isSearchable); out.writeBoolean(isAggregatable); boolean hasMinMax = minValue != null; out.writeBoolean(hasMinMax); if (hasMinMax) { writeMinMax(out); } } protected abstract void writeMinMax(StreamOutput out) throws IOException; /** * @return <code>true</code> if this instance matches with the provided index constraint, * otherwise <code>false</code> is returned */ public boolean match(IndexConstraint constraint) { if (minValue == null) { return false; } int cmp; T value = valueOf(constraint.getValue(), constraint.getOptionalFormat()); if (constraint.getProperty() == IndexConstraint.Property.MIN) { cmp = compare(minValue, value); } else if (constraint.getProperty() == IndexConstraint.Property.MAX) { cmp = compare(maxValue, value); } else { throw new IllegalArgumentException("Unsupported property [" + constraint.getProperty() + "]"); } switch (constraint.getComparison()) { case GT: return cmp > 0; case GTE: return cmp >= 0; case LT: return cmp < 0; case LTE: return cmp <= 0; default: throw new IllegalArgumentException("Unsupported comparison [" + constraint.getComparison() + "]"); } } public static class Long extends FieldStats<java.lang.Long> { public Long(long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq, boolean isSearchable, boolean isAggregatable, long minValue, long maxValue) { super((byte) 0, maxDoc, docCount, sumDocFreq, sumTotalTermFreq, isSearchable, isAggregatable, minValue, maxValue); } public Long(long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq, boolean isSearchable, boolean isAggregatable) { super((byte) 0, maxDoc, docCount, sumDocFreq, sumTotalTermFreq, isSearchable, isAggregatable, null, null); } public Long(long maxDoc, boolean isSearchable, boolean isAggregatable) { super((byte) 0, maxDoc, isSearchable, isAggregatable); } @Override public int compare(java.lang.Long o1, java.lang.Long o2) { return o1.compareTo(o2); } @Override public void writeMinMax(StreamOutput 
out) throws IOException { out.writeLong(minValue); out.writeLong(maxValue); } @Override public java.lang.Long valueOf(String value, String optionalFormat) { return java.lang.Long.parseLong(value); } @Override public String getMinValueAsString() { return minValue != null ? java.lang.Long.toString(minValue) : null; } @Override public String getMaxValueAsString() { return maxValue != null ? java.lang.Long.toString(maxValue) : null; } } public static class Double extends FieldStats<java.lang.Double> { public Double(long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq, boolean isSearchable, boolean isAggregatable, double minValue, double maxValue) { super((byte) 1, maxDoc, docCount, sumDocFreq, sumTotalTermFreq, isSearchable, isAggregatable, minValue, maxValue); } public Double(long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq, boolean isSearchable, boolean isAggregatable) { super((byte) 1, maxDoc, docCount, sumDocFreq, sumTotalTermFreq, isSearchable, isAggregatable, null, null); } public Double(long maxDoc, boolean isSearchable, boolean isAggregatable) { super((byte) 1, maxDoc, isSearchable, isAggregatable); } @Override public int compare(java.lang.Double o1, java.lang.Double o2) { return o1.compareTo(o2); } @Override public void writeMinMax(StreamOutput out) throws IOException { out.writeDouble(minValue); out.writeDouble(maxValue); } @Override public java.lang.Double valueOf(String value, String optionalFormat) { if (optionalFormat != null) { throw new UnsupportedOperationException("custom format isn't supported"); } return java.lang.Double.parseDouble(value); } @Override public String getMinValueAsString() { return minValue != null ? java.lang.Double.toString(minValue) : null; } @Override public String getMaxValueAsString() { return maxValue != null ? 
java.lang.Double.toString(maxValue) : null; } } public static class Date extends FieldStats<java.lang.Long> { private FormatDateTimeFormatter formatter; public Date(long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq, boolean isSearchable, boolean isAggregatable, FormatDateTimeFormatter formatter, long minValue, long maxValue) { super((byte) 2, maxDoc, docCount, sumDocFreq, sumTotalTermFreq, isSearchable, isAggregatable, minValue, maxValue); this.formatter = formatter; } public Date(long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq, boolean isSearchable, boolean isAggregatable, FormatDateTimeFormatter formatter) { super((byte) 2, maxDoc, docCount, sumDocFreq, sumTotalTermFreq, isSearchable, isAggregatable, null, null); this.formatter = formatter; } public Date(long maxDoc, boolean isSearchable, boolean isAggregatable, FormatDateTimeFormatter formatter) { super((byte) 2, maxDoc, isSearchable, isAggregatable); this.formatter = formatter; } @Override public int compare(java.lang.Long o1, java.lang.Long o2) { return o1.compareTo(o2); } @Override public void writeMinMax(StreamOutput out) throws IOException { out.writeString(formatter.format()); out.writeLong(minValue); out.writeLong(maxValue); } @Override public java.lang.Long valueOf(String value, String fmt) { FormatDateTimeFormatter f = formatter; if (fmt != null) { f = Joda.forPattern(fmt); } return f.parser().parseDateTime(value).getMillis(); } @Override public String getMinValueAsString() { return minValue != null ? formatter.printer().print(minValue) : null; } @Override public String getMaxValueAsString() { return maxValue != null ? 
formatter.printer().print(maxValue) : null; } } public static class Text extends FieldStats<BytesRef> { public Text(long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq, boolean isSearchable, boolean isAggregatable, BytesRef minValue, BytesRef maxValue) { super((byte) 3, maxDoc, docCount, sumDocFreq, sumTotalTermFreq, isSearchable, isAggregatable, minValue, maxValue); } public Text(long maxDoc, boolean isSearchable, boolean isAggregatable) { super((byte) 3, maxDoc, isSearchable, isAggregatable); } @Override public int compare(BytesRef o1, BytesRef o2) { return o1.compareTo(o2); } @Override public void writeMinMax(StreamOutput out) throws IOException { out.writeBytesRef(minValue); out.writeBytesRef(maxValue); } @Override protected BytesRef valueOf(String value, String optionalFormat) { if (optionalFormat != null) { throw new UnsupportedOperationException("custom format isn't supported"); } return new BytesRef(value); } @Override public String getMinValueAsString() { return minValue != null ? minValue.utf8ToString() : null; } @Override public String getMaxValueAsString() { return maxValue != null ? 
maxValue.utf8ToString() : null; } @Override protected void toInnerXContent(XContentBuilder builder) throws IOException { builder.field(Fields.MIN_VALUE, getMinValueAsString()); builder.field(Fields.MAX_VALUE, getMaxValueAsString()); } } public static class Ip extends FieldStats<InetAddress> { public Ip(long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq, boolean isSearchable, boolean isAggregatable, InetAddress minValue, InetAddress maxValue) { super((byte) 4, maxDoc, docCount, sumDocFreq, sumTotalTermFreq, isSearchable, isAggregatable, minValue, maxValue); } public Ip(long maxDoc, boolean isSearchable, boolean isAggregatable) { super((byte) 4, maxDoc, isSearchable, isAggregatable); } @Override public int compare(InetAddress o1, InetAddress o2) { byte[] b1 = InetAddressPoint.encode(o1); byte[] b2 = InetAddressPoint.encode(o2); return StringHelper.compare(b1.length, b1, 0, b2, 0); } @Override public void writeMinMax(StreamOutput out) throws IOException { byte[] b1 = InetAddressPoint.encode(minValue); byte[] b2 = InetAddressPoint.encode(maxValue); out.writeByte((byte) b1.length); out.writeBytes(b1); out.writeByte((byte) b2.length); out.writeBytes(b2); } @Override public InetAddress valueOf(String value, String fmt) { return InetAddresses.forString(value); } @Override public String getMinValueAsString() { return minValue != null ? NetworkAddress.format(minValue) : null; } @Override public String getMaxValueAsString() { return maxValue != null ? 
NetworkAddress.format(maxValue) : null; } } public static FieldStats readFrom(StreamInput in) throws IOException { byte type = in.readByte(); long maxDoc = in.readLong(); long docCount = in.readLong(); long sumDocFreq = in.readLong(); long sumTotalTermFreq = in.readLong(); boolean isSearchable = in.readBoolean(); boolean isAggregatable = in.readBoolean(); boolean hasMinMax = in.readBoolean(); switch (type) { case 0: if (hasMinMax) { return new Long(maxDoc, docCount, sumDocFreq, sumTotalTermFreq, isSearchable, isAggregatable, in.readLong(), in.readLong()); } return new Long(maxDoc, docCount, sumDocFreq, sumTotalTermFreq, isSearchable, isAggregatable); case 1: if (hasMinMax) { return new Double(maxDoc, docCount, sumDocFreq, sumTotalTermFreq, isSearchable, isAggregatable, in.readDouble(), in.readDouble()); } return new Double(maxDoc, docCount, sumDocFreq, sumTotalTermFreq, isSearchable, isAggregatable); case 2: FormatDateTimeFormatter formatter = Joda.forPattern(in.readString()); if (hasMinMax) { return new Date(maxDoc, docCount, sumDocFreq, sumTotalTermFreq, isSearchable, isAggregatable, formatter, in.readLong(), in.readLong()); } return new Date(maxDoc, docCount, sumDocFreq, sumTotalTermFreq, isSearchable, isAggregatable, formatter); case 3: if (hasMinMax) { return new Text(maxDoc, docCount, sumDocFreq, sumTotalTermFreq, isSearchable, isAggregatable, in.readBytesRef(), in.readBytesRef()); } return new Text(maxDoc, docCount, sumDocFreq, sumTotalTermFreq, isSearchable, isAggregatable, null, null); case 4: InetAddress min = null; InetAddress max = null; if (hasMinMax) { int l1 = in.readByte(); byte[] b1 = new byte[l1]; int l2 = in.readByte(); byte[] b2 = new byte[l2]; min = InetAddressPoint.decode(b1); max = InetAddressPoint.decode(b2); } return new Ip(maxDoc, docCount, sumDocFreq, sumTotalTermFreq, isSearchable, isAggregatable, min, max); default: throw new IllegalArgumentException("Unknown type."); } } public static String typeName(byte type) { switch (type) { case 
0: return "whole-number"; case 1: return "floating-point"; case 2: return "date"; case 3: return "text"; case 4: return "ip"; default: throw new IllegalArgumentException("Unknown type."); } } private final static class Fields { final static String MAX_DOC = new String("max_doc"); final static String DOC_COUNT = new String("doc_count"); final static String DENSITY = new String("density"); final static String SUM_DOC_FREQ = new String("sum_doc_freq"); final static String SUM_TOTAL_TERM_FREQ = new String("sum_total_term_freq"); final static String SEARCHABLE = new String("searchable"); final static String AGGREGATABLE = new String("aggregatable"); final static String MIN_VALUE = new String("min_value"); final static String MIN_VALUE_AS_STRING = new String("min_value_as_string"); final static String MAX_VALUE = new String("max_value"); final static String MAX_VALUE_AS_STRING = new String("max_value_as_string"); } }
package org.jgroups.util; import org.jgroups.annotations.GuardedBy; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import java.util.function.Consumer; import java.util.stream.LongStream; /** * Table for storing requests associated with monotonically increasing sequence numbers (seqnos).<p/> * Could be used for example in {@link org.jgroups.blocks.RequestCorrelator}. Grows and shrinks when needed. * Addition is always at the end, yielding monotonically increasing seqnos. Removal is done by nulling the element(s) * between low and high and advancing the low pointer whenever possible.<p/> * See <a href="https://issues.jboss.org/browse/JGRP-1982">JGRP-1982</a> for details. * @author Bela Ban * @since 3.6.7 */ public class RequestTable<T> { protected T[] buffer; // the ring buffer protected long low; // pointing to the next element to be removed; low is always <= high protected long high; // pointing to the next element to be added; high is >= low protected int removes_till_compaction; // number of removes before attempt compaction (0 disables this) protected int num_removes; // current number of removes protected final Lock lock=new ReentrantLock(); // to synchronize modifications public interface Visitor<T> { boolean visit(T element); } public RequestTable(final int capacity) { this(capacity, 0, 0); } public RequestTable(final int capacity, long low, long high) { int len=Util.getNextHigherPowerOfTwo(capacity); this.buffer=(T[])new Object[len]; this.low=low; this.high=high; } public long low() {return low;} public long high() {return high;} public int capacity() {return buffer.length;} public int index(long seqno) {return (int)((seqno) & (capacity()-1));} public int removesTillCompaction() {return removes_till_compaction;} public RequestTable<T> removesTillCompaction(int rems) {this.removes_till_compaction=rems; return this;} /** * Adds a new element and returns the sequence 
number at which it was inserted. Advances the high * pointer and grows the buffer if needed. * @param element the element to be added. Must not be null or an exception will be thrown * @return the seqno at which element was added */ public long add(T element) { lock.lock(); try { long next=high+1; if(next - low > capacity()) _grow(next-low); int high_index=index(high); buffer[high_index]=element; return high++; } finally { lock.unlock(); } } public T get(long seqno) { lock.lock(); try { int index=index(seqno); return buffer[index]; } finally { lock.unlock(); } } /** * Removes the element at the index matching seqno. If seqno == low, tries to advance low until a non-null element * is encountered, up to high * @param seqno * @return */ public T remove(long seqno) { lock.lock(); try { if(seqno < low || seqno > high) return null; int index=index(seqno); T retval=buffer[index]; if(retval != null && removes_till_compaction > 0) num_removes++; buffer[index]=null; if(seqno == low) advanceLow(); if(removes_till_compaction > 0 && num_removes >= removes_till_compaction) { _compact(); num_removes=0; } return retval; } finally { lock.unlock(); } } /** * Removes all elements in the stream. 
Calls the consumer (if not null) on non-null elements */ public RequestTable<T> removeMany(LongStream seqnos, Consumer<T> consumer) { if(seqnos == null) return this; AtomicBoolean advance=new AtomicBoolean(false); seqnos.forEach(seqno -> { T element=null; lock.lock(); try { if(seqno < low || seqno > high) return; int index=index(seqno); if((element=buffer[index]) != null && removes_till_compaction > 0) num_removes++; buffer[index]=null; if(seqno == low) advance.set(true); } finally { lock.unlock(); } if(consumer != null) consumer.accept(element); }); lock.lock(); try { if(advance.get()) advanceLow(); if(removes_till_compaction > 0 && num_removes >= removes_till_compaction) { _compact(); num_removes=0; } } finally { lock.unlock(); } return this; } /** Removes all elements, compacts the buffer and sets low=high=0 */ public RequestTable<T> clear() {return clear(0);} public RequestTable<T> clear(long mark) { lock.lock(); try { low=high=mark; buffer=(T[])new Object[2]; return this; } finally { lock.unlock(); } } public RequestTable<T> forEach(Visitor<T> visitor) { if(visitor == null) return null; lock.lock(); try { for(long i=low, num_iterations=0; i < high && num_iterations < buffer.length; i++, num_iterations++) { int index=index(i); T el=buffer[index]; if(!visitor.visit(el)) break; } return this; } finally { lock.unlock(); } } /** * Non-blocking alternative to {@link #forEach(Visitor)}: iteration is performed on the array that exists at the * time of this call. Changes to the underlying array will not be reflected in the iteration. * @param visitor the {@link Visitor}. 
*/ public RequestTable<T> forEachNonBlocking(Visitor<T> visitor) { if(visitor == null) return null; T[] buf; long lo, hi; lock.lock(); try { buf=this.buffer; lo=this.low; hi=this.high; } finally { lock.unlock(); } for(long i=lo, num_iterations=0; i < hi && num_iterations < buf.length; i++, num_iterations++) { int index=index(i); T el=buf[index]; if(!visitor.visit(el)) break; } return this; } /** * Grows the array to at least new_capacity. This method is mainly used for testing and is not typically called * directly, but indirectly when adding elements and the underlying array has no space left. * @param new_capacity the new capacity of the underlying array. Will be rounded up to the nearest power of 2 value. * A value smaller than the current capacity is ignored. */ public RequestTable<T> grow(int new_capacity) { lock.lock(); try { _grow(new_capacity); return this; } finally { lock.unlock(); } } /** * Shrinks the underlying array to half its size _if_ the new array can hold all of the existing elements. * @return true if the compaction succeeded, or false if it failed (e.g. not enough space) */ public boolean compact() { lock.lock(); try { return _compact(); } finally { lock.unlock(); } } /** * Checks if there is at least buffer.length/2 contiguous space in range [low+1 .. high-1] available */ public boolean contiguousSpaceAvailable() { lock.lock(); try { return _contiguousSpaceAvailable(buffer.length >> 1); } finally { lock.unlock(); } } /** * Returns the number of non-null elements in range [low .. 
high-1] * @return */ public int size() { int retval=0; for(long i=low, num_iterations=0; i < high && num_iterations < buffer.length; i++, num_iterations++) { int index=index(i); if(buffer[index] != null) retval++; } return retval; } public String toString() { return String.format("low=%d high=%d cap=%d, %d element(s)", low, high, buffer.length, size()); } @GuardedBy("lock") protected void _grow(long new_capacity) { int new_cap=Util.getNextHigherPowerOfTwo((int)Math.max(buffer.length, new_capacity)); if(new_cap == buffer.length) return; _copy(new_cap); } /** * Shrinks the array to half of its current size if the current number of elements fit into half of the capacity. * @return true if the compaction succeeded, else false (e.g. when the current elements would not fit) */ @GuardedBy("lock") protected boolean _compact() { int new_cap=buffer.length >> 1; // needs to be a power of 2 for efficient modulo operation, e.g. for index() // boolean compactable=this.buffer.length > 0 && (size() <= new_cap || (contiguousSpaceAvailable=_contiguousSpaceAvailable(new_cap))); boolean compactable=this.buffer.length > 0 && high-low <= new_cap; if(!compactable) return false; // not enough space to shrink the buffer to half its size _copy(new_cap); return true; } public String dumpContents() { StringBuilder sb=new StringBuilder(); lock.lock(); try { int new_cap=buffer.length >> 1; for(long i=low, num_iterations=0; i < high && num_iterations < buffer.length; i++, num_iterations++) { int index=index(i); T el=buffer[index]; if(el != null) { long hash=el.hashCode(); int small_idx=index(i, new_cap); sb.append(String.format("seqno %d: index: %d val: %d, index in %d-buffer: %d\n", i, index, hash, new_cap, small_idx)); } } } finally { lock.unlock(); } return sb.toString(); } /** Copies elements from old into new array */ protected void _copy(int new_cap) { // copy elements from [low to high-1] into new indices in new array T[] new_buf=(T[])new Object[new_cap]; int new_len=new_buf.length; int 
old_len=this.buffer.length; for(long i=low, num_iterations=0; i < high && num_iterations < old_len; i++, num_iterations++) { int old_index=index(i, old_len); if(this.buffer[old_index] != null) { int new_index=index(i, new_len); new_buf[new_index]=this.buffer[old_index]; } } this.buffer=new_buf; } /** * Check if we have at least space_needed contiguous free slots available in range [low+1 .. high-1] * @param space_needed the number of contiguous free slots required to do compaction, usually half of the current * buffer size * @return true if a contiguous space was found, false otherwise */ @GuardedBy("lock") protected boolean _contiguousSpaceAvailable(int space_needed) { int num_slots_scanned=0; int size_of_contiguous_area=0; if(high-low-1 < space_needed) return false; for(long i=low+1; i < high; i++) { num_slots_scanned++; int index=index(i); if(this.buffer[index] == null) { if(++size_of_contiguous_area >= space_needed) return true; } else { size_of_contiguous_area=0; // we scanned more than half of the current array and found an occupied slot, so there is no chance of // finding space_needed contiguous free slots as we have less than half of the current array to scan if(num_slots_scanned > space_needed || high-i-1 < space_needed) return false; } } return false; } protected int highestContiguousSpaceAvailable() { int size_of_current_contiguous_area=0; int highest=0; for(long i=low+1; i < high; i++) { int index=index(i); if(this.buffer[index] == null) size_of_current_contiguous_area++; else { highest=Math.max(highest, size_of_current_contiguous_area); size_of_current_contiguous_area=0; } } return Math.max(highest, size_of_current_contiguous_area); } @GuardedBy("lock") protected void advanceLow() { while(low < high) { int index=index(low); if(buffer[index] != null) break; low++; } } protected static int index(long seqno, int length) {return (int)((seqno) & length-1);} }
package ru.stqa.pft.appmanager; import org.openqa.selenium.By; import org.openqa.selenium.WebElement; import org.openqa.selenium.firefox.FirefoxDriver; import ru.stqa.pft.model.ContactData; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; /** * Created by Owner on 7/17/2016. */ public class ContactHelper extends ContactHelperBase { public ContactHelper(FirefoxDriver wd) { super(wd); } public void goToHomePage() { wd.findElement(By.linkText("home")).click(); } public void submitContactForm() { click(By.name("submit")); } public void fillOutContactForm(ContactData contactData) { wd.get("http://localhost/addressbook/edit.php"); type(By.name("firstname"), contactData.getFirstname()); click(By.cssSelector("label")); type(By.name("middlename"), contactData.getMiddlename()); click(By.name("theform")); type(By.name("lastname"), contactData.getLastname()); type(By.name("nickname"), contactData.getNickname()); } public void modify(ContactData contact) { selectContactById(contact.getId()); initContactModification((contact.getId())); updateContactForm(); submitContactModification(); goToHomePage(); } public void deleteContact() { wd.findElement(By.xpath("//div[@id='content']/form[2]/div[2]/input")).click(); wd.switchTo().alert().accept(); } public void selectContact(int index) { wd.findElements(By.name("selected[]")).get(index).click(); } public void selectContactById(int id) { wd.findElement(By.cssSelector("input[id='" + id + "']")).click(); } private void openDetailsPage(int id) { wd.findElement(By.xpath(".//*[@id='" + id + "']/ ../../td[7]/a")).click(); } public ContactData infoFromEditForm(ContactData contact) { goToHomePage(); initContactModificationById(contact.getId()); String firstname = wd.findElement(By.name("firstname")).getAttribute("value"); String lastname = wd.findElement(By.name("lastname")).getAttribute("value"); String address = wd.findElement(By.name("address")).getText(); String home = 
wd.findElement(By.name("home")).getAttribute("value"); String mobile = wd.findElement(By.name("mobile")).getAttribute("value"); String work = wd.findElement(By.name("work")).getAttribute("value"); String email = wd.findElement(By.name("email")).getAttribute("value"); String email2 = wd.findElement(By.name ("email2")).getAttribute("value"); String email3 = wd.findElement(By.name("email3")).getAttribute("value"); wd.navigate().back(); return new ContactData().withId(contact.getId()) .withFirstname(firstname).withLastname(lastname) .withHomePhone(home) .withMobilePhone(mobile) .withWorkPhone(work) .withAddress(address) .withEmail(email) .withEmail2(email2) .withEmail3(email3); } public ContactData infoFromDetailsForm(ContactData contact) { goToDetailsForm(contact.getId()); String contactDetails = wd.findElement(By.xpath(".//*[@id='content']")).getText(); goToHomePage(); return new ContactData().withId(contact.getId()).withContactDetails(contactDetails); } private void goToDetailsForm(int id) { click(By.xpath("//input[@id = '" + id + "']/../following-sibling::td[6]/a")); } private void initContactModificationById(int id) { // WebElement checkbox = wd.findElement(By.cssSelector(String.format("input[value='%s']", id))); // WebElement row = checkbox.findElement(By.xpath("./../..")); // List<WebElement> cells = row.findElements(By.tagName("td")); //cells.get(7).findElement(By.tagName("a")).click(); wd.findElement(By.cssSelector(String.format("a[href='edit.php?id=%s']", id))).click(); } public void initContactModification(int id) { wd.findElement(By.cssSelector("a[href='edit.php?id=" + id + "']")).click(); } public void submitContactModification() { click(By.name("update")); } public void selectContactMofication(int id) { wd.findElement(By.cssSelector("input[id='" + id + "']")).click(); } public void updateContactForm() { wd.findElement(By.name("nickname")).click(); wd.findElement(By.name("nickname")).clear(); wd.findElement(By.name("nickname")).sendKeys("Jaifar-Jaifar"); } 
private void logOut() { } public boolean isThereAContact() { return isElementPresent(By.xpath("selected[]")); } public int getContactCount() { return wd.findElements(By.name("selected[]")).size(); } public void create(ContactData contact) { fillOutContactForm(contact); submitContactForm(); goToHomePage(); } public void delete(int index) { selectContact(index); deleteContact(); returnsToHomePage(); } public void delete(ContactData contact) { selectContactById(contact.getId()); deleteContact(); returnsToHomePage(); } public List<ContactData> list() { List<ContactData> contacts = new ArrayList<ContactData>(); List<WebElement> rows = wd.findElements(By.name("entry")); for (WebElement row : rows) { List<WebElement> cells = row.findElements(By.tagName("td")); String lastname = cells.get(1).getText(); String firstname = cells.get(2).getText(); String address = cells.get(3).getText(); String AllEmails = cells.get(4).getText(); String Allphones = cells.get(5).getText(); int id = Integer.parseInt(row.findElement(By.tagName("input")).getAttribute("value")); contacts.add(new ContactData().withId(id).withLastname(lastname).withFirstname(firstname).withAddress(address) .withAllemails(AllEmails).withAllPhones(Allphones)); } return contacts; } public Set<ContactData> all() { Set<ContactData> contacts = new HashSet<ContactData>(); List<WebElement> rows = wd.findElements(By.name("entry")); for (WebElement row : rows) { List<WebElement> cells = row.findElements(By.tagName("td")); int id = Integer.parseInt(row.findElement(By.tagName("input")).getAttribute("value")); String lastname = cells.get(1).getText(); String firstname = cells.get(2).getText(); String address = cells.get(3).getText(); String allPhones = cells.get(5).getText(); String allEmails = cells.get(4).getText(); contacts.add(new ContactData().withId(id).withLastname(lastname).withFirstname(firstname).withAllPhones(allPhones) .withAllemails(allEmails).withAddress(address)); } return contacts; } protected void 
fillOutContactForm() { wd.findElement(By.xpath("//table[@id='maintable']/tbody/tr[4]/td[8]/a/img")).click(); wd.findElement(By.name("nickname")).click(); wd.findElement(By.name("nickname")).clear(); wd.findElement(By.name("nickname")).sendKeys("Jaifar Jaifar"); wd.findElement(By.xpath("//div[@id='content']/form[1]")).click(); wd.findElement(By.name("homepage")).click(); wd.findElement(By.name("homepage")).clear(); wd.findElement(By.name("homepage")).sendKeys("www.facebook.com"); wd.findElement(By.xpath("//div[@id='content']/form[1]/input[22]")).click(); wd.findElement(By.linkText("home page")).click(); } public void returnsToHomePage() { click(By.linkText("home")); } }
/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package com.jme3.gde.terraineditor;

import com.jme3.asset.TextureKey;
import com.jme3.bounding.BoundingBox;
import com.jme3.gde.core.assets.ProjectAssetManager;
import com.jme3.gde.core.scene.SceneApplication;
import com.jme3.gde.core.sceneexplorer.nodes.actions.AbstractNewSpatialWizardAction;
import com.jme3.gde.core.sceneexplorer.nodes.actions.NewSpatialAction;
import com.jme3.scene.Node;
import com.jme3.scene.Spatial;
import com.jme3.terrain.Terrain;
import com.jme3.terrain.geomipmap.TerrainLodControl;
import com.jme3.terrain.geomipmap.TerrainQuad;
import com.jme3.terrain.geomipmap.lodcalc.DistanceLodCalculator;
import com.jme3.terrain.heightmap.AbstractHeightMap;
import com.jme3.texture.Texture;
import com.jme3.texture.Texture.WrapMode;
import java.awt.Component;
import java.awt.Dialog;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.text.MessageFormat;
import javax.imageio.ImageIO;
import javax.swing.JComponent;
import jme3tools.converters.ImageToAwt;
import org.openide.DialogDisplayer;
import org.openide.WizardDescriptor;
import org.openide.loaders.DataObject;
import org.openide.util.Exceptions;

/**
 * SceneExplorer action that runs the "Terrain Wizard" and adds a new
 * {@link TerrainQuad} (with material, alpha maps and LOD control) under the
 * selected scene node.
 *
 * @author normenhansen
 */
@org.openide.util.lookup.ServiceProvider(service = NewSpatialAction.class)
@SuppressWarnings({"unchecked", "rawtypes"})
public class AddTerrainAction extends AbstractNewSpatialWizardAction {

    // Lazily-built wizard panels; created once in getPanels().
    private WizardDescriptor.Panel[] panels;

    public AddTerrainAction() {
        name = "Terrain..";
    }

    /**
     * Shows the modal terrain wizard for the given node.
     * @return the completed WizardDescriptor when the user finished the wizard, null when cancelled
     */
    @Override
    protected Object showWizard(org.openide.nodes.Node node) {
        WizardDescriptor wizardDescriptor = new WizardDescriptor(getPanels());
        // {0} will be replaced by WizardDescriptor.Panel.getComponent().getName()
        wizardDescriptor.setTitleFormat(new MessageFormat("{0}"));
        wizardDescriptor.setTitle("Terrain Wizard");
        // Stash the selected node so generateTerrain() can retrieve it from the descriptor.
        wizardDescriptor.putProperty("main_node", node);
        Dialog dialog = DialogDisplayer.getDefault().createDialog(wizardDescriptor);
        dialog.setVisible(true); // modal: blocks until the wizard is closed
        dialog.toFront();
        boolean cancelled = wizardDescriptor.getValue() != WizardDescriptor.FINISH_OPTION;
        if (!cancelled) {
            return wizardDescriptor;
        }
        return null;
    }

    /**
     * Creates the terrain spatial from the wizard result; returns null when the
     * wizard was cancelled or terrain generation failed with an IOException.
     */
    @Override
    protected Spatial doCreateSpatial(Node parent, Object properties) {
        if (properties != null) {
            try {
                return generateTerrain(parent, (WizardDescriptor) properties);
            } catch (IOException ex) {
                Exceptions.printStackTrace(ex);
            }
        }
        return null;
    }

    /**
     * Pulls sizes and the optional heightmap out of the wizard properties,
     * loads/smooths the heightmap, then delegates to doCreateTerrain().
     */
    protected Spatial generateTerrain(Node parent, final WizardDescriptor wizardDescriptor) throws IOException {
        org.openide.nodes.Node selectedNode = (org.openide.nodes.Node) wizardDescriptor.getProperty("main_node");
        final Spatial spatial = selectedNode.getLookup().lookup(Spatial.class); // NOTE(review): unused
        String sceneName = selectedNode.getLookup().lookup(DataObject.class).getName();

        int totalSize = (Integer) wizardDescriptor.getProperty("totalSize");
        int patchSize = (Integer) wizardDescriptor.getProperty("patchSize");
        int alphaTextureSize = (Integer) wizardDescriptor.getProperty("alphaTextureSize");

        float[] heightmapData = null;
        AbstractHeightMap heightmap = null;
        if (wizardDescriptor.getProperty("abstractHeightMap") != null)
            heightmap = (AbstractHeightMap) wizardDescriptor.getProperty("abstractHeightMap");
        if (heightmap != null) {
            heightmap.load(); // can take a while
            Float smooth = (Float) wizardDescriptor.getProperty("heightMapSmooth");
            if (smooth > 0)
                heightmap.smooth(smooth, 2);
            heightmapData = heightmap.getHeightMap();
        }

        return doCreateTerrain(parent, totalSize, patchSize, alphaTextureSize, heightmapData, sceneName, selectedNode);
    }

    /**
     * Builds the TerrainQuad, writes its alpha-blend textures and default diffuse
     * texture into the project's assets folder, sets up the material and LOD
     * control, and attaches the terrain to {@code parent}.
     *
     * @param heightmapData flat height values, or null for a flat terrain
     * @throws IOException when the texture folders cannot be created or images cannot be written
     */
    protected Spatial doCreateTerrain(Node parent, int totalSize, int patchSize, int alphaTextureSize,
            float[] heightmapData, String sceneName, org.openide.nodes.Node selectedNode) throws IOException {
        final ProjectAssetManager manager = selectedNode.getLookup().lookup(ProjectAssetManager.class);

        Terrain terrain = new TerrainQuad("terrain-"+sceneName, patchSize, totalSize, heightmapData); //TODO make this pluggable for different Terrain implementations
        com.jme3.material.Material mat = new com.jme3.material.Material(manager, "Common/MatDefs/Terrain/TerrainLighting.j3md");

        String assetFolder = "";
        // If there is no project asset manager the terrain is returned bare (no material/LOD).
        if (manager != null) {
            assetFolder = manager.getAssetFolderName();

            // write out 3 alpha blend images
            for (int i=0; i<TerrainEditorController.NUM_ALPHA_TEXTURES; i++) {
                BufferedImage alphaBlend = new BufferedImage(alphaTextureSize, alphaTextureSize, BufferedImage.TYPE_INT_ARGB);
                if (i == 0) {
                    // the first alpha level should be opaque so we see the first texture over the whole terrain
                    for (int h=0; h<alphaTextureSize; h++)
                        for (int w=0; w<alphaTextureSize; w++)
                            alphaBlend.setRGB(w, h, 0x00FF0000);//argb
                }
                File textureFolder = new File(assetFolder + "/Textures/");
                if (!textureFolder.exists()) {
                    if (!textureFolder.mkdir()) {
                        throw new IOException("Could not create the Texture Folder (assets/Textures)!");
                    }
                }
                File alphaFolder = new File(assetFolder + "/Textures/terrain-alpha/");
                if (!alphaFolder.exists()) {
                    if (!alphaFolder.mkdir()) {
                        throw new IOException("Could not create the Terrain Alpha Folder (assets/Textures/terrain-alpha)!");
                    }
                }
                String alphaBlendFileName = "/Textures/terrain-alpha/"+sceneName+"-"+((Node)terrain).getName()+"-alphablend"+i+".png";
                File alphaImageFile = new File(assetFolder+alphaBlendFileName);
                ImageIO.write(alphaBlend, "png", alphaImageFile);
                // Load the image we just wrote as a texture and bind it to the matching material slot.
                Texture tex = manager.loadAsset(new TextureKey(alphaBlendFileName, false));
                switch (i) {
                    case 0:
                        mat.setTexture("AlphaMap", tex);
                        break;
                    case 1:
                        mat.setTexture("AlphaMap_1", tex);
                        break;
                    case 2:
                        mat.setTexture("AlphaMap_2", tex);
                        break;
                    default:
                        break;
                }
            }

            Texture defaultTexture = manager.loadTexture(TerrainEditorController.DEFAULT_TERRAIN_TEXTURE);

            // copy the default texture to the assets folder if it doesn't exist there yet
            String dirtTextureName = "/Textures/dirt.jpg";
            File dirtTextureFile = new File(assetFolder+dirtTextureName);
            if (!dirtTextureFile.exists()) {
                BufferedImage bi = ImageToAwt.convert(defaultTexture.getImage(), false, true, 0);
                ImageIO.write(bi, "jpg", dirtTextureFile);
            }

            // give the first layer default texture
            Texture dirtTexture = manager.loadTexture(dirtTextureName);
            dirtTexture.setWrap(WrapMode.Repeat);
            mat.setTexture("DiffuseMap", dirtTexture);
            mat.setFloat("DiffuseMap_0_scale", TerrainEditorController.DEFAULT_TEXTURE_SCALE);
            mat.setBoolean("WardIso", true);
            mat.setFloat("Shininess", 0.01f);

            ((Node)terrain).setMaterial(mat);
            ((Node)terrain).setModelBound(new BoundingBox());
            ((Node)terrain).updateModelBound();
            ((Node)terrain).setLocalTranslation(0, 0, 0);
            ((Node)terrain).setLocalScale(1f, 1f, 1f);

            // add the lod control
            TerrainLodControl control = new TerrainLodControl(terrain, SceneApplication.getApplication().getCamera());
            control.setLodCalculator(new DistanceLodCalculator(patchSize, 2.7f));
            ((Node)terrain).addControl(control);

            parent.attachChild((Node)terrain);
            //setNeedsSave(true);
            //addSpatialUndo(parent, (Node)terrain, jmeNodeParent);
        }
        return (Spatial)terrain;
    }

    /**
     * Initialize panels representing individual wizard's steps and sets
     * various properties for them influencing wizard appearance.
     */
    private WizardDescriptor.Panel[] getPanels() {
        if (panels == null) {
            panels = new WizardDescriptor.Panel[]{
                new CreateTerrainWizardPanel1(),
                new CreateTerrainWizardPanel2(),
                new CreateTerrainWizardPanel3()
            };
            String[] steps = new String[panels.length];
            for (int i = 0; i < panels.length; i++) {
                Component c = panels[i].getComponent();
                // Default step name to component name of panel. Mainly useful
                // for getting the name of the target chooser to appear in the
                // list of steps.
                steps[i] = c.getName();
                if (c instanceof JComponent) { // assume Swing components
                    JComponent jc = (JComponent) c;
                    // Sets step number of a component
                    jc.putClientProperty(WizardDescriptor.PROP_CONTENT_SELECTED_INDEX, i);
                    // Sets steps names for a panel
                    jc.putClientProperty(WizardDescriptor.PROP_CONTENT_DATA, steps);
                    // Turn on subtitle creation on each step
                    jc.putClientProperty(WizardDescriptor.PROP_AUTO_WIZARD_STYLE, Boolean.TRUE);
                    // Show steps on the left side with the image on the background
                    jc.putClientProperty(WizardDescriptor.PROP_CONTENT_DISPLAYED, Boolean.TRUE);
                    // Turn on numbering of all steps
                    jc.putClientProperty(WizardDescriptor.PROP_CONTENT_NUMBERED, Boolean.TRUE);
                }
            }
        }
        return panels;
    }
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.resourcegroups.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request object for adding a set of resources, identified by their ARNs, to a resource group.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/resource-groups-2017-11-27/GroupResources" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GroupResourcesRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The name or the ARN of the resource group to add resources to. */
    private String group;

    /** The list of ARNs for resources to be added to the group. */
    private java.util.List<String> resourceArns;

    /**
     * Sets the name or the ARN of the resource group to add resources to.
     *
     * @param group
     *        The name or the ARN of the resource group to add resources to.
     */
    public void setGroup(String group) {
        this.group = group;
    }

    /**
     * Returns the name or the ARN of the resource group to add resources to.
     *
     * @return The name or the ARN of the resource group to add resources to.
     */
    public String getGroup() {
        return this.group;
    }

    /**
     * Fluent setter for the group name or ARN.
     *
     * @param group
     *        The name or the ARN of the resource group to add resources to.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GroupResourcesRequest withGroup(String group) {
        setGroup(group);
        return this;
    }

    /**
     * Returns the list of ARNs for resources to be added to the group.
     *
     * @return The list of ARNs for resources to be added to the group.
     */
    public java.util.List<String> getResourceArns() {
        return resourceArns;
    }

    /**
     * Replaces the list of resource ARNs. A defensive copy of the given collection is stored;
     * passing {@code null} clears the list.
     *
     * @param resourceArns
     *        The list of ARNs for resources to be added to the group.
     */
    public void setResourceArns(java.util.Collection<String> resourceArns) {
        this.resourceArns = (resourceArns == null) ? null : new java.util.ArrayList<String>(resourceArns);
    }

    /**
     * Appends the given ARNs to the existing list (creating it if necessary).
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setResourceArns(java.util.Collection)} or {@link #withResourceArns(java.util.Collection)} if you want to
     * override the existing values.
     * </p>
     *
     * @param resourceArns
     *        The list of ARNs for resources to be added to the group.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GroupResourcesRequest withResourceArns(String... resourceArns) {
        if (this.resourceArns == null) {
            this.resourceArns = new java.util.ArrayList<String>(resourceArns.length);
        }
        java.util.Collections.addAll(this.resourceArns, resourceArns);
        return this;
    }

    /**
     * Fluent setter: replaces the list of resource ARNs with the given collection.
     *
     * @param resourceArns
     *        The list of ARNs for resources to be added to the group.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GroupResourcesRequest withResourceArns(java.util.Collection<String> resourceArns) {
        setResourceArns(resourceArns);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getGroup() != null) {
            sb.append("Group: ").append(getGroup()).append(",");
        }
        if (getResourceArns() != null) {
            sb.append("ResourceArns: ").append(getResourceArns());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof GroupResourcesRequest)) {
            return false;
        }
        GroupResourcesRequest other = (GroupResourcesRequest) obj;
        return java.util.Objects.equals(this.getGroup(), other.getGroup())
                && java.util.Objects.equals(this.getResourceArns(), other.getResourceArns());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation (null -> 0) as the generated code did.
        return java.util.Objects.hash(getGroup(), getResourceArns());
    }

    @Override
    public GroupResourcesRequest clone() {
        return (GroupResourcesRequest) super.clone();
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.mapper;

import com.carrotsearch.hppc.ObjectObjectMap;
import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
import com.google.common.collect.Lists;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.ElasticsearchIllegalStateException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.lucene.all.AllEntries;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.mapper.DocumentMapper.ParseListener;
import org.elasticsearch.index.mapper.object.RootObjectMapper;

import java.util.*;

/**
 * State carried through the parsing of one source document into Lucene documents.
 * {@link InternalParseContext} is the real implementation; {@link FilterParseContext}
 * wraps another context and is used by the create*Context() factory methods to
 * override a single aspect (copy-to, multi-fields, current doc, path, external value).
 */
public abstract class ParseContext {

    /** Fork of {@link org.apache.lucene.document.Document} with additional functionality.
     */
    public static class Document implements Iterable<IndexableField> {

        private final Document parent;          // enclosing document, null for the root
        private final String path;              // full path of this (possibly nested) document
        private final String prefix;            // path + "." — expected prefix for all non-meta field names
        private final List<IndexableField> fields;
        // lazily created; maps a caller-provided key to a field for later retrieval via getByKey()
        private ObjectObjectMap<Object, IndexableField> keyedFields;

        private Document(String path, Document parent) {
            fields = Lists.newArrayList();
            this.path = path;
            this.prefix = path.isEmpty() ? "" : path + ".";
            this.parent = parent;
        }

        public Document() {
            this("", null);
        }

        /**
         * Return the path associated with this document.
         */
        public String getPath() {
            return path;
        }

        /**
         * Return a prefix that all fields in this document should have.
         */
        public String getPrefix() {
            return prefix;
        }

        /**
         * Return the parent document, or null if this is the root document.
         */
        public Document getParent() {
            return parent;
        }

        @Override
        public Iterator<IndexableField> iterator() {
            return fields.iterator();
        }

        public List<IndexableField> getFields() {
            return fields;
        }

        public void add(IndexableField field) {
            // either a meta fields or starts with the prefix
            assert field.name().startsWith("_") || field.name().startsWith(prefix) : field.name() + " " + prefix;
            fields.add(field);
        }

        /** Add fields so that they can later be fetched using {@link #getByKey(Object)}. */
        public void addWithKey(Object key, IndexableField field) {
            if (keyedFields == null) {
                keyedFields = new ObjectObjectOpenHashMap<>();
            } else if (keyedFields.containsKey(key)) {
                throw new ElasticsearchIllegalStateException("Only one field can be stored per key");
            }
            keyedFields.put(key, field);
            add(field);
        }

        /** Get back fields that have been previously added with {@link #addWithKey(Object, IndexableField)}. */
        public IndexableField getByKey(Object key) {
            return keyedFields == null ? null : keyedFields.get(key);
        }

        /** Return all fields with the given name (linear scan), never null. */
        public IndexableField[] getFields(String name) {
            List<IndexableField> f = new ArrayList<>();
            for (IndexableField field : fields) {
                if (field.name().equals(name)) {
                    f.add(field);
                }
            }
            return f.toArray(new IndexableField[f.size()]);
        }

        /**
         * Returns an array of values of the field specified as the method parameter.
         * This method returns an empty array when there are no
         * matching fields.  It never returns null.
         * For {@link org.apache.lucene.document.IntField}, {@link org.apache.lucene.document.LongField}, {@link
         * org.apache.lucene.document.FloatField} and {@link org.apache.lucene.document.DoubleField} it returns the string value of the number.
         * If you want the actual numeric field instances back, use {@link #getFields}.
         * @param name the name of the field
         * @return a <code>String[]</code> of field values
         */
        public final String[] getValues(String name) {
            List<String> result = new ArrayList<>();
            for (IndexableField field : fields) {
                if (field.name().equals(name) && field.stringValue() != null) {
                    result.add(field.stringValue());
                }
            }
            return result.toArray(new String[result.size()]);
        }

        /** Return the first field with the given name, or null. */
        public IndexableField getField(String name) {
            for (IndexableField field : fields) {
                if (field.name().equals(name)) {
                    return field;
                }
            }
            return null;
        }

        /** Return the string value of the first matching field that has one, or null. */
        public String get(String name) {
            for (IndexableField f : fields) {
                if (f.name().equals(name) && f.stringValue() != null) {
                    return f.stringValue();
                }
            }
            return null;
        }

        /** Return the binary value of the first matching field that has one, or null. */
        public BytesRef getBinaryValue(String name) {
            for (IndexableField f : fields) {
                if (f.name().equals(name) && f.binaryValue() != null) {
                    return f.binaryValue();
                }
            }
            return null;
        }
    }

    /**
     * Pure delegating wrapper around another ParseContext; the create*Context()
     * factories subclass it anonymously to override exactly one accessor.
     */
    private static class FilterParseContext extends ParseContext {

        private final ParseContext in;

        private FilterParseContext(ParseContext in) {
            this.in = in;
        }

        @Override
        public boolean flyweight() {
            return in.flyweight();
        }

        @Override
        public DocumentMapperParser docMapperParser() {
            return in.docMapperParser();
        }

        @Override
        public boolean mappingsModified() {
            return in.mappingsModified();
        }

        @Override
        public void setMappingsModified() {
            in.setMappingsModified();
        }

        @Override
        public void setWithinNewMapper() {
            in.setWithinNewMapper();
        }

        @Override
        public void clearWithinNewMapper() {
            in.clearWithinNewMapper();
        }

        @Override
        public boolean isWithinNewMapper() {
            return in.isWithinNewMapper();
        }

        @Override
        public boolean isWithinCopyTo() {
            return in.isWithinCopyTo();
        }

        @Override
        public boolean isWithinMultiFields() {
            return in.isWithinMultiFields();
        }

        @Override
        public String index() {
            return in.index();
        }

        @Override
        public Settings indexSettings() {
            return in.indexSettings();
        }

        @Override
        public String type() {
            return in.type();
        }

        @Override
        public SourceToParse sourceToParse() {
            return in.sourceToParse();
        }

        @Override
        public BytesReference source() {
            return in.source();
        }

        @Override
        public void source(BytesReference source) {
            in.source(source);
        }

        @Override
        public ContentPath path() {
            return in.path();
        }

        @Override
        public XContentParser parser() {
            return in.parser();
        }

        @Override
        public ParseListener listener() {
            return in.listener();
        }

        @Override
        public Document rootDoc() {
            return in.rootDoc();
        }

        @Override
        public List<Document> docs() {
            return in.docs();
        }

        @Override
        public Document doc() {
            return in.doc();
        }

        @Override
        public void addDoc(Document doc) {
            in.addDoc(doc);
        }

        @Override
        public RootObjectMapper root() {
            return in.root();
        }

        @Override
        public DocumentMapper docMapper() {
            return in.docMapper();
        }

        @Override
        public AnalysisService analysisService() {
            return in.analysisService();
        }

        @Override
        public String id() {
            return in.id();
        }

        @Override
        public void ignoredValue(String indexName, String value) {
            in.ignoredValue(indexName, value);
        }

        @Override
        public String ignoredValue(String indexName) {
            return in.ignoredValue(indexName);
        }

        @Override
        public void id(String id) {
            in.id(id);
        }

        @Override
        public Field uid() {
            return in.uid();
        }

        @Override
        public void uid(Field uid) {
            in.uid(uid);
        }

        @Override
        public Field version() {
            return in.version();
        }

        @Override
        public void version(Field version) {
            in.version(version);
        }

        @Override
        public AllEntries allEntries() {
            return in.allEntries();
        }

        @Override
        public boolean externalValueSet() {
            return in.externalValueSet();
        }

        @Override
        public Object externalValue() {
            return in.externalValue();
        }

        @Override
        public float docBoost() {
            return in.docBoost();
        }

        @Override
        public void docBoost(float docBoost) {
            in.docBoost(docBoost);
        }

        @Override
        public StringBuilder stringBuilder() {
            return in.stringBuilder();
        }
    }

    /**
     * The concrete, mutable context used while parsing a document. Instances are
     * reused across documents via {@link #reset}.
     */
    public static class InternalParseContext extends ParseContext {

        private final DocumentMapper docMapper;
        private final DocumentMapperParser docMapperParser;
        private final ContentPath path;
        private XContentParser parser;
        private Document document;                          // the current (possibly nested) document
        private List<Document> documents = Lists.newArrayList(); // element 0 is the root document
        private final String index;
        @Nullable
        private final Settings indexSettings;
        private SourceToParse sourceToParse;
        private BytesReference source;
        private String id;
        private DocumentMapper.ParseListener listener;
        private Field uid, version;
        private StringBuilder stringBuilder = new StringBuilder(); // reused scratch buffer, see stringBuilder()
        private Map<String, String> ignoredValues = new HashMap<>();
        private boolean mappingsModified = false;
        private boolean withinNewMapper = false;
        private AllEntries allEntries = new AllEntries();
        private float docBoost = 1.0f;

        public InternalParseContext(String index, @Nullable Settings indexSettings, DocumentMapperParser docMapperParser, DocumentMapper docMapper, ContentPath path) {
            this.index = index;
            this.indexSettings = indexSettings;
            this.docMapper = docMapper;
            this.docMapperParser = docMapperParser;
            this.path = path;
        }

        /** Re-initializes all per-document state so the context can be reused for the next document. */
        public void reset(XContentParser parser, Document document, SourceToParse source, DocumentMapper.ParseListener listener) {
            this.parser = parser;
            this.document = document;
            if (document != null) {
                this.documents = Lists.newArrayList();
                this.documents.add(document);
            } else {
                this.documents = null;
            }
            this.uid = null;
            this.version = null;
            this.id = null;
            this.sourceToParse = source;
            this.source = source == null ? null : sourceToParse.source();
            this.path.reset();
            this.mappingsModified = false;
            this.withinNewMapper = false;
            this.listener = listener == null ? DocumentMapper.ParseListener.EMPTY : listener;
            this.allEntries = new AllEntries();
            this.ignoredValues.clear();
            this.docBoost = 1.0f;
        }

        public boolean flyweight() {
            return sourceToParse.flyweight();
        }

        public DocumentMapperParser docMapperParser() {
            return this.docMapperParser;
        }

        public boolean mappingsModified() {
            return this.mappingsModified;
        }

        public void setMappingsModified() {
            this.mappingsModified = true;
        }

        public void setWithinNewMapper() {
            this.withinNewMapper = true;
        }

        public void clearWithinNewMapper() {
            this.withinNewMapper = false;
        }

        public boolean isWithinNewMapper() {
            return withinNewMapper;
        }

        public String index() {
            return this.index;
        }

        @Nullable
        public Settings indexSettings() {
            return this.indexSettings;
        }

        public String type() {
            return sourceToParse.type();
        }

        public SourceToParse sourceToParse() {
            return this.sourceToParse;
        }

        public BytesReference source() {
            return source;
        }

        // only should be used by SourceFieldMapper to update with a compressed source
        public void source(BytesReference source) {
            this.source = source;
        }

        public ContentPath path() {
            return this.path;
        }

        public XContentParser parser() {
            return this.parser;
        }

        public DocumentMapper.ParseListener listener() {
            return this.listener;
        }

        public Document rootDoc() {
            return documents.get(0);
        }

        public List<Document> docs() {
            return this.documents;
        }

        public Document doc() {
            return this.document;
        }

        public void addDoc(Document doc) {
            this.documents.add(doc);
        }

        public RootObjectMapper root() {
            return docMapper.root();
        }

        public DocumentMapper docMapper() {
            return this.docMapper;
        }

        public AnalysisService analysisService() {
            return docMapperParser.analysisService;
        }

        public String id() {
            return id;
        }

        public void ignoredValue(String indexName, String value) {
            ignoredValues.put(indexName, value);
        }

        public String ignoredValue(String indexName) {
            return ignoredValues.get(indexName);
        }

        /**
         * Really, just the id mapper should set this.
         */
        public void id(String id) {
            this.id = id;
        }

        public Field uid() {
            return this.uid;
        }

        /**
         * Really, just the uid mapper should set this.
         */
        public void uid(Field uid) {
            this.uid = uid;
        }

        public Field version() {
            return this.version;
        }

        public void version(Field version) {
            this.version = version;
        }

        public AllEntries allEntries() {
            return this.allEntries;
        }

        public float docBoost() {
            return this.docBoost;
        }

        public void docBoost(float docBoost) {
            this.docBoost = docBoost;
        }

        /**
         * A string builder that can be used to construct complex names for example.
         * Its better to reuse the.
         */
        public StringBuilder stringBuilder() {
            stringBuilder.setLength(0);
            return this.stringBuilder;
        }
    }

    public abstract boolean flyweight();

    public abstract DocumentMapperParser docMapperParser();

    public abstract boolean mappingsModified();

    public abstract void setMappingsModified();

    public abstract void setWithinNewMapper();

    public abstract void clearWithinNewMapper();

    public abstract boolean isWithinNewMapper();

    /**
     * Return a new context that will be within a copy-to operation.
     */
    public final ParseContext createCopyToContext() {
        return new FilterParseContext(this) {
            @Override
            public boolean isWithinCopyTo() {
                return true;
            }
        };
    }

    public boolean isWithinCopyTo() {
        return false;
    }

    /**
     * Return a new context that will be within multi-fields.
     */
    public final ParseContext createMultiFieldContext() {
        return new FilterParseContext(this) {
            @Override
            public boolean isWithinMultiFields() {
                return true;
            }
        };
    }

    /**
     * Return a new context that will be used within a nested document.
     */
    public final ParseContext createNestedContext(String fullPath) {
        final Document doc = new Document(fullPath, doc());
        addDoc(doc);
        return switchDoc(doc);
    }

    /**
     * Return a new context that has the provided document as the current document.
     */
    public final ParseContext switchDoc(final Document document) {
        return new FilterParseContext(this) {
            @Override
            public Document doc() {
                return document;
            }
        };
    }

    /**
     * Return a new context that will have the provided path.
     */
    public final ParseContext overridePath(final ContentPath path) {
        return new FilterParseContext(this) {
            @Override
            public ContentPath path() {
                return path;
            }
        };
    }

    public boolean isWithinMultiFields() {
        return false;
    }

    public abstract String index();

    @Nullable
    public abstract Settings indexSettings();

    public abstract String type();

    public abstract SourceToParse sourceToParse();

    public abstract BytesReference source();

    // only should be used by SourceFieldMapper to update with a compressed source
    public abstract void source(BytesReference source);

    public abstract ContentPath path();

    public abstract XContentParser parser();

    public abstract DocumentMapper.ParseListener listener();

    public abstract Document rootDoc();

    public abstract List<Document> docs();

    public abstract Document doc();

    public abstract void addDoc(Document doc);

    public abstract RootObjectMapper root();

    public abstract DocumentMapper docMapper();

    public abstract AnalysisService analysisService();

    public abstract String id();

    public abstract void ignoredValue(String indexName, String value);

    public abstract String ignoredValue(String indexName);

    /**
     * Really, just the id mapper should set this.
     */
    public abstract void id(String id);

    public abstract Field uid();

    /**
     * Really, just the uid mapper should set this.
     */
    public abstract void uid(Field uid);

    public abstract Field version();

    public abstract void version(Field version);

    public final boolean includeInAll(Boolean includeInAll, FieldMapper mapper) {
        return includeInAll(includeInAll, mapper.fieldType().indexOptions() != IndexOptions.NONE);
    }

    /**
     * Is all included or not. Will always disable it if {@link org.elasticsearch.index.mapper.internal.AllFieldMapper#enabled()}
     * is <tt>false</tt>. If its enabled, then will return <tt>true</tt> only if the specific flag is <tt>null</tt> or
     * its actual value (so, if not set, defaults to "true") and the field is indexed.
     */
    private boolean includeInAll(Boolean specificIncludeInAll, boolean indexed) {
        if (isWithinCopyTo()) {
            return false;
        }
        if (isWithinMultiFields()) {
            return false;
        }
        if (!docMapper().allFieldMapper().enabled()) {
            return false;
        }
        // not explicitly set
        if (specificIncludeInAll == null) {
            return indexed;
        }
        return specificIncludeInAll;
    }

    public abstract AllEntries allEntries();

    /**
     * Return a new context that will have the external value set.
     */
    public final ParseContext createExternalValueContext(final Object externalValue) {
        return new FilterParseContext(this) {
            @Override
            public boolean externalValueSet() {
                return true;
            }

            @Override
            public Object externalValue() {
                return externalValue;
            }
        };
    }

    public boolean externalValueSet() {
        return false;
    }

    public Object externalValue() {
        throw new ElasticsearchIllegalStateException("External value is not set");
    }

    /**
     * Try to parse an externalValue if any
     * @param clazz Expected class for external value
     * @return null if no external value has been set or the value
     */
    public final <T> T parseExternalValue(Class<T> clazz) {
        if (!externalValueSet() || externalValue() == null) {
            return null;
        }
        if (!clazz.isInstance(externalValue())) {
            throw new ElasticsearchIllegalArgumentException("illegal external value class ["
                    + externalValue().getClass().getName() + "]. Should be " + clazz.getName());
        }
        return clazz.cast(externalValue());
    }

    public abstract float docBoost();

    public abstract void docBoost(float docBoost);

    /**
     * A string builder that can be used to construct complex names for example.
     * Its better to reuse the.
     */
    public abstract StringBuilder stringBuilder();
}
package org.oskari.wcs.request;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import org.oskari.utils.common.StringUtils;
import org.oskari.wcs.WCS;
import org.oskari.wcs.capabilities.Capabilities;
import org.oskari.wcs.coverage.CoverageDescription;
import org.oskari.wcs.coverage.RectifiedGridCoverage;
import org.oskari.wcs.extension.Interpolation;
import org.oskari.wcs.extension.scaling.ScaleAxesByFactor;
import org.oskari.wcs.extension.scaling.ScaleAxis;
import org.oskari.wcs.extension.scaling.ScaleByFactor;
import org.oskari.wcs.extension.scaling.ScaleToExtent;
import org.oskari.wcs.extension.scaling.ScaleToSize;
import org.oskari.wcs.extension.scaling.Scaling;
import org.oskari.wcs.extension.scaling.TargetAxisExtent;
import org.oskari.wcs.extension.scaling.TargetAxisSize;

/**
 * Fluent builder for a WCS 2.0.1 GetCoverage request.
 * <p>
 * Configure the request via the chained setters (subset, interpolation, CRS,
 * scaling, ...) and call {@link #toKVP()} to obtain the request as a
 * key-value-pair parameter map suitable for an HTTP GET query string.
 * <p>
 * Each setter validates the requested feature against the service
 * {@link Capabilities} and the {@link CoverageDescription}, throwing
 * {@link UnsupportedOperationException} when the server does not advertise
 * support for it.
 */
public class GetCoverage {

    public static final String MEDIA_TYPE_MULTI_PART = "multipart/related";

    // Service capabilities used to validate requested extensions.
    private final Capabilities wcs;
    // Description of the coverage being requested.
    private final CoverageDescription desc;
    private final String coverageId;
    private final String format;
    // Accumulated "axis(range)" subset specifications, one entry per call.
    private final List<String> subset;
    private boolean multiPart;
    private Interpolation interpolation;
    private List<String> interpolationPerAxis;
    private String subsettingCRS;
    private String outputCRS;
    private Scaling scaling;

    /**
     * Creates a request for the coverage's native format.
     */
    public GetCoverage(Capabilities wcs, CoverageDescription desc) {
        this(wcs, desc, desc.getNativeFormat());
    }

    /**
     * Creates a request for an explicit output format.
     *
     * @throws IllegalArgumentException if the service does not support
     *         {@code format}
     */
    public GetCoverage(Capabilities wcs, CoverageDescription desc, String format)
            throws IllegalArgumentException {
        if (!wcs.supportsFormat(format)) {
            throw new IllegalArgumentException("Invalid format");
        }
        this.wcs = wcs;
        this.desc = desc;
        this.coverageId = desc.getCoverageId();
        this.format = format;
        this.subset = new ArrayList<>();
    }

    /** Requests a multipart/related response (GML header + binary payload). */
    public GetCoverage multiPart() {
        this.multiPart = true;
        return this;
    }

    /** Adds an integer range subset, {@code dimension(low,high)}. */
    public GetCoverage subset(String dimension, int low, int high) {
        requireAxis(dimension);
        String spec = String.format("%s(%d,%d)", dimension, low, high);
        subset.add(spec);
        return this;
    }

    /** Adds an integer point (slice) subset, {@code dimension(point)}. */
    public GetCoverage subset(String dimension, int point) {
        requireAxis(dimension);
        String spec = String.format("%s(%d)", dimension, point);
        subset.add(spec);
        return this;
    }

    /** Adds a floating-point range subset, {@code dimension(low,high)}. */
    public GetCoverage subset(String dimension, double low, double high) {
        requireAxis(dimension);
        // Locale.US guarantees '.' as the decimal separator in the KVP value.
        String spec = String.format(Locale.US, "%s(%f,%f)", dimension, low, high);
        subset.add(spec);
        return this;
    }

    /** Adds a floating-point point (slice) subset, {@code dimension(point)}. */
    public GetCoverage subset(String dimension, double point) {
        requireAxis(dimension);
        String spec = String.format(Locale.US, "%s(%f)", dimension, point);
        subset.add(spec);
        return this;
    }

    /**
     * Validates that the coverage actually has the named axis.
     *
     * @throws UnsupportedOperationException when the axis is unknown
     */
    private void requireAxis(String dimension) {
        if (desc.hasAxis(dimension)) {
            return;
        }
        throw new UnsupportedOperationException();
    }

    /** Explicitly enables or disables the multipart response. */
    public GetCoverage multipart(boolean multiPart) {
        this.multiPart = multiPart;
        return this;
    }

    /** Sets the global interpolation method (Interpolation extension). */
    public GetCoverage interpolation(Interpolation interp) {
        boolean supported = wcs.supportsInterpolation() && wcs.supportsInterpolation(interp);
        if (!supported) {
            throw new UnsupportedOperationException();
        }
        this.interpolation = interp;
        return this;
    }

    /** Sets a per-axis interpolation method (Interpolation extension). */
    public GetCoverage interpolationPerAxis(String axis, Interpolation interp) {
        boolean supported = wcs.supportsInterpolation()
                && wcs.supportsInterpolation(interp)
                && desc.hasAxis(axis);
        if (!supported) {
            throw new UnsupportedOperationException();
        }
        if (interpolationPerAxis == null) {
            // Allocated lazily; most requests never use per-axis interpolation.
            interpolationPerAxis = new ArrayList<>();
        }
        interpolationPerAxis.add(String.format("%s,%s", axis, interp.method));
        return this;
    }

    /** Sets the CRS in which subset bounds are expressed (CRS extension). */
    public GetCoverage subsettingCRS(String crs) {
        if (!wcs.supportsCRS() || !wcs.supportsCRS(crs)) {
            throw new UnsupportedOperationException();
        }
        this.subsettingCRS = crs;
        return this;
    }

    /** Sets the CRS of the returned coverage (CRS extension). */
    public GetCoverage outputCRS(String crs) {
        if (!wcs.supportsCRS() || !wcs.supportsCRS(crs)) {
            throw new UnsupportedOperationException();
        }
        this.outputCRS = crs;
        return this;
    }

    /** Sets the scaling operation (Scaling extension, grid coverages only). */
    public GetCoverage scaling(Scaling scaling) {
        // TODO: once we support other types of GridCoverages should be removed
        if (!(desc instanceof RectifiedGridCoverage)) {
            throw new IllegalArgumentException("Scaling extension only defined for Grid based coverages");
        }
        // TODO: check that axes exist
        this.scaling = scaling;
        return this;
    }

    /**
     * Serializes this request to a KVP parameter map.
     *
     * @return map of request parameter name to value(s); multi-valued
     *         parameters (subset, interpolationPerAxis) carry one array entry
     *         per configured value
     */
    public Map<String, String[]> toKVP() {
        Map<String, String[]> kvp = new HashMap<>();
        put(kvp, "service", "WCS");
        put(kvp, "version", WCS.VERSION_201);
        put(kvp, "request", "GetCoverage");
        put(kvp, "coverageId", coverageId);
        put(kvp, "format", format);
        if (multiPart) {
            put(kvp, "mediaType", MEDIA_TYPE_MULTI_PART);
        }
        if (!subset.isEmpty()) {
            kvp.put("subset", subset.toArray(new String[subset.size()]));
        }
        if (interpolation != null) {
            put(kvp, "interpolation", interpolation.method);
        }
        if (interpolationPerAxis != null) {
            kvp.put("interpolationPerAxis",
                    interpolationPerAxis.toArray(new String[interpolationPerAxis.size()]));
        }
        if (subsettingCRS != null) {
            put(kvp, "subsettingCRS", subsettingCRS);
        }
        if (outputCRS != null) {
            put(kvp, "outputCRS", outputCRS);
        }
        appendScaling(kvp);
        return kvp;
    }

    /** Adds the scaling-extension parameter matching the configured subtype. */
    private void appendScaling(Map<String, String[]> kvp) {
        if (scaling == null) {
            return;
        }
        if (scaling instanceof ScaleByFactor) {
            ScaleByFactor sbf = (ScaleByFactor) scaling;
            put(kvp, "SCALEFACTOR", Double.toString(sbf.scaleFactor));
        } else if (scaling instanceof ScaleAxesByFactor) {
            put(kvp, "SCALEAXES", scaleAxesKVP((ScaleAxesByFactor) scaling));
        } else if (scaling instanceof ScaleToSize) {
            put(kvp, "SCALESIZE", scaleAxesKVP((ScaleToSize) scaling));
        } else if (scaling instanceof ScaleToExtent) {
            put(kvp, "SCALEEXTENT", scaleAxesKVP((ScaleToExtent) scaling));
        }
    }

    /** Stores a single-valued parameter as a one-element array. */
    private static void put(Map<String, String[]> map, String key, String value) {
        map.put(key, new String[] { value });
    }

    /**
     * SCALEAXES=a1(s1),...,an(sn) where, for 1&lt;=i&lt;=n,
     * - ai is an axis abbreviation;
     * - si is a scaleFactor expressed as the ASCII representation of a
     *   positive floating-point number
     */
    private static String scaleAxesKVP(ScaleAxesByFactor sabf) {
        ScaleAxis[] axes = sabf.scaleAxes;
        String[] parts = new String[axes.length];
        for (int i = 0; i < axes.length; i++) {
            ScaleAxis a = axes[i];
            parts[i] = String.format(Locale.US, "%s(%f)", a.axis, a.scaleFactor);
        }
        return StringUtils.join(parts, ',');
    }

    /**
     * SCALESIZE=a1(s1),...,an(sn) where, for 1&lt;=i&lt;=n,
     * - ai is an axis abbreviation;
     * - si are sizes
     */
    private static String scaleAxesKVP(ScaleToSize sts) {
        TargetAxisSize[] axes = sts.targetAxisSizes;
        String[] parts = new String[axes.length];
        for (int i = 0; i < axes.length; i++) {
            TargetAxisSize a = axes[i];
            parts[i] = String.format("%s(%d)", a.axis, a.targetSize);
        }
        return StringUtils.join(parts, ',');
    }

    /**
     * SCALEEXTENT=a1(lo1:hi1),...,an(lon:hin) where, for 1&lt;=i&lt;=n,
     * - ai is an axis abbreviation;
     * - loi and hii are low and high, respectively, each of them represented
     *   as either a string, enclosed in double quotes, or a number
     */
    private static String scaleAxesKVP(ScaleToExtent ste) {
        TargetAxisExtent[] axes = ste.axisExtents;
        String[] parts = new String[axes.length];
        for (int i = 0; i < axes.length; i++) {
            TargetAxisExtent a = axes[i];
            parts[i] = String.format(Locale.US, "%s(%f:%f)", a.axis, a.low, a.high);
        }
        return StringUtils.join(parts, ',');
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.model;

import java.util.concurrent.ExecutorService;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;

import org.apache.camel.Expression;
import org.apache.camel.Processor;
import org.apache.camel.model.language.ExpressionDefinition;
import org.apache.camel.processor.Splitter;
import org.apache.camel.processor.SubUnitOfWorkProcessor;
import org.apache.camel.processor.aggregate.AggregationStrategy;
import org.apache.camel.spi.RouteContext;
import org.apache.camel.util.CamelContextHelper;
import org.apache.camel.util.concurrent.ExecutorServiceHelper;

/**
 * Represents an XML &lt;split/&gt; element in the route model: splits a single
 * {@link org.apache.camel.Exchange} into multiple sub-exchanges using the
 * configured expression, optionally processing them in parallel and
 * re-aggregating the results.
 * <p>
 * Fields use JAXB field access ({@link XmlAccessType#FIELD}): attributes are
 * (un)marshalled straight from the fields, so the getters/setters below do not
 * affect XML serialization.
 *
 * @version
 */
@XmlRootElement(name = "split")
@XmlAccessorType(XmlAccessType.FIELD)
public class SplitDefinition extends ExpressionNode implements ExecutorServiceAwareDefinition<SplitDefinition> {
    // Resolved/runtime-only state; excluded from the XML representation.
    @XmlTransient
    private AggregationStrategy aggregationStrategy;
    @XmlTransient
    private ExecutorService executorService;
    // Boolean/Long wrappers (not primitives) so "unset" is distinguishable
    // from an explicit false/0 in the XML.
    @XmlAttribute
    private Boolean parallelProcessing;
    @XmlAttribute
    private String strategyRef;
    @XmlAttribute
    private String executorServiceRef;
    @XmlAttribute
    private Boolean streaming;
    @XmlAttribute
    private Boolean stopOnException;
    @XmlAttribute
    private Long timeout;
    @XmlAttribute
    private String onPrepareRef;
    @XmlTransient
    private Processor onPrepare;
    @XmlAttribute
    private Boolean shareUnitOfWork;

    public SplitDefinition() {
    }

    public SplitDefinition(Expression expression) {
        super(expression);
    }

    public SplitDefinition(ExpressionDefinition expression) {
        super(expression);
    }

    @Override
    public String toString() {
        return "Split[" + getExpression() + " -> " + getOutputs() + "]";
    }

    @Override
    public String getShortName() {
        return "split";
    }

    @Override
    public String getLabel() {
        return "split";
    }

    /**
     * Builds the runtime {@link Splitter} processor from this definition.
     * <p>
     * NOTE(review): this resolves references (strategy, executor service,
     * onPrepare) into the instance fields as a side effect, so the definition
     * is mutated during route startup.
     */
    @Override
    public Processor createProcessor(RouteContext routeContext) throws Exception {
        Processor childProcessor = this.createChildProcessor(routeContext, true);
        aggregationStrategy = createAggregationStrategy(routeContext);

        executorService = ExecutorServiceHelper.getConfiguredExecutorService(routeContext, "Split", this);
        if (isParallelProcessing() && executorService == null) {
            // we are running in parallel so create a cached thread pool which grows/shrinks automatic
            executorService = routeContext.getCamelContext().getExecutorServiceStrategy().newDefaultThreadPool(this, "Split");
        }
        // getTimeout() already coalesces null to 0 (see below), so the null
        // check here is redundant but harmless.
        long timeout = getTimeout() != null ? getTimeout() : 0;
        if (timeout > 0 && !isParallelProcessing()) {
            // a timeout only makes sense when sub-exchanges run concurrently
            throw new IllegalArgumentException("Timeout is used but ParallelProcessing has not been enabled.");
        }
        if (onPrepareRef != null) {
            onPrepare = CamelContextHelper.mandatoryLookup(routeContext.getCamelContext(), onPrepareRef, Processor.class);
        }

        Expression exp = getExpression().createExpression(routeContext);

        Splitter answer = new Splitter(routeContext.getCamelContext(), exp, childProcessor, aggregationStrategy,
                isParallelProcessing(), executorService, isStreaming(), isStopOnException(), timeout, onPrepare, isShareUnitOfWork());
        if (isShareUnitOfWork()) {
            // wrap answer in a sub unit of work, since we share the unit of work
            return new SubUnitOfWorkProcessor(answer);
        }
        return answer;
    }

    /**
     * Returns the configured strategy, resolving {@link #strategyRef} from the
     * registry when no instance has been set directly. May return null.
     */
    private AggregationStrategy createAggregationStrategy(RouteContext routeContext) {
        AggregationStrategy strategy = getAggregationStrategy();
        if (strategy == null && strategyRef != null) {
            strategy = CamelContextHelper.mandatoryLookup(routeContext.getCamelContext(), strategyRef, AggregationStrategy.class);
        }
        return strategy;
    }

    // Fluent API
    // -------------------------------------------------------------------------

    /**
     * Set the aggregationStrategy
     *
     * @return the builder
     */
    public SplitDefinition aggregationStrategy(AggregationStrategy aggregationStrategy) {
        setAggregationStrategy(aggregationStrategy);
        return this;
    }

    /**
     * Set the aggregationStrategy
     *
     * @param aggregationStrategyRef a reference to a strategy to lookup
     * @return the builder
     */
    public SplitDefinition aggregationStrategyRef(String aggregationStrategyRef) {
        setStrategyRef(aggregationStrategyRef);
        return this;
    }

    /**
     * Doing the splitting work in parallel
     *
     * @return the builder
     */
    public SplitDefinition parallelProcessing() {
        setParallelProcessing(true);
        return this;
    }

    /**
     * Enables streaming.
     * See {@link org.apache.camel.model.SplitDefinition#isStreaming()} for more information
     *
     * @return the builder
     */
    public SplitDefinition streaming() {
        setStreaming(true);
        return this;
    }

    /**
     * Will now stop further processing if an exception or failure occurred during processing of an
     * {@link org.apache.camel.Exchange} and the caused exception will be thrown.
     * <p/>
     * Will also stop if processing the exchange failed (has a fault message) or an exception
     * was thrown and handled by the error handler (such as using onException). In all situations
     * the splitter will stop further processing. This is the same behavior as in pipeline, which
     * is used by the routing engine.
     * <p/>
     * The default behavior is to <b>not</b> stop but continue processing till the end
     *
     * @return the builder
     */
    public SplitDefinition stopOnException() {
        setStopOnException(true);
        return this;
    }

    /** Sets a custom thread pool to use for parallel processing. */
    public SplitDefinition executorService(ExecutorService executorService) {
        setExecutorService(executorService);
        return this;
    }

    /** Sets a registry reference to the thread pool to use for parallel processing. */
    public SplitDefinition executorServiceRef(String executorServiceRef) {
        setExecutorServiceRef(executorServiceRef);
        return this;
    }

    /**
     * Uses the {@link Processor} when preparing the {@link org.apache.camel.Exchange} to be send.
     * This can be used to deep-clone messages that should be send, or any custom logic needed before
     * the exchange is send.
     *
     * @param onPrepare the processor
     * @return the builder
     */
    public SplitDefinition onPrepare(Processor onPrepare) {
        setOnPrepare(onPrepare);
        return this;
    }

    /**
     * Uses the {@link Processor} when preparing the {@link org.apache.camel.Exchange} to be send.
     * This can be used to deep-clone messages that should be send, or any custom logic needed before
     * the exchange is send.
     *
     * @param onPrepareRef reference to the processor to lookup in the {@link org.apache.camel.spi.Registry}
     * @return the builder
     */
    public SplitDefinition onPrepareRef(String onPrepareRef) {
        setOnPrepareRef(onPrepareRef);
        return this;
    }

    /**
     * Sets a timeout value in millis to use when using parallelProcessing.
     *
     * @param timeout timeout in millis
     * @return the builder
     */
    public SplitDefinition timeout(long timeout) {
        setTimeout(timeout);
        return this;
    }

    /**
     * Shares the {@link org.apache.camel.spi.UnitOfWork} with the parent and each of the sub messages.
     *
     * @return the builder.
     * @see org.apache.camel.spi.SubUnitOfWork
     */
    public SplitDefinition shareUnitOfWork() {
        setShareUnitOfWork(true);
        return this;
    }

    // Properties
    //-------------------------------------------------------------------------

    public AggregationStrategy getAggregationStrategy() {
        return aggregationStrategy;
    }

    public void setAggregationStrategy(AggregationStrategy aggregationStrategy) {
        this.aggregationStrategy = aggregationStrategy;
    }

    public Boolean getParallelProcessing() {
        return parallelProcessing;
    }

    public void setParallelProcessing(Boolean parallelProcessing) {
        this.parallelProcessing = parallelProcessing;
    }

    /** Null-safe convenience: unset counts as false. */
    public boolean isParallelProcessing() {
        return parallelProcessing != null && parallelProcessing;
    }

    public Boolean getStreaming() {
        return streaming;
    }

    public void setStreaming(Boolean streaming) {
        this.streaming = streaming;
    }

    /**
     * The splitter should use streaming -- exchanges are being sent as the data for them becomes available.
     * This improves throughput and memory usage, but it has a drawback:
     * - the sent exchanges will no longer contain the {@link org.apache.camel.Exchange#SPLIT_SIZE} header property
     *
     * @return whether or not streaming should be used
     */
    public boolean isStreaming() {
        return streaming != null && streaming;
    }

    public Boolean getStopOnException() {
        return stopOnException;
    }

    public void setStopOnException(Boolean stopOnException) {
        this.stopOnException = stopOnException;
    }

    // NOTE(review): declared as Boolean (boxed) unlike the other is* methods,
    // though it can never return null; kept as-is for binary compatibility.
    public Boolean isStopOnException() {
        return stopOnException != null && stopOnException;
    }

    public ExecutorService getExecutorService() {
        return executorService;
    }

    public void setExecutorService(ExecutorService executorService) {
        this.executorService = executorService;
    }

    public String getStrategyRef() {
        return strategyRef;
    }

    public void setStrategyRef(String strategyRef) {
        this.strategyRef = strategyRef;
    }

    public String getExecutorServiceRef() {
        return executorServiceRef;
    }

    public void setExecutorServiceRef(String executorServiceRef) {
        this.executorServiceRef = executorServiceRef;
    }

    // NOTE(review): coalesces null to 0L (autoboxed), so callers cannot
    // distinguish "unset" from an explicit 0 through this getter.
    public Long getTimeout() {
        return timeout != null ? timeout : 0;
    }

    public void setTimeout(Long timeout) {
        this.timeout = timeout;
    }

    public String getOnPrepareRef() {
        return onPrepareRef;
    }

    public void setOnPrepareRef(String onPrepareRef) {
        this.onPrepareRef = onPrepareRef;
    }

    public Processor getOnPrepare() {
        return onPrepare;
    }

    public void setOnPrepare(Processor onPrepare) {
        this.onPrepare = onPrepare;
    }

    public Boolean getShareUnitOfWork() {
        return shareUnitOfWork;
    }

    public void setShareUnitOfWork(Boolean shareUnitOfWork) {
        this.shareUnitOfWork = shareUnitOfWork;
    }

    /** Null-safe convenience: unset counts as false. */
    public boolean isShareUnitOfWork() {
        return shareUnitOfWork != null && shareUnitOfWork;
    }
}
/* JAT: Java Astrodynamics Toolkit * * Copyright (c) 2002 National Aeronautics and Space Administration and the Center for Space Research (CSR), * The University of Texas at Austin. All rights reserved. * * This file is part of JAT. JAT is free software; you can * redistribute it and/or modify it under the terms of the * NASA Open Source Agreement * * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * NASA Open Source Agreement for more details. * * You should have received a copy of the NASA Open Source Agreement * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. * */ package jat.examplesNOSA.AttitudeExample; import jat.coreNOSA.algorithm.integrators.EquationsOfMotion; import jat.coreNOSA.algorithm.integrators.RungeKutta8; import jat.coreNOSA.plotutil.SinglePlot; import jat.coreNOSA.plotutil.ThreePlots; import jat.coreNOSA.plotutil.TwoPlots; /** * <p>This class demonstrates a way to do a simple s/c attitude simulation. * The simulated s/c is assumed to be rigid. * A circular orbit with gravity gradient effect is the torque environment for * the spacecraft. </p> * * Damping is provided by a damper spherical in shape. <br> * c = damping coefficient <br> * j = spherical damper inertia <br> <br> * * If the main()is assumed to be a client to JAT, the steps taken to do the * simulation is as follows: <br> * 1. Create an object of an integrator (RungeKutta8) <br> * 2. Create an object of the class containing EOM <br> * 3. Initialize variables <br> * 4. Set the start & end time of the simulation <br> * 5. Integrate the EOM <br> * 6. Gets data at each time-step <br> * 7. 
Make plots visible <br> * * @author <a href="mailto:ntakada@users.sourceforge.net"> Noriko Takada * @version 1.6 * * * */ public class RigidSphericalDamperC implements EquationsOfMotion{ ThreePlots rotation_plot = new ThreePlots(); ThreePlots angle_plot = new ThreePlots(); SinglePlot quarternion_check = new SinglePlot(); TwoPlots quarternion_plot1 = new TwoPlots(); TwoPlots quarternion_plot2 = new TwoPlots(); public static final double PI = 3.14159; /** Creates a new instance of SimpleIntegrator */ public RigidSphericalDamperC() { // set up the trajectory plot rotation_plot.setTitle("Angular Velocity"); rotation_plot.topPlot.setXLabel("# of orbits"); rotation_plot.topPlot.setYLabel("w1(rad/sec)"); rotation_plot.middlePlot.setXLabel("# of orbits"); rotation_plot.middlePlot.setYLabel("w2(rad/sec)"); rotation_plot.bottomPlot.setXLabel("# of orbits"); rotation_plot.bottomPlot.setYLabel("w3(rad/sec)"); quarternion_plot1.setTitle("Quaternions: e1 & e2"); quarternion_plot1.topPlot.setXLabel("# of orbits"); quarternion_plot1.topPlot.setYLabel("e1"); quarternion_plot1.bottomPlot.setXLabel("# of orbits"); quarternion_plot1.bottomPlot.setYLabel("e2"); quarternion_plot2.setTitle("Quaternions: e3 & e4"); quarternion_plot2.topPlot.setXLabel("# of orbits"); quarternion_plot2.topPlot.setYLabel("e1"); quarternion_plot2.bottomPlot.setXLabel("# of orbits"); quarternion_plot2.bottomPlot.setYLabel("e2"); angle_plot.setTitle("Angles between B and A frame"); angle_plot.topPlot.setXLabel("# of orbits"); angle_plot.topPlot.setYLabel("angle between a1 & b1 (rad)"); angle_plot.middlePlot.setXLabel("# of orbits"); angle_plot.middlePlot.setYLabel("angle between a2 & b2 (rad)"); angle_plot.bottomPlot.setXLabel("# of orbits"); angle_plot.bottomPlot.setYLabel("angle between a3 & b3 (rad)"); quarternion_check.setTitle("Quarternion Check"); quarternion_check.plot.setXLabel("# of orbits"); quarternion_check.plot.setYLabel("e1^2 + e2^2 + e3^2 + e4^2"); } /** Compute the derivatives. 
* Equations of Motion * @param t double containing time or the independent variable. * @param x VectorN containing the required data. * @return double [] containing the derivatives. */ public double[] derivs(double t, double[] x) { double I1 = 10.42; // spacecraft inertia double I2 = 35.42; double I3 = 41.67; double c = 5; // damping coefficient double j = 5; // spherical damper inertia double c11 = 1- 2*( x[4]*x[4] + x[5]*x[5]); double c21 = 2* (x[3]*x[4]-x[5]*x[6]); double c31 = 2* (x[3]*x[5]+x[4]*x[6]); double [] out = new double[10]; out[0] = 2*PI*((I2-I3)/I1)* (x[1]*x[2] - 3*c21*c31) - 2*PI*(c/I1)*(x[0]-x[7]); out[1] = 2*PI*((I3-I1)/I2)* (x[0]*x[2] - 3*c31*c11) - 2*PI*(c/I2)*(x[1]-x[8]); out[2] = 2*PI*((I1-I2)/I3)* (x[0]*x[1] - 3*c11*c21) - 2*PI*(c/I3)*(x[2]-x[9]); out[3] = -PI* (-(x[2]+1)*x[4] + x[1]*x[5] - x[0]*x[6]); out[4] = -PI* ((x[2]+1)*x[3] - x[0]*x[5] - x[1]*x[6]); out[5] = -PI* (-(x[2]-1)*x[6] + x[0]*x[4] - x[1]*x[3]); out[6] = -PI* ((x[2]-1)*x[5] + x[1]*x[4] + x[0]*x[3]); out[7] = 2*PI*(c/j)*(x[0]-x[7]) -2*PI*(x[1]*x[9] - x[2]*x[8]); out[8] = 2*PI*(c/j)*(x[1]-x[8]) -2*PI*(x[2]*x[7] - x[0]*x[9]); out[9] = 2*PI*(c/j)*(x[2]-x[9]) -2*PI*(x[0]*x[8] - x[1]*x[7]); return out; } /** Implements the Printable interface to get the data out of the propagator and pass it to the plot. * This method is executed by the propagator at each integration step. * @param t Time. * @param y Data array. 
*/ public void print(double t, double [] y){ // handle the first variable for plotting - this is a little mystery but it works boolean first = true; if (t == 0.0) first = false; double w1 = y[0]; double w2 = y[1]; double w3 = y[2]; double e1 = y[3]; double e2 = y[4]; double e3 = y[5]; double e4 = y[6]; double quat_check = e1*e1+e2*e2+e3*e3+e4*e4; // Calculate Transformation matrix elements double c11 = 1- 2*(e2*e2 + e3*e3); double c12 = 2* (e1*e2 + e3*e4); double c13 = 2* (e1*e3 - e2*e4); double c21 = 2* (e2*e1 - e3*e4); double c22 = 1- 2*(e3*e3 + e1*e1); double c23 = 2* (e2*e3 + e1*e4); double c31 = 2* (e3*e1 + e2*e4); double c32 = 2* (e3*e2 - e1*e4); double c33 = 1- 2*(e1*e1 + e2*e2); double angle11 = Math.toDegrees(Math.acos(c11)); double angle22 = Math.toDegrees(Math.acos(c22)); double angle33 = Math.toDegrees(Math.acos(c33)); // add data point to the plot rotation_plot.topPlot.addPoint(0, t, w1, first); rotation_plot.middlePlot.addPoint(0, t, w2, first); rotation_plot.bottomPlot.addPoint(0, t, w3, first); quarternion_check.plot.addPoint(0, t, quat_check, first); angle_plot.topPlot.addPoint(0, t, angle11, first); angle_plot.middlePlot.addPoint(0, t, angle22, first); angle_plot.bottomPlot.addPoint(0, t, angle33, first); quarternion_plot1.topPlot.addPoint(0, t, e1, first); quarternion_plot1.bottomPlot.addPoint(0,t,e2, first); quarternion_plot2.topPlot.addPoint(0,t,e3, first); quarternion_plot2.bottomPlot.addPoint(0,t,e4,first); // also print to the screen for System.out.println(t+" "+y[0]+" "+y[1]+" "+y[2]); } /** Runs the example. * @param args Arguments. 
*/ public static void main(String[] args) { // create an RK8 integrator with step-size of 0.1 RungeKutta8 rk8 = new RungeKutta8(0.1); // create an instance RigidSphericalDamperC si = new RigidSphericalDamperC(); // initialize the variables double [] x0 = new double[10]; x0[0] = 0.5; x0[1] = 0.0; x0[2] = 1.0; x0[3] = 0.0; x0[4] = 0.0; x0[5] = 0.0; x0[6] = 1.0; x0[7] = 0.0; x0[8] = 0.0; x0[9] = 1.0; // set the final time double tf = 20.0; // set the initial time to zero double t0 = 0.0; // integrate the equations rk8.integrate(t0, x0, tf, si, true); // make the plot visible si.rotation_plot.setVisible(true); si.angle_plot.setVisible(true); si.quarternion_check.setVisible(true); si.quarternion_plot1.setVisible(true); si.quarternion_plot2.setVisible(true); } }
/*
Jaivox Application Generator (JAG) version 0.2
March 2014

Copyright 2010-2014 by Bits and Pixels, Inc.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

Please see work/licenses for licenses to other components
included with this package.
*/
/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package com.jaivox.ui.gengram;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.TreeMap;
import java.util.regex.Pattern;

/**
 * Front end for grammar generation: parses dialog statements into
 * {@link Sentence} objects via {@link Parse} and expands them with synonyms
 * supplied by a pluggable {@link WnLink} implementation.
 *
 * @author lin
 */
public class GrammarGenerator {

    /** Delimiters used when tokenizing a raw dialog file into sentences. */
    public static String DLG_DLIM = "()\r\n";

    // Shared parser/synonym-link state. NOTE(review): everything here is
    // static, so only one dialog can be processed at a time per JVM.
    static Parse P = null;
    static WnLink W;
    // Cache of WnLink instances keyed by their class name (see setWLink).
    private static Map<String, WnLink> synRepos = null;
    static String[] tests;

    /** Matches either a bare token or a double-quoted phrase. */
    public static final String quoted = "([^\"]\\S*|\".+?\")\\s*";
    public static final Pattern regxQuoted = Pattern.compile (quoted);

    /**
     * @param dataFolder folder prefix prepended to the Penn-tags data file
     */
    public GrammarGenerator (String dataFolder) {
        //W.synsfile = dataFolder + W.synsfile;
        // Access the static field through the class, not the (null) instance.
        Parse.penntags = dataFolder + Parse.penntags;
        synRepos = new HashMap<String, WnLink> ();
    }

    /**
     * Selects (and lazily instantiates by reflection) the WnLink
     * implementation named by {@code key}, caching instances per class name.
     */
    public void setWLink (String key) {
        try {
            WnLink wl = synRepos.get (key);
            if (wl == null) {
                wl = (WnLink) Class.forName (key).newInstance ();
            }
            synRepos.put (key, wl);
            W = wl;
        }
        catch (Exception e) {
            e.printStackTrace ();
        }
    }

    /**
     * Loads a dialog file and/or a data file of extra synonym material.
     * Either argument may be null to skip that part.
     */
    public void load (String dlgFile, String datFile) {
        if (datFile != null) {
            W.addtablecolumn (datFile, ",\r\n", 3, 0);
        }
        if (dlgFile != null) {
            // if (P == null) {
            P = new Parse (dlgFile);
            // }
            if (P.Valid) {
                P.createsentences ();
            }
        }
    }

    public String[] getSynonyms (String word) {
        return W.getsynonyms (word);
    }

    public String[] getSynonyms (String word, String form) {
        return W.synsget (word, form);
    }

    public Sentence getSentence (String key) {
        return P.sentences.get (key);
    }

    public ArrayList<String> getParsedStatements () {
        return P.statements;
    }

    /**
     * Runs multi-word synonym substitution over every parsed sentence.
     * The {@code filename} argument is currently unused.
     */
    public void generate (String filename) {
        if (P == null || !P.Valid) {
            return;
        }
        TreeMap<String, Sentence> sentences = P.sentences;
        Set<String> keys = sentences.keySet ();
        int n = keys.size ();
        tests = keys.toArray (new String[n]);
        for (int i = 0; i < n; i++) {
            String key = tests[i];
            Sentence s = sentences.get (key);
            // s.show (""+i+" ");
            // s.findmultiwords (W);
            s.multiwordsubs (P, W);
        }
        /* // generate using okays instead of subs
        for (int i=0; i<n; i++) {
            String key = tests [i];
            Sentence s = sentences.get (key);
            System.out.println ("Sentence "+i+" Generating okays for: "+key);
            s.generateokays ();
        }
        */
    }

    /**
     * Splits a raw dialog string into sentences, ensuring each ends with
     * '?' or '.'.
     */
    public ArrayList<String> parseDialog (String dlg) {
        ArrayList<String> sents = new ArrayList<String> ();
        StringTokenizer st = new StringTokenizer (dlg, DLG_DLIM);
        while (st.hasMoreTokens ()) {
            String token = st.nextToken ().trim ();
            if (token.length () == 0) {
                continue;
            }
            if (!token.endsWith ("?") && !token.endsWith (".")) {
                token = token + ".";
            }
            sents.add (token);
        }
        return sents;
    }

    /**
     * Merges {@code addsyns} into the synonym sets for {@code word} under the
     * given POS tag, and mirrors the additions into the db-derived synonyms.
     *
     * @return true if at least one genuinely new synonym was added
     */
    public boolean addSynonyms (String word, String[] addsyns, String tag) {
        int news = 0;
        String[] ar = W.synsget (word, tag);
        ArrayList<String> arl = new ArrayList<String> ();
        if (ar != null) {
            arl.addAll (Arrays.asList (ar));
        }
        for (String k : addsyns) {
            if (!arl.contains (k)) {
                arl.add (k);
                news++;
            }
        }
        ar = arl.toArray (new String[0]);
        W.synsput (word, ar, tag);
        // Keep the database-derived synonym table in sync as well.
        ar = W.dbsyns.get (word);
        if (ar != null) {
            arl = new ArrayList<String> ();
            arl.addAll (Arrays.asList (ar));
            for (String k : addsyns) {
                if (!arl.contains (k)) {
                    arl.add (k);
                }
            }
            ar = arl.toArray (new String[0]);
            W.dbsyns.put (word, ar);
        }
        //W.dumpSynonyms();
        return news > 0;
    }

    /**
     * Rebuilds the sentence stored under {@code key} with fresh substitutions
     * and okay-variants, replacing the stored copy.
     *
     * @return the regenerated sentence, or null if {@code key} is unknown
     */
    public Sentence generateAlts (String key) {
        TreeMap<String, Sentence> sentences = P.sentences;
        Sentence old = sentences.get (key);
        if (old == null) {
            return null;
        }
        Sentence sent = new Sentence (old.orig, old.form, old.tree);
        sent.setSelectionhandler (old.getSelectionhandler ());
        sent.multiwordsubs (P, W);
        sent.generateokays ();
        sentences.put (key, sent);
        return sent;
    }

    /**
     * Parses a single statement into a sentence, registers it and applies
     * multi-word substitutions.
     *
     * @return a SentenceX wrapper, or null if parsing failed
     */
    public SentenceX createSentence (String statement) {
        if (P == null) {
            P = new Parse ();
        }
        SentenceX sx = null;
        statement = Parse.padQuotes (GrammarGenerator.regxQuoted, statement);
        Sentence sent = P.doparse (statement);
        if (sent != null) {
            // sent is known non-null here; no ternary null check needed.
            sx = new SentenceX (sent);
            P.sentences.put (sent.orig, sent);
            sent.multiwordsubs (P, W);
            //sent.generateokays ();
        }
        return sx;
    }

    public static void removeSentence (Object key) {
        P.sentences.remove (key);
    }

    /**
     * Removes {@code syn} from the synonym set of {@code word} for the given
     * POS tag, both in the WnLink store and in the db-derived table.
     */
    public void removeSynonym (String word, String syn, String tag) {
        // FIX: previously passed `word` as the third argument; the
        // synsget/synsput pattern takes the POS tag last, so the tag parameter
        // was silently ignored.
        W.synsremove (word, syn, tag);
        String[] ar = W.dbsyns.get (word);
        if (ar == null) {
            return;
        }
        ArrayList<String> arl = new ArrayList<String> ();
        arl.addAll (Arrays.asList (ar));
        arl.remove (syn);
        ar = arl.toArray (new String[0]);
        W.dbsyns.put (word, ar);
    }
}
/*** * ASM: a very small and fast Java bytecode manipulation framework * Copyright (c) 2000-2007 INRIA, France Telecom * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. Neither the name of the copyright holders nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF * THE POSSIBILITY OF SUCH DAMAGE. 
*/ package alterrs.asm.util; import java.io.FileInputStream; import java.io.PrintWriter; import alterrs.asm.AnnotationVisitor; import alterrs.asm.ClassReader; import alterrs.asm.ClassVisitor; import alterrs.asm.FieldVisitor; import alterrs.asm.MethodVisitor; import alterrs.asm.Opcodes; /** * A {@link alterrs.asm.ClassVisitor} that prints the ASM code that generates * the classes it visits. This class visitor can be used to quickly write ASM * code to generate some given bytecode: * <ul> * <li>write the Java source code equivalent to the bytecode you want to * generate;</li> * <li>compile it with <tt>javac</tt>;</li> * <li>make a {@link ASMifierClassVisitor} visit this compiled class (see the * {@link #main main} method);</li> * <li>edit the generated source code, if necessary.</li> * </ul> * The source code printed when visiting the <tt>Hello</tt> class is the * following: * <p> * <blockquote> * <p/> * * <pre> * import com.alterrs.asm.*; * * public class HelloDump implements Opcodes { * * public static byte[] dump() throws Exception { * * ClassWriter cw = new ClassWriter(0); * FieldVisitor fv; * MethodVisitor mv; * AnnotationVisitor av0; * * cw.visit(49, ACC_PUBLIC + ACC_SUPER, &quot;Hello&quot;, null, &quot;java/lang/Object&quot;, * null); * * cw.visitSource(&quot;Hello.java&quot;, null); * * { * mv = cw.visitMethod(ACC_PUBLIC, &quot;&lt;init&gt;&quot;, &quot;()V&quot;, null, null); * mv.visitVarInsn(ALOAD, 0); * mv.visitMethodInsn(INVOKESPECIAL, &quot;java/lang/Object&quot;, &quot;&lt;init&gt;&quot;, * &quot;()V&quot;); * mv.visitInsn(RETURN); * mv.visitMaxs(1, 1); * mv.visitEnd(); * } * { * mv = cw.visitMethod(ACC_PUBLIC + ACC_STATIC, &quot;main&quot;, * &quot;([Ljava/lang/String;)V&quot;, null, null); * mv.visitFieldInsn(GETSTATIC, &quot;java/lang/System&quot;, &quot;out&quot;, * &quot;Ljava/io/PrintStream;&quot;); * mv.visitLdcInsn(&quot;hello&quot;); * mv.visitMethodInsn(INVOKEVIRTUAL, &quot;java/io/PrintStream&quot;, &quot;println&quot;, * 
&quot;(Ljava/lang/String;)V&quot;); * mv.visitInsn(RETURN); * mv.visitMaxs(2, 1); * mv.visitEnd(); * } * cw.visitEnd(); * * return cw.toByteArray(); * } * } * * </pre> * <p/> * </blockquote> where <tt>Hello</tt> is defined by: * <p> * <blockquote> * <p/> * * <pre> * public class Hello { * * public static void main(String[] args) { * System.out.println(&quot;hello&quot;); * } * } * </pre> * <p/> * </blockquote> * * @author Eric Bruneton * @author Eugene Kuleshov */ public class ASMifierClassVisitor extends ASMifierAbstractVisitor implements ClassVisitor { /** * Pseudo access flag used to distinguish class access flags. */ private static final int ACCESS_CLASS = 262144; /** * Pseudo access flag used to distinguish field access flags. */ private static final int ACCESS_FIELD = 524288; /** * Pseudo access flag used to distinguish inner class flags. */ private static final int ACCESS_INNER = 1048576; /** * The print writer to be used to print the class. */ protected final PrintWriter pw; /** * Prints the ASM source code to generate the given class to the standard * output. * <p> * Usage: ASMifierClassVisitor [-debug] &lt;fully qualified class name or * class file name&gt; * * @param args * the command line arguments. * @throws Exception * if the class cannot be found, or if an IO exception occurs. 
*/ public static void main(final String[] args) throws Exception { int i = 0; int flags = ClassReader.SKIP_DEBUG; boolean ok = true; if (args.length < 1 || args.length > 2) { ok = false; } if (ok && "-debug".equals(args[0])) { i = 1; flags = 0; if (args.length != 2) { ok = false; } } if (!ok) { System.err .println("Prints the ASM code to generate the given class."); System.err.println("Usage: ASMifierClassVisitor [-debug] " + "<fully qualified class name or class file name>"); return; } ClassReader cr; if (args[i].endsWith(".class") || args[i].indexOf('\\') > -1 || args[i].indexOf('/') > -1) { cr = new ClassReader(new FileInputStream(args[i])); } else { cr = new ClassReader(args[i]); } cr.accept(new ASMifierClassVisitor(new PrintWriter(System.out)), getDefaultAttributes(), flags); } /** * Constructs a new {@link ASMifierClassVisitor} object. * * @param pw * the print writer to be used to print the class. */ public ASMifierClassVisitor(final PrintWriter pw) { super("cw"); this.pw = pw; } // ------------------------------------------------------------------------ // Implementation of the ClassVisitor interface // ------------------------------------------------------------------------ public void visit(final int version, final int access, final String name, final String signature, final String superName, final String[] interfaces) { String simpleName; int n = name.lastIndexOf('/'); if (n == -1) { simpleName = name; } else { text.add("package asm." 
+ name.substring(0, n).replace('/', '.') + ";\n"); simpleName = name.substring(n + 1); } text.add("import java.util.*;\n"); text.add("import com.alterrs.asm.*;\n"); text.add("import com.alterrs.asm.attrs.*;\n"); text.add("public class " + simpleName + "Dump implements Opcodes {\n\n"); text.add("public static byte[] dump () throws Exception {\n\n"); text.add("ClassWriter cw = new ClassWriter(0);\n"); text.add("FieldVisitor fv;\n"); text.add("MethodVisitor mv;\n"); text.add("AnnotationVisitor av0;\n\n"); buf.setLength(0); buf.append("cw.visit("); switch (version) { case Opcodes.V1_1: buf.append("V1_1"); break; case Opcodes.V1_2: buf.append("V1_2"); break; case Opcodes.V1_3: buf.append("V1_3"); break; case Opcodes.V1_4: buf.append("V1_4"); break; case Opcodes.V1_5: buf.append("V1_5"); break; case Opcodes.V1_6: buf.append("V1_6"); break; default: buf.append(version); break; } buf.append(", "); appendAccess(access | ACCESS_CLASS); buf.append(", "); appendConstant(name); buf.append(", "); appendConstant(signature); buf.append(", "); appendConstant(superName); buf.append(", "); if (interfaces != null && interfaces.length > 0) { buf.append("new String[] {"); for (int i = 0; i < interfaces.length; ++i) { buf.append(i == 0 ? 
" " : ", "); appendConstant(interfaces[i]); } buf.append(" }"); } else { buf.append("null"); } buf.append(");\n\n"); text.add(buf.toString()); } public void visitSource(final String file, final String debug) { buf.setLength(0); buf.append("cw.visitSource("); appendConstant(file); buf.append(", "); appendConstant(debug); buf.append(");\n\n"); text.add(buf.toString()); } public void visitOuterClass(final String owner, final String name, final String desc) { buf.setLength(0); buf.append("cw.visitOuterClass("); appendConstant(owner); buf.append(", "); appendConstant(name); buf.append(", "); appendConstant(desc); buf.append(");\n\n"); text.add(buf.toString()); } public void visitInnerClass(final String name, final String outerName, final String innerName, final int access) { buf.setLength(0); buf.append("cw.visitInnerClass("); appendConstant(name); buf.append(", "); appendConstant(outerName); buf.append(", "); appendConstant(innerName); buf.append(", "); appendAccess(access | ACCESS_INNER); buf.append(");\n\n"); text.add(buf.toString()); } public FieldVisitor visitField(final int access, final String name, final String desc, final String signature, final Object value) { buf.setLength(0); buf.append("{\n"); buf.append("fv = cw.visitField("); appendAccess(access | ACCESS_FIELD); buf.append(", "); appendConstant(name); buf.append(", "); appendConstant(desc); buf.append(", "); appendConstant(signature); buf.append(", "); appendConstant(value); buf.append(");\n"); text.add(buf.toString()); ASMifierFieldVisitor aav = new ASMifierFieldVisitor(); text.add(aav.getText()); text.add("}\n"); return aav; } public MethodVisitor visitMethod(final int access, final String name, final String desc, final String signature, final String[] exceptions) { buf.setLength(0); buf.append("{\n"); buf.append("mv = cw.visitMethod("); appendAccess(access); buf.append(", "); appendConstant(name); buf.append(", "); appendConstant(desc); buf.append(", "); appendConstant(signature); buf.append(", "); if 
(exceptions != null && exceptions.length > 0) { buf.append("new String[] {"); for (int i = 0; i < exceptions.length; ++i) { buf.append(i == 0 ? " " : ", "); appendConstant(exceptions[i]); } buf.append(" }"); } else { buf.append("null"); } buf.append(");\n"); text.add(buf.toString()); ASMifierMethodVisitor acv = createASMifierMethodVisitor(); text.add(acv.getText()); text.add("}\n"); return acv; } protected ASMifierMethodVisitor createASMifierMethodVisitor() { return new ASMifierMethodVisitor(); } public AnnotationVisitor visitAnnotation(final String desc, final boolean visible) { buf.setLength(0); buf.append("{\n"); buf.append("av0 = cw.visitAnnotation("); appendConstant(desc); buf.append(", "); buf.append(visible); buf.append(");\n"); text.add(buf.toString()); ASMifierAnnotationVisitor av = new ASMifierAnnotationVisitor(0); text.add(av.getText()); text.add("}\n"); return av; } public void visitEnd() { text.add("cw.visitEnd();\n\n"); text.add("return cw.toByteArray();\n"); text.add("}\n"); text.add("}\n"); printList(pw, text); pw.flush(); } // ------------------------------------------------------------------------ // Utility methods // ------------------------------------------------------------------------ /** * Appends a string representation of the given access modifiers to * {@link #buf buf}. * * @param access * some access modifiers. 
*/ void appendAccess(final int access) { boolean first = true; if ((access & Opcodes.ACC_PUBLIC) != 0) { buf.append("ACC_PUBLIC"); first = false; } if ((access & Opcodes.ACC_PRIVATE) != 0) { buf.append("ACC_PRIVATE"); first = false; } if ((access & Opcodes.ACC_PROTECTED) != 0) { buf.append("ACC_PROTECTED"); first = false; } if ((access & Opcodes.ACC_FINAL) != 0) { if (!first) { buf.append(" + "); } buf.append("ACC_FINAL"); first = false; } if ((access & Opcodes.ACC_STATIC) != 0) { if (!first) { buf.append(" + "); } buf.append("ACC_STATIC"); first = false; } if ((access & Opcodes.ACC_SYNCHRONIZED) != 0) { if (!first) { buf.append(" + "); } if ((access & ACCESS_CLASS) == 0) { buf.append("ACC_SYNCHRONIZED"); } else { buf.append("ACC_SUPER"); } first = false; } if ((access & Opcodes.ACC_VOLATILE) != 0 && (access & ACCESS_FIELD) != 0) { if (!first) { buf.append(" + "); } buf.append("ACC_VOLATILE"); first = false; } if ((access & Opcodes.ACC_BRIDGE) != 0 && (access & ACCESS_CLASS) == 0 && (access & ACCESS_FIELD) == 0) { if (!first) { buf.append(" + "); } buf.append("ACC_BRIDGE"); first = false; } if ((access & Opcodes.ACC_VARARGS) != 0 && (access & ACCESS_CLASS) == 0 && (access & ACCESS_FIELD) == 0) { if (!first) { buf.append(" + "); } buf.append("ACC_VARARGS"); first = false; } if ((access & Opcodes.ACC_TRANSIENT) != 0 && (access & ACCESS_FIELD) != 0) { if (!first) { buf.append(" + "); } buf.append("ACC_TRANSIENT"); first = false; } if ((access & Opcodes.ACC_NATIVE) != 0 && (access & ACCESS_CLASS) == 0 && (access & ACCESS_FIELD) == 0) { if (!first) { buf.append(" + "); } buf.append("ACC_NATIVE"); first = false; } if ((access & Opcodes.ACC_ENUM) != 0 && ((access & ACCESS_CLASS) != 0 || (access & ACCESS_FIELD) != 0 || (access & ACCESS_INNER) != 0)) { if (!first) { buf.append(" + "); } buf.append("ACC_ENUM"); first = false; } if ((access & Opcodes.ACC_ANNOTATION) != 0 && (access & ACCESS_CLASS) != 0) { if (!first) { buf.append(" + "); } buf.append("ACC_ANNOTATION"); first 
= false; } if ((access & Opcodes.ACC_ABSTRACT) != 0) { if (!first) { buf.append(" + "); } buf.append("ACC_ABSTRACT"); first = false; } if ((access & Opcodes.ACC_INTERFACE) != 0) { if (!first) { buf.append(" + "); } buf.append("ACC_INTERFACE"); first = false; } if ((access & Opcodes.ACC_STRICT) != 0) { if (!first) { buf.append(" + "); } buf.append("ACC_STRICT"); first = false; } if ((access & Opcodes.ACC_SYNTHETIC) != 0) { if (!first) { buf.append(" + "); } buf.append("ACC_SYNTHETIC"); first = false; } if ((access & Opcodes.ACC_DEPRECATED) != 0) { if (!first) { buf.append(" + "); } buf.append("ACC_DEPRECATED"); first = false; } if (first) { buf.append('0'); } } }
/* // Licensed to Julian Hyde under one or more contributor license // agreements. See the NOTICE file distributed with this work for // additional information regarding copyright ownership. // // Julian Hyde licenses this file to you under the Apache License, // Version 2.0 (the "License"); you may not use this file except in // compliance with the License. You may obtain a copy of the License at: // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. */ package org.eigenbase.resgen; import java.text.MessageFormat; import java.text.Format; import java.text.NumberFormat; import java.text.DateFormat; import java.util.ResourceBundle; import java.util.Properties; import java.lang.reflect.Method; import java.lang.reflect.InvocationTargetException; /** * Definition of a resource such as a parameterized message or exception. * * <p>A resource is identified within a {@link ResourceBundle} by a text * <em>key</em>, and has a <em>message</em> in its base locale (which is * usually US-English (en_US)). It may also have a set of properties, which are * represented as name-value pairs. * * <p>A resource definition is immutable. * * @author jhyde */ public class ResourceDefinition { public final String key; public final String baseMessage; private final String[] props; private static final String[] EmptyStringArray = new String[0]; public static final int TYPE_UNKNOWN = -1; public static final int TYPE_STRING = 0; public static final int TYPE_NUMBER = 1; public static final int TYPE_DATE = 2; public static final int TYPE_TIME = 3; private static final String[] TypeNames = {"string", "number", "date", "time"}; /** * Creates a resource definition with no properties. 
* * @param key Unique name for this resource definition. * @param baseMessage Message for this resource definition in the base * locale. */ public ResourceDefinition(String key, String baseMessage) { this(key, baseMessage, null); } /** * Creates a resource definition. * * @param key Unique name for this resource definition. * @param baseMessage Message for this resource definition in the base * locale. * @param props Array of property name/value pairs. * <code>null</code> means the same as an empty array. */ public ResourceDefinition(String key, String baseMessage, String[] props) { this.key = key; this.baseMessage = baseMessage; if (props == null) { props = EmptyStringArray; } assert props.length % 2 == 0 : "Must have even number of property names/values"; this.props = props; } /** * Returns this resource definition's key. * * @return Key */ public String getKey() { return key; } /** * Returns this resource definition's message in the base locale. * (To find the message in another locale, you will need to load a * resource bundle for that locale.) * * @return Base message */ public String getBaseMessage() { return baseMessage; } /** * Returns the properties of this resource definition. * * @return Properties */ public Properties getProperties() { final Properties properties = new Properties(); for (int i = 0; i < props.length; i++) { String prop = props[i]; String value = props[++i]; properties.setProperty(prop, value); } return properties; } /** * Returns the types of arguments. * * @return Argument types */ public String[] getArgTypes() { return getArgTypes(baseMessage, TypeNames); } /** * Creates an instance of this definition with a set of parameters. * This is a factory method, which may be overridden by a derived class. * * @param bundle Resource bundle the resource instance will belong to * (This contains the locale, among other things.) * @param args Arguments to populate the message's parameters. 
* The arguments must be consistent in number and type with the results * of {@link #getArgTypes}. * @return Resource instance */ public ResourceInstance instantiate(ResourceBundle bundle, Object[] args) { return new Instance(bundle, this, args); } /** * Parses a message for the arguments inside it, and * returns an array with the types of those arguments. * * <p>For example, <code>getArgTypes("I bought {0,number} {2}s", * new String[] {"string", "number", "date", "time"})</code> * yields {"number", null, "string"}. * Note the null corresponding to missing message #1. * * @param message Message to be parsed. * @param typeNames Strings to return for types. * @return Array of type names */ protected static String[] getArgTypes(String message, String[] typeNames) { assert typeNames.length == 4; Format[] argFormats; try { // We'd like to do // argFormats = format.getFormatsByArgumentIndex() // but it doesn't exist until JDK 1.4, and we'd like this code // to work earlier. Method method = MessageFormat.class.getMethod( "getFormatsByArgumentIndex", (Class[]) null); try { MessageFormat format = new MessageFormat(message); argFormats = (Format[]) method.invoke(format, (Object[]) null); String[] argTypes = new String[argFormats.length]; for (int i = 0; i < argFormats.length; i++) { int x = formatToType(argFormats[i]); argTypes[i] = typeNames[x]; } return argTypes; } catch (IllegalAccessException e) { throw new RuntimeException(e.toString()); } catch (IllegalArgumentException e) { throw new RuntimeException(e.toString()); } catch (InvocationTargetException e) { throw new RuntimeException(e.toString()); } } catch (NoSuchMethodException e) { // Fallback pre JDK 1.4 return getArgTypesByHand(message, typeNames); } catch (SecurityException e) { throw new RuntimeException(e.toString()); } } protected static String [] getArgTypesByHand( String message, String[] typeNames) { assert typeNames.length == 4; String[] argTypes = new String[10]; int length = 0; for (int i = 0; i < 10; i++) 
{ final int type = getArgType(i, message); if (type != TYPE_UNKNOWN) { length = i + 1; argTypes[i] = typeNames[type]; } } // Created a truncated copy (but keep intervening nulls). String[] argTypes2 = new String[length]; System.arraycopy(argTypes, 0, argTypes2, 0, length); return argTypes2; } /** * Returns the type of the <code>i</code>th argument inside a message, * or {@link #TYPE_UNKNOWN} if not found. * * @param i Ordinal of argument * @param message Message to parse * @return Type code ({@link #TYPE_STRING} etc.) */ protected static int getArgType(int i, String message) { String arg = "{" + Integer.toString(i); // e.g. "{1" int index = message.lastIndexOf(arg); if (index < 0) { return TYPE_UNKNOWN; } index += arg.length(); int end = message.length(); while (index < end && message.charAt(index) == ' ') { index++; } if (index < end && message.charAt(index) == ',') { index++; while (index < end && message.charAt(index) == ' ') { index++; } if (index < end) { String sub = message.substring(index); if (sub.startsWith("number")) { return TYPE_NUMBER; } else if (sub.startsWith("date")) { return TYPE_DATE; } else if (sub.startsWith("time")) { return TYPE_TIME; } else if (sub.startsWith("choice")) { return TYPE_UNKNOWN; } } } return TYPE_STRING; } /** * Converts a {@link Format} to a type code ({@link #TYPE_STRING} etc.) */ private static int formatToType(Format format) { if (format == null) { return TYPE_STRING; } else if (format instanceof NumberFormat) { return TYPE_NUMBER; } else if (format instanceof DateFormat) { // might be date or time, but assume it's date return TYPE_DATE; } else { return TYPE_STRING; } } /** * Default implementation of {@link ResourceInstance}. 
*/ private static class Instance implements ResourceInstance { ResourceDefinition definition; ResourceBundle bundle; Object[] args; public Instance( ResourceBundle bundle, ResourceDefinition definition, Object[] args) { this.definition = definition; this.bundle = bundle; this.args = args; } public String toString() { String message = bundle.getString(definition.key); MessageFormat format = new MessageFormat(message); format.setLocale(bundle.getLocale()); String formattedMessage = format.format(args); return formattedMessage; } } } // End ResourceDefinition.java
/* * Copyright 2019-present Open Networking Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.k8snetworking.web; import io.fabric8.kubernetes.client.KubernetesClient; import org.onlab.util.ItemNotFoundException; import org.onosproject.k8snetworking.api.K8sEndpointsAdminService; import org.onosproject.k8snetworking.api.K8sIngressAdminService; import org.onosproject.k8snetworking.api.K8sNamespaceAdminService; import org.onosproject.k8snetworking.api.K8sNetworkAdminService; import org.onosproject.k8snetworking.api.K8sNetworkPolicyAdminService; import org.onosproject.k8snetworking.api.K8sPodAdminService; import org.onosproject.k8snetworking.api.K8sServiceAdminService; import org.onosproject.k8snetworking.util.K8sNetworkingUtil; import org.onosproject.k8snode.api.K8sApiConfig; import org.onosproject.k8snode.api.K8sApiConfigService; import org.onosproject.k8snode.api.K8sNode; import org.onosproject.k8snode.api.K8sNodeAdminService; import org.onosproject.k8snode.api.K8sNodeState; import org.onosproject.rest.AbstractWebResource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import static java.lang.Thread.sleep; import static org.onosproject.k8snetworking.util.K8sNetworkingUtil.syncPortFromPod; import static org.onosproject.k8snode.api.K8sNode.Type.MASTER; import static 
org.onosproject.k8snode.api.K8sNode.Type.MINION;
import static org.onosproject.k8snode.api.K8sNodeState.COMPLETE;

/**
 * REST interface for synchronizing kubernetes network states and rules.
 */
@Path("management")
public class K8sManagementWebResource extends AbstractWebResource {
    private final Logger log = LoggerFactory.getLogger(getClass());

    private static final String PORT_ID = "portId";
    private static final String DEVICE_ID = "deviceId";
    private static final String PORT_NUMBER = "portNumber";
    private static final String IP_ADDRESS = "ipAddress";
    private static final String MAC_ADDRESS = "macAddress";
    private static final String NETWORK_ID = "networkId";

    private static final long SLEEP_MIDDLE_MS = 3000; // we wait 3s
    private static final long TIMEOUT_MS = 10000; // we wait 10s

    private final K8sApiConfigService configService =
            get(K8sApiConfigService.class);
    private final K8sPodAdminService podAdminService =
            get(K8sPodAdminService.class);
    private final K8sNamespaceAdminService namespaceAdminService =
            get(K8sNamespaceAdminService.class);
    private final K8sServiceAdminService serviceAdminService =
            get(K8sServiceAdminService.class);
    private final K8sIngressAdminService ingressAdminService =
            get(K8sIngressAdminService.class);
    private final K8sEndpointsAdminService endpointsAdminService =
            get(K8sEndpointsAdminService.class);
    private final K8sNetworkAdminService networkAdminService =
            get(K8sNetworkAdminService.class);
    private final K8sNodeAdminService nodeAdminService =
            get(K8sNodeAdminService.class);
    private final K8sNetworkPolicyAdminService policyAdminService =
            get(K8sNetworkPolicyAdminService.class);

    /**
     * Synchronizes the all states with kubernetes API server.
     *
     * @return 200 OK with sync result, 404 not found
     * @throws InterruptedException exception
     */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    @Path("sync/states")
    public Response syncStates() {
        K8sApiConfig config =
                configService.apiConfigs().stream().findAny().orElse(null);
        if (config == null) {
            throw new ItemNotFoundException("Failed to find valid kubernetes API configuration.");
        }

        KubernetesClient client = K8sNetworkingUtil.k8sClient(config);

        if (client == null) {
            throw new ItemNotFoundException("Failed to connect to kubernetes API server.");
        }

        // For each resource kind, update the local store entry if it already
        // exists (matched by UID), otherwise create it.
        client.namespaces().list().getItems().forEach(ns -> {
            if (namespaceAdminService.namespace(ns.getMetadata().getUid()) != null) {
                namespaceAdminService.updateNamespace(ns);
            } else {
                namespaceAdminService.createNamespace(ns);
            }
        });

        client.services().inAnyNamespace().list().getItems().forEach(svc -> {
            if (serviceAdminService.service(svc.getMetadata().getUid()) != null) {
                serviceAdminService.updateService(svc);
            } else {
                serviceAdminService.createService(svc);
            }
        });

        client.endpoints().inAnyNamespace().list().getItems().forEach(ep -> {
            if (endpointsAdminService.endpoints(ep.getMetadata().getUid()) != null) {
                endpointsAdminService.updateEndpoints(ep);
            } else {
                endpointsAdminService.createEndpoints(ep);
            }
        });

        client.pods().inAnyNamespace().list().getItems().forEach(pod -> {
            if (podAdminService.pod(pod.getMetadata().getUid()) != null) {
                podAdminService.updatePod(pod);
            } else {
                podAdminService.createPod(pod);
            }

            // Pods also carry port information that must be mirrored into
            // the network store.
            syncPortFromPod(pod, networkAdminService);
        });

        client.extensions().ingresses().inAnyNamespace().list().getItems().forEach(ingress -> {
            if (ingressAdminService.ingress(ingress.getMetadata().getUid()) != null) {
                ingressAdminService.updateIngress(ingress);
            } else {
                ingressAdminService.createIngress(ingress);
            }
        });

        client.network().networkPolicies().inAnyNamespace().list().getItems().forEach(policy -> {
            if (policyAdminService.networkPolicy(policy.getMetadata().getUid()) != null) {
                policyAdminService.updateNetworkPolicy(policy);
            } else {
                policyAdminService.createNetworkPolicy(policy);
            }
        });

        return ok(mapper().createObjectNode()).build();
    }

    /**
     * Synchronizes the flow rules.
     *
     * @return 200 OK with sync result, 404 not found
     */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    @Path("sync/rules")
    public Response syncRules() {

        syncRulesBase();
        return ok(mapper().createObjectNode()).build();
    }

    /**
     * Re-installs flow rules for all nodes that have completed bootstrapping,
     * masters first, then minions.
     */
    private void syncRulesBase() {
        nodeAdminService.completeNodes(MASTER).forEach(this::syncRulesBaseForNode);
        nodeAdminService.completeNodes(MINION).forEach(this::syncRulesBaseForNode);
    }

    /**
     * Resets the given node to INIT state and waits (up to TIMEOUT_MS, polling
     * every SLEEP_MIDDLE_MS) for it to come back to COMPLETE, re-triggering
     * the update on each failed poll.
     *
     * @param k8sNode node to re-synchronize
     */
    private void syncRulesBaseForNode(K8sNode k8sNode) {
        K8sNode updated = k8sNode.updateState(K8sNodeState.INIT);
        nodeAdminService.updateNode(updated);

        boolean result = true;
        long timeoutExpiredMs = System.currentTimeMillis() + TIMEOUT_MS;

        while (nodeAdminService.node(k8sNode.hostname()).state() != COMPLETE) {

            long  waitMs = timeoutExpiredMs - System.currentTimeMillis();

            try {
                sleep(SLEEP_MIDDLE_MS);
            } catch (InterruptedException e) {
                // Restore the interrupt status and abort the retry loop; the
                // original code swallowed the interrupt and kept looping.
                Thread.currentThread().interrupt();
                log.error("Exception caused during node synchronization...");
                result = false;
                break;
            }

            if (nodeAdminService.node(k8sNode.hostname()).state() == COMPLETE) {
                break;
            } else {
                nodeAdminService.updateNode(updated);
                log.info("Failed to synchronize flow rules, retrying...");
            }

            if (waitMs <= 0) {
                result = false;
                break;
            }
        }

        if (result) {
            log.info("Successfully synchronize flow rules for node {}!",
                    k8sNode.hostname());
        } else {
            log.warn("Failed to synchronize flow rules for node {}.",
                    k8sNode.hostname());
        }
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.client; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Random; import java.util.concurrent.Callable; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.exceptions.PreemptiveFastFailException; import org.apache.hadoop.hbase.ipc.RpcExecutor; import org.apache.hadoop.hbase.ipc.SimpleRpcScheduler; import org.apache.hadoop.hbase.testclassification.ClientTests; import 
org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.LoadTestKVGenerator;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Tests for the client-side preemptive fast fail (PFFE) mechanism, run
 * against a single-regionserver mini cluster.
 */
@Category({MediumTests.class, ClientTests.class})
public class TestFastFail {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestFastFail.class);

  private static final Logger LOG = LoggerFactory.getLogger(TestFastFail.class);

  private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();

  // Column family shared by every test table.
  private static byte[] FAMILY = Bytes.toBytes("testFamily");

  private static final Random random = new Random();

  // Number of regionservers started by the mini cluster.
  private static int SLAVES = 1;

  private static byte[] QUALIFIER = Bytes.toBytes("testQualifier");

  // Base delay (ms) used to derive client pause/timeout settings and to
  // classify slow workers as "blocked".
  private static final int SLEEPTIME = 5000;

  @Rule
  public TestName name = new TestName();

  /**
   * Boots the mini cluster once for the whole class.
   *
   * @throws java.lang.Exception
   */
  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
    // Just to prevent fastpath FIFO from picking calls up bypassing the queue.
    TEST_UTIL.getConfiguration().set(
        RpcExecutor.CALL_QUEUE_TYPE_CONF_KEY, "deadline");
    TEST_UTIL.startMiniCluster(SLAVES);
  }

  /**
   * Tears the mini cluster down after all tests have run.
   *
   * @throws java.lang.Exception
   */
  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
  }

  /**
   * Resets the interceptor counters so tests do not leak state into each
   * other.
   *
   * @throws java.lang.Exception
   */
  @Before
  public void setUp() throws Exception {
    MyPreemptiveFastFailInterceptor.numBraveSouls.set(0);
    CallQueueTooBigPffeInterceptor.numCallQueueTooBig.set(0);
  }

  /**
   * @throws java.lang.Exception
   */
  @After
  public void tearDown() throws Exception {
    // Nothing to do.
}

/**
 * End-to-end PFFE check: worker threads do one successful get each, pause
 * at a latch, a regionserver is killed, and the second round of gets must
 * surface PreemptiveFastFailException for most threads while at least one
 * "brave soul" retries through the interceptor.
 */
@Ignore ("Can go zombie -- see HBASE-14421; FIX")
@Test
public void testFastFail() throws IOException, InterruptedException {
  Admin admin = TEST_UTIL.getAdmin();

  final String tableName = name.getMethodName();
  HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(Bytes
      .toBytes(tableName)));
  desc.addFamily(new HColumnDescriptor(FAMILY));
  admin.createTable(desc, Bytes.toBytes("aaaa"), Bytes.toBytes("zzzz"), 32);
  final long numRows = 1000;

  Configuration conf = TEST_UTIL.getConfiguration();
  conf.setLong(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, SLEEPTIME * 100);
  conf.setInt(HConstants.HBASE_CLIENT_PAUSE, SLEEPTIME / 10);
  conf.setBoolean(HConstants.HBASE_CLIENT_FAST_FAIL_MODE_ENABLED, true);
  conf.setLong(HConstants.HBASE_CLIENT_FAST_FAIL_THREASHOLD_MS, 0);
  conf.setClass(HConstants.HBASE_CLIENT_FAST_FAIL_INTERCEPTOR_IMPL,
      MyPreemptiveFastFailInterceptor.class,
      PreemptiveFastFailInterceptor.class);
  final Connection connection = ConnectionFactory.createConnection(conf);

  /**
   * Write numRows worth of data, so that the workers can arbitrarily read.
   */
  List<Put> puts = new ArrayList<>();
  for (long i = 0; i < numRows; i++) {
    byte[] rowKey = longToByteArrayKey(i);
    Put put = new Put(rowKey);
    byte[] value = rowKey; // value is the same as the row key
    put.addColumn(FAMILY, QUALIFIER, value);
    puts.add(put);
  }
  try (Table table = connection.getTable(TableName.valueOf(tableName))) {
    table.put(puts);
    LOG.info("Written all puts.");
  }

  /**
   * The number of threads that are going to perform actions against the test
   * table.
   */
  int nThreads = 100;
  ExecutorService service = Executors.newFixedThreadPool(nThreads);
  final CountDownLatch continueOtherHalf = new CountDownLatch(1);
  final CountDownLatch doneHalfway = new CountDownLatch(nThreads);

  final AtomicInteger numSuccessfullThreads = new AtomicInteger(0);
  final AtomicInteger numFailedThreads = new AtomicInteger(0);

  // The total time taken for the threads to perform the second put;
  final AtomicLong totalTimeTaken = new AtomicLong(0);
  final AtomicInteger numBlockedWorkers = new AtomicInteger(0);
  final AtomicInteger numPreemptiveFastFailExceptions = new AtomicInteger(0);

  List<Future<Boolean>> futures = new ArrayList<>();
  for (int i = 0; i < nThreads; i++) {
    futures.add(service.submit(new Callable<Boolean>() {
      /**
       * The workers are going to perform a couple of reads. The second read
       * will follow the killing of a regionserver so that we make sure that
       * some of threads go into PreemptiveFastFailExcception
       */
      @Override
      public Boolean call() throws Exception {
        try (Table table = connection.getTable(TableName.valueOf(tableName))) {
          Thread.sleep(Math.abs(random.nextInt()) % 250); // Add some jitter here
          byte[] row = longToByteArrayKey(Math.abs(random.nextLong()) % numRows);
          Get g = new Get(row);
          g.addColumn(FAMILY, QUALIFIER);
          try {
            table.get(g);
          } catch (Exception e) {
            LOG.debug("Get failed : ", e);
            doneHalfway.countDown();
            return false;
          }

          // Done with one get, proceeding to do the next one.
          doneHalfway.countDown();
          continueOtherHalf.await();

          long startTime = System.currentTimeMillis();
          g = new Get(row);
          g.addColumn(FAMILY, QUALIFIER);
          try {
            table.get(g);
            // The get was successful
            numSuccessfullThreads.addAndGet(1);
          } catch (Exception e) {
            if (e instanceof PreemptiveFastFailException) {
              // We were issued a PreemptiveFastFailException
              numPreemptiveFastFailExceptions.addAndGet(1);
            }
            // Irrespective of PFFE, the request failed.
            numFailedThreads.addAndGet(1);
            return false;
          } finally {
            long enTime = System.currentTimeMillis();
            totalTimeTaken.addAndGet(enTime - startTime);
            if ((enTime - startTime) >= SLEEPTIME) {
              // Considering the slow workers as the blockedWorkers.
              // This assumes that the threads go full throttle at performing
              // actions. In case the thread scheduling itself is as slow as
              // SLEEPTIME, then this test might fail and so, we might have
              // set it to a higher number on slower machines.
              numBlockedWorkers.addAndGet(1);
            }
          }
          return true;
        } catch (Exception e) {
          LOG.error("Caught unknown exception", e);
          doneHalfway.countDown();
          return false;
        }
      }
    }));
  }

  doneHalfway.await();

  // Kill a regionserver
  TEST_UTIL.getHBaseCluster().getRegionServer(0).getRpcServer().stop();
  TEST_UTIL.getHBaseCluster().getRegionServer(0).stop("Testing");

  // Let the threads continue going
  continueOtherHalf.countDown();

  Thread.sleep(2 * SLEEPTIME);
  // Start a RS in the cluster
  TEST_UTIL.getHBaseCluster().startRegionServer();

  int numThreadsReturnedFalse = 0;
  int numThreadsReturnedTrue = 0;
  int numThreadsThrewExceptions = 0;
  for (Future<Boolean> f : futures) {
    try {
      numThreadsReturnedTrue += f.get() ? 1 : 0;
      numThreadsReturnedFalse += f.get() ? 0 : 1;
    } catch (Exception e) {
      numThreadsThrewExceptions++;
    }
  }
  LOG.debug("numThreadsReturnedFalse:"
      + numThreadsReturnedFalse
      + " numThreadsReturnedTrue:"
      + numThreadsReturnedTrue
      + " numThreadsThrewExceptions:"
      + numThreadsThrewExceptions
      + " numFailedThreads:"
      + numFailedThreads.get()
      + " numSuccessfullThreads:"
      + numSuccessfullThreads.get()
      + " numBlockedWorkers:"
      + numBlockedWorkers.get()
      + " totalTimeWaited: "
      + totalTimeTaken.get()
      / (numBlockedWorkers.get() == 0 ? Long.MAX_VALUE : numBlockedWorkers
          .get()) + " numPFFEs: " + numPreemptiveFastFailExceptions.get());

  assertEquals("The expected number of all the successfull and the failed "
      + "threads should equal the total number of threads that we spawned",
      nThreads, numFailedThreads.get() + numSuccessfullThreads.get());
  assertEquals(
      "All the failures should be coming from the secondput failure",
      numFailedThreads.get(), numThreadsReturnedFalse);
  assertEquals("Number of threads that threw execution exceptions "
      + "otherwise should be 0", 0, numThreadsThrewExceptions);
  assertEquals("The regionservers that returned true should equal to the"
      + " number of successful threads", numThreadsReturnedTrue,
      numSuccessfullThreads.get());
  assertTrue(
      "There will be atleast one thread that retried instead of failing",
      MyPreemptiveFastFailInterceptor.numBraveSouls.get() > 0);
  assertTrue(
      "There will be atleast one PreemptiveFastFail exception,"
          + " otherwise, the test makes little sense."
          + "numPreemptiveFastFailExceptions: "
          + numPreemptiveFastFailExceptions.get(),
      numPreemptiveFastFailExceptions.get() > 0);
  assertTrue(
      "Only few thread should ideally be waiting for the dead "
          + "regionserver to be coming back. numBlockedWorkers:"
          + numBlockedWorkers.get() + " threads that retried : "
          + MyPreemptiveFastFailInterceptor.numBraveSouls.get(),
      numBlockedWorkers.get() <= MyPreemptiveFastFailInterceptor.numBraveSouls
          .get());
}

/**
 * Verifies that CallQueueTooBigException alone does not push the client
 * into PFFE mode.
 */
@Test
public void testCallQueueTooBigExceptionDoesntTriggerPffe() throws Exception {
  Admin admin = TEST_UTIL.getAdmin();

  final String tableName = name.getMethodName();
  HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(Bytes
      .toBytes(tableName)));
  desc.addFamily(new HColumnDescriptor(FAMILY));
  admin.createTable(desc, Bytes.toBytes("aaaa"), Bytes.toBytes("zzzz"), 3);

  Configuration conf = TEST_UTIL.getConfiguration();
  conf.setLong(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, 100);
  conf.setInt(HConstants.HBASE_CLIENT_PAUSE, 500);
  conf.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 1);
  conf.setBoolean(HConstants.HBASE_CLIENT_FAST_FAIL_MODE_ENABLED, true);
  conf.setLong(HConstants.HBASE_CLIENT_FAST_FAIL_THREASHOLD_MS, 0);
  conf.setClass(HConstants.HBASE_CLIENT_FAST_FAIL_INTERCEPTOR_IMPL,
      CallQueueTooBigPffeInterceptor.class,
      PreemptiveFastFailInterceptor.class);
  final Connection connection = ConnectionFactory.createConnection(conf);

  //Set max call queues size to 0
  SimpleRpcScheduler srs = (SimpleRpcScheduler)
      TEST_UTIL.getHBaseCluster().getRegionServer(0).getRpcServer().getScheduler();
  Configuration newConf = HBaseConfiguration.create(TEST_UTIL.getConfiguration());
  newConf.setInt("hbase.ipc.server.max.callqueue.length", 0);
  srs.onConfigurationChange(newConf);

  try (Table table = connection.getTable(TableName.valueOf(tableName))) {
    Get get = new Get(new byte[1]);
    table.get(get);
  } catch (Throwable ex) {
    // NOTE(review): intentionally swallowed — the get is expected to fail;
    // only the interceptor counter asserted below matters.
  }

  assertEquals("We should have not entered PFFE mode on CQTBE, but we did;"
      + " number of times this mode should have been entered:", 0,
      CallQueueTooBigPffeInterceptor.numCallQueueTooBig.get());

  newConf = HBaseConfiguration.create(TEST_UTIL.getConfiguration());
  newConf.setInt("hbase.ipc.server.max.callqueue.length", 250);
  srs.onConfigurationChange(newConf);
}

// Counts "brave souls": threads the interceptor lets retry in spite of
// fast-fail mode being active.
public static class MyPreemptiveFastFailInterceptor extends
    PreemptiveFastFailInterceptor {
  public static AtomicInteger numBraveSouls = new AtomicInteger();

  @Override
  protected boolean shouldRetryInspiteOfFastFail(FailureInfo fInfo) {
    boolean ret = super.shouldRetryInspiteOfFastFail(fInfo);
    if (ret) numBraveSouls.addAndGet(1);
    return ret;
  }

  public MyPreemptiveFastFailInterceptor(Configuration conf) {
    super(conf);
  }
}

// Maps a long row id to its md5-prefixed string row key.
private byte[] longToByteArrayKey(long rowKey) {
  return Bytes.toBytes(LoadTestKVGenerator.md5PrefixedKey(rowKey));
}

// Counts every server failure reported to the interceptor; used to detect
// whether CQTBE pushed the client toward PFFE.
public static class CallQueueTooBigPffeInterceptor extends
    PreemptiveFastFailInterceptor {
  public static AtomicInteger numCallQueueTooBig = new AtomicInteger();

  @Override
  protected void handleFailureToServer(ServerName serverName, Throwable t) {
    super.handleFailureToServer(serverName, t);
    numCallQueueTooBig.incrementAndGet();
  }

  public CallQueueTooBigPffeInterceptor(Configuration conf) {
    super(conf);
  }
}
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.harmony.lang.reflect.parser; import java.io.Serializable; import java.lang.reflect.Constructor; import java.lang.reflect.Field; import java.lang.reflect.GenericDeclaration; import java.lang.reflect.GenericSignatureFormatError; import java.lang.reflect.Method; import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; import java.lang.reflect.TypeVariable; import org.apache.harmony.lang.reflect.implementation.ParameterizedTypeImpl; import org.apache.harmony.lang.reflect.implementation.TypeVariableImpl; import org.apache.harmony.lang.reflect.repository.ParameterizedTypeRepository; import org.apache.harmony.lang.reflect.repository.TypeVariableRepository; import org.apache.harmony.lang.reflect.support.AuxiliaryChecker; import org.apache.harmony.lang.reflect.support.AuxiliaryCreator; import org.apache.harmony.lang.reflect.support.AuxiliaryFinder; import org.apache.harmony.lang.reflect.support.AuxiliaryLoader; import org.apache.harmony.lang.reflect.support.AuxiliaryUtil; /** * @author Serguei S. 
Zapreyev */
public class Parser {

    /**
     * Discriminates which flavour of generic signature is being parsed; the
     * numeric value is the code handed to SignatureParser.
     */
    public static enum SignatureKind {
        FIELD_SIGNATURE(2), METHOD_SIGNATURE(3), CONSTRUCTOR_SIGNATURE(4), CLASS_SIGNATURE(1);
        SignatureKind(int value) {
            this.value = value;
        }
        private final int value;
        public int value() {
            return value;
        }
    }

    /**
     * Parses the raw signature string into an interim parse tree.
     * NOTE(review): the startPoint argument is unused here — the parser
     * receives only the signature and the kind code; confirm this is
     * intentional.
     */
    public static InterimGenericDeclaration parseSignature(String signature,
            SignatureKind kind,
            java.lang.reflect.GenericDeclaration startPoint)
            throws GenericSignatureFormatError {
        return SignatureParser.parseSignature(signature, kind.value());
    }

    //TODO: generic warning

    /**
     * ################################################################################
     * for j.l.r.Constructor
     * ################################################################################
     */
    //TODO: synchronization on constructor?

    /**
     * Resolves the generic exception types declared by the given constructor.
     */
    public static Type[] getGenericExceptionTypes(Constructor constructor, String signature) {
        Type[] genericExceptionTypes = null;
        //So, here it can be ParameterizedType or TypeVariable or ordinary reference class type elements.
Object startPoint = constructor;
//FIXME: Performance enhancement
String constrSignature = AuxiliaryUtil.toUTF8(signature); // getting this method signature
if (constrSignature == null) {
    //FIXME: Performance enhancement
    return constructor.getExceptionTypes();
}
// constrSignature&constrGenDecl is also the "hard" way to rethrow GenericSignatureFormatError each time for a while
InterimConstructorGenericDecl constrGenDecl = (InterimConstructorGenericDecl)
        Parser.parseSignature(constrSignature, SignatureKind.CONSTRUCTOR_SIGNATURE, (GenericDeclaration)startPoint); // GenericSignatureFormatError can be thrown here
InterimType[] throwns = constrGenDecl.throwns;
if (throwns == null) {
    //FIXME: Performance enhancement
    return constructor.getExceptionTypes();
}
int l = throwns.length;
genericExceptionTypes = new Type[l];
for (int i = 0; i < l; i++) {
    if (throwns[i] instanceof InterimParameterizedType) {
        // Reuse a previously-resolved ParameterizedType when the repository
        // already has one for this signature.
        ParameterizedType pType = ParameterizedTypeRepository.findParameterizedType((InterimParameterizedType) throwns[i], ((InterimParameterizedType) throwns[i]).signature, startPoint);
        if (pType == null) {
            try {
                AuxiliaryFinder.findGenericClassDeclarationForParameterizedType((InterimParameterizedType) throwns[i], startPoint);
            } catch(Throwable e) {
                throw new TypeNotPresentException(((InterimParameterizedType) throwns[i]).rawType.classTypeName.substring(1).replace('/', '.'), e);
            }
            //check the correspondence of the formal parameter number and the actual argument number:
            AuxiliaryChecker.checkArgsNumber((InterimParameterizedType) throwns[i], startPoint); // the MalformedParameterizedTypeException may raise here
            try {
                pType = new ParameterizedTypeImpl(AuxiliaryCreator.createTypeArgs((InterimParameterizedType) throwns[i], startPoint), AuxiliaryCreator.createRawType((InterimParameterizedType) throwns[i], startPoint), AuxiliaryCreator.createOwnerType((InterimParameterizedType) throwns[i], startPoint));
            } catch(ClassNotFoundException e) {
                throw new TypeNotPresentException(e.getMessage(), e);
            }
            ParameterizedTypeRepository.registerParameterizedType(pType, (InterimParameterizedType) throwns[i], ((InterimParameterizedType) throwns[i]).signature, startPoint);
        }
        genericExceptionTypes[i] = (Type) pType;
    } else if (throwns[i] instanceof InterimClassType) {
        try {
            genericExceptionTypes[i] = (Type) AuxiliaryLoader.findClass(((InterimClassType)throwns[i]).classTypeName.substring(1).replace('/', '.'), startPoint); // XXX: should we propagate the class loader of initial user's request (Field.getGenericType()) or use this one?
        } catch (ClassNotFoundException e) {
            throw new TypeNotPresentException(((InterimClassType)throwns[i]).classTypeName.substring(1).replace('/', '.'), e);
        } catch (ExceptionInInitializerError e) {
            // NOTE(review): deliberately ignored in the original code.
        } catch (LinkageError e) {
            // NOTE(review): deliberately ignored in the original code.
        }
    } else if (throwns[i] instanceof InterimTypeVariable) {
        String tvName = ((InterimTypeVariable) throwns[i]).typeVariableName;
        TypeVariable variable = TypeVariableRepository.findTypeVariable(tvName, startPoint);
        if (variable == null) {
            variable = AuxiliaryFinder.findTypeVariable(tvName, startPoint);
            if (variable == null) {
                // Unresolvable type variable: leave a null slot and stop.
                genericExceptionTypes[i] = (Type) null;
                break;
            }
        }
        genericExceptionTypes[i] = (Type) variable;
    } else {
        // Internal Error
    }
}
return genericExceptionTypes;
}

/**
 * Resolves the type parameters declared by the given constructor.
 */
@SuppressWarnings("unchecked")
public static TypeVariable<? extends Constructor>[] getTypeParameters(Constructor constructor, String signature) {
    //So, here it can be only TypeVariable elements.
    TypeVariable<Constructor>[] typeParameters = null;
    Object startPoint = constructor;
    //FIXME: performance enhancement
    String constrSignature = AuxiliaryUtil.toUTF8(signature); // getting this method signature
    if (constrSignature == null) {
        return new TypeVariable[0]; // can't use <generic> for arrays...
}
//FIXME: performance enhancement
InterimConstructorGenericDecl constrGenDecl = (InterimConstructorGenericDecl) Parser
        .parseSignature(constrSignature,
                SignatureKind.CONSTRUCTOR_SIGNATURE,
                (GenericDeclaration) startPoint); // GenericSignatureFormatError can be thrown here
InterimTypeParameter[] pTypeParameters = constrGenDecl.typeParameters;
if (pTypeParameters == null) {
    return new TypeVariable[0]; // can't use <generic> for arrays...
}
int l = pTypeParameters.length;
typeParameters = new TypeVariable[l]; // can't use <generic> for arrays...
for (int i = 0; i < l; i++) {
    String tvName = pTypeParameters[i].typeParameterName;
    // Type variables declared on the constructor are created fresh and
    // registered in the repository for later lookups.
    TypeVariable variable = new TypeVariableImpl(
            (GenericDeclaration) constructor, tvName,
            pTypeParameters[i]);
    TypeVariableRepository.registerTypeVariable(variable, tvName, startPoint);
    typeParameters[i] = variable;
}
return typeParameters;
}

/**
 * Resolves the generic parameter types of the given constructor.
 */
public static synchronized Type[] getGenericParameterTypes(Constructor constructor, String signature) {
    //So, here it can be ParameterizedType or TypeVariable or ordinary reference class type elements.
Type[] genericParameterTypes = null;
Object startPoint = constructor;
//FIXME: performance enhancement
String constrSignature = AuxiliaryUtil.toUTF8(signature); // getting this method
if (constrSignature == null) {
    //FIXME: performance enhancement
    return constructor.getParameterTypes();
}
// GenericSignatureFormatError can be thrown here
//FIXME: performance enhancement
InterimConstructorGenericDecl constrGenDecl = (InterimConstructorGenericDecl) Parser
        .parseSignature(constrSignature,
                SignatureKind.CONSTRUCTOR_SIGNATURE,
                (GenericDeclaration) startPoint);
InterimType[] methodParameters = constrGenDecl.methodParameters;
if (methodParameters == null) {
    return new Type[0];
}
int l = methodParameters.length;
genericParameterTypes = new Type[l];
for (int i = 0; i < l; i++) {
    if (methodParameters[i] instanceof InterimParameterizedType) {
        ParameterizedType pType = ParameterizedTypeRepository
                .findParameterizedType(
                        (InterimParameterizedType) methodParameters[i],
                        ((InterimParameterizedType) methodParameters[i]).signature,
                        startPoint);
        if (pType == null) {
            try {
                AuxiliaryFinder
                        .findGenericClassDeclarationForParameterizedType(
                                (InterimParameterizedType) methodParameters[i],
                                startPoint);
            } catch (Throwable e) {
                throw new TypeNotPresentException(
                        ((InterimParameterizedType) methodParameters[i]).rawType.classTypeName
                                .substring(1).replace('/', '.'), e);
            }
            // check the correspondence of the formal parameter
            // number and the actual argument number:
            AuxiliaryChecker.checkArgsNumber(
                    (InterimParameterizedType) methodParameters[i],
                    startPoint); // the MalformedParameterizedTypeException may raise here
            try {
                pType = new ParameterizedTypeImpl(
                        AuxiliaryCreator.createTypeArgs(
                                (InterimParameterizedType) methodParameters[i],
                                startPoint),
                        AuxiliaryCreator.createRawType(
                                (InterimParameterizedType) methodParameters[i],
                                startPoint),
                        AuxiliaryCreator.createOwnerType(
                                (InterimParameterizedType) methodParameters[i],
                                startPoint));
            } catch (ClassNotFoundException e) {
                throw new TypeNotPresentException(e.getMessage(), e);
            }
            ParameterizedTypeRepository.registerParameterizedType(
                    pType,
                    (InterimParameterizedType) methodParameters[i],
                    ((InterimParameterizedType) methodParameters[i]).signature,
                    startPoint);
        }
        genericParameterTypes[i] = (Type) pType;
    } else if (methodParameters[i] instanceof InterimClassType) {
        try {
            genericParameterTypes[i] = (Type) AuxiliaryLoader
                    .findClass(((InterimClassType) methodParameters[i]).classTypeName
                            .substring(
                                    (((InterimClassType) methodParameters[i]).classTypeName
                                            .charAt(0) == 'L' ? 1 : 0)).replace('/', '.'),
                            startPoint); // XXX: should we propagate the class loader of initial user's request (Field.getGenericType()) or use this one?
        } catch (ClassNotFoundException e) {
            throw new TypeNotPresentException(
                    ((InterimClassType) methodParameters[i]).classTypeName
                            .substring(
                                    (((InterimClassType) methodParameters[i]).classTypeName
                                            .charAt(0) == 'L' ? 1 : 0)).replace('/', '.'), e);
        } catch (ExceptionInInitializerError e) {
            // NOTE(review): deliberately ignored in the original code.
        } catch (LinkageError e) {
            // NOTE(review): deliberately ignored in the original code.
        }
    } else if (methodParameters[i] instanceof InterimTypeVariable) {
        String tvName = ((InterimTypeVariable) methodParameters[i]).typeVariableName;
        TypeVariable variable = TypeVariableRepository
                .findTypeVariable(tvName, startPoint);
        if (variable == null) {
            variable = AuxiliaryFinder.findTypeVariable(tvName, startPoint);
            if (variable == null) {
                genericParameterTypes[i] = (Type) null;
                continue;
            }
        }
        genericParameterTypes[i] = (Type) variable;
    } else if (methodParameters[i] instanceof InterimGenericArrayType) {
        genericParameterTypes[i] = AuxiliaryCreator
                .createGenericArrayType(
                        (InterimGenericArrayType) methodParameters[i],
                        startPoint);
    } else {
        // Internal Error
    }
}
return genericParameterTypes;
}

/**
 * ################################################################################
 * for j.l.r.Field
 * ################################################################################
 */
/**
 * Resolves the generic type of the given field.
 */
public static Type parseFieldGenericType(Field field, String rawSignature)
        throws GenericSignatureFormatError {
    Object startPoint = field.getDeclaringClass();
    String signature = AuxiliaryUtil.toUTF8(rawSignature);
    if (signature == null) {
        return field.getType();
    }
    InterimFieldGenericDecl decl = (InterimFieldGenericDecl) Parser
            .parseSignature(signature,
                    SignatureKind.FIELD_SIGNATURE,
                    (GenericDeclaration) startPoint);
    InterimGenericType fldType = decl.fieldType;
    if (fldType instanceof InterimTypeVariable) {
        String tvName = ((InterimTypeVariable) fldType).typeVariableName;
        TypeVariable variable = TypeVariableRepository
                .findTypeVariable(tvName, startPoint);
        if (variable == null) {
            variable = AuxiliaryFinder.findTypeVariable(tvName, startPoint);
            if (variable == null) {
                return (Type) null;
            }
        }
        return (Type) variable;
    } else if (fldType instanceof InterimParameterizedType) {
        ParameterizedType pType = ParameterizedTypeRepository
                .findParameterizedType(
                        (InterimParameterizedType) fldType,
                        ((InterimParameterizedType) fldType).signature,
                        startPoint);
        if (pType == null) {
            try {
                AuxiliaryFinder
                        .findGenericClassDeclarationForParameterizedType(
                                (InterimParameterizedType) fldType,
                                startPoint);
            } catch (Throwable e) {
                throw new TypeNotPresentException(
                        ((InterimParameterizedType) fldType).rawType.classTypeName
                                .substring(1).replace('/', '.'), e);
            }
            // check the correspondence of the formal parameter number
            // and the actual argument number:
            AuxiliaryChecker.checkArgsNumber(
                    (InterimParameterizedType) fldType, startPoint); // the MalformedParameterizedTypeException may raise here
            try {
                pType = new ParameterizedTypeImpl(AuxiliaryCreator
                        .createTypeArgs(
                                (InterimParameterizedType) fldType,
                                startPoint),
                        AuxiliaryCreator.createRawType(
                                (InterimParameterizedType) fldType,
                                startPoint),
                        AuxiliaryCreator.createOwnerType(
                                (InterimParameterizedType) fldType,
                                startPoint));
            } catch (ClassNotFoundException e) {
                throw new TypeNotPresentException(e.getMessage(), e);
            }
            ParameterizedTypeRepository.registerParameterizedType(
                    pType,
                    (InterimParameterizedType) fldType,
                    ((InterimParameterizedType) fldType).signature,
                    startPoint);
        }
        return pType;
    } else if (fldType instanceof InterimGenericArrayType) {
        return AuxiliaryCreator.createGenericArrayType(
                (InterimGenericArrayType) fldType, startPoint);
    } else {
        return field.getType();
    }
}

/**
 * ################################################################################
 * for j.l.r.Method
 * ################################################################################
 */
/**
 * Resolves the type parameters declared by the given method.
 */
@SuppressWarnings("unchecked")
public static TypeVariable[] getTypeParameters(Method method, String signature) {
    // So, here it can be only TypeVariable elements.
    TypeVariable[] typeParameters;
    Object startPoint = method;
    // FIXME: performance enhancement
    String methSignature = AuxiliaryUtil.toUTF8(signature); // getting this method signature
    // FIXME: performance enhancement
    if (methSignature == null) {
        return new TypeVariable[0];
    }
    // FIXME: performance enhancement
    InterimMethodGenericDecl methGenDecl = (InterimMethodGenericDecl) Parser
            .parseSignature(methSignature,
                    SignatureKind.METHOD_SIGNATURE,
                    (GenericDeclaration) startPoint); // GenericSignatureFormatError can be thrown here
    InterimTypeParameter[] pTypeParameters = methGenDecl.typeParameters;
    if (pTypeParameters == null) {
        return new TypeVariable[0];
    }
    int l = pTypeParameters.length;
    typeParameters = new TypeVariable[l];
    for (int i = 0; i < l; i++) {
        String tvName = pTypeParameters[i].typeParameterName;
        TypeVariable variable = new TypeVariableImpl(
                (GenericDeclaration) method, tvName,
                methGenDecl.typeParameters[i]);
        TypeVariableRepository.registerTypeVariable(variable, tvName, startPoint);
        typeParameters[i] = variable;
    }
    return typeParameters;
}

/**
 * Resolves the generic return type of the given method.
 */
public static Type getGenericReturnTypeImpl(Method method, String signature)
        throws GenericSignatureFormatError {
    Object startPoint = method;
    // FIXME: performance enhancement
    String methSignature;
    methSignature = AuxiliaryUtil.toUTF8(signature);
    if (methSignature == null) {
        // FIXME: performance enhancement
        return (Type) method.getReturnType();
    }
    // FIXME: performance enhancement
    InterimMethodGenericDecl methGenDecl = (InterimMethodGenericDecl) Parser
            .parseSignature(methSignature,
                    SignatureKind.METHOD_SIGNATURE,
                    (GenericDeclaration) startPoint);
    InterimType mthdType = methGenDecl.returnValue;
    if (mthdType instanceof InterimTypeVariable) {
        String tvName = ((InterimTypeVariable) mthdType).typeVariableName;
        TypeVariable variable = TypeVariableRepository.findTypeVariable(
                tvName, startPoint);
        if (variable == null) {
            variable = AuxiliaryFinder.findTypeVariable(tvName, startPoint);
            if (variable == null) {
                return (Type) null; // compatible behaviour
            }
        }
        return (Type) variable;
    } else if (mthdType instanceof InterimParameterizedType) {
        ParameterizedType pType = ParameterizedTypeRepository
                .findParameterizedType((InterimParameterizedType) mthdType,
                        ((InterimParameterizedType) mthdType).signature, startPoint);
        if (pType == null) {
            try {
                AuxiliaryFinder
                        .findGenericClassDeclarationForParameterizedType(
                                (InterimParameterizedType) mthdType,
                                startPoint);
            } catch (Throwable e) {
                throw new TypeNotPresentException(
                        ((InterimParameterizedType) mthdType).rawType.classTypeName
                                .substring(1).replace('/', '.'), e);
            }
            // check the correspondence of the formal parameter number and
            // the actual argument number:
            AuxiliaryChecker.checkArgsNumber(
                    (InterimParameterizedType) mthdType, startPoint); // the MalformedParameterizedTypeException may raise here
            try {
                pType = new ParameterizedTypeImpl(AuxiliaryCreator
                        .createTypeArgs(
                                (InterimParameterizedType) mthdType,
                                startPoint),
                        AuxiliaryCreator.createRawType((InterimParameterizedType) mthdType,
                                startPoint),
                        AuxiliaryCreator.createOwnerType(
                                (InterimParameterizedType) mthdType,
                                startPoint));
            } catch (ClassNotFoundException e) {
                throw new TypeNotPresentException(e.getMessage(), e);
            }
            ParameterizedTypeRepository.registerParameterizedType(pType,
                    (InterimParameterizedType) mthdType,
                    ((InterimParameterizedType) mthdType).signature,
                    startPoint);
        }
        return (Type) pType;
    } else if (mthdType instanceof InterimGenericArrayType) {
        return AuxiliaryCreator.createGenericArrayType(
                (InterimGenericArrayType) mthdType, startPoint);
    } else {
        return method.getReturnType();
    }
}

/**
 * Resolves the generic exception types declared by the given method.
 */
public static Type[] getGenericExceptionTypes(Method method, String signature) {
    // So, here it can be ParameterizedType or TypeVariable or ordinary
    // reference class type elements.
    Type[] genericExceptionTypes = null;
    Object startPoint = method;
    // FIXME: performance enhancement
    String methSignature = AuxiliaryUtil.toUTF8(signature); // getting this method signature
    // FIXME: performance enhancement
    if (methSignature == null) {
        return method.getExceptionTypes();
    }
    // FIXME: performance enhancement
    InterimMethodGenericDecl methGenDecl = (InterimMethodGenericDecl) Parser.parseSignature(
            methSignature, SignatureKind.METHOD_SIGNATURE,
            (GenericDeclaration) startPoint); // GenericSignatureFormatError can be thrown here
    InterimType[] throwns = methGenDecl.throwns;
    if (throwns == null) {
        return method.getExceptionTypes();
    }
    int l = throwns.length;
    genericExceptionTypes = new Type[l];
    for (int i = 0; i < l; i++) {
        if (throwns[i] instanceof InterimParameterizedType) {
            ParameterizedType pType = ParameterizedTypeRepository
                    .findParameterizedType(
                            (InterimParameterizedType) throwns[i],
                            ((InterimParameterizedType) throwns[i]).signature,
                            startPoint);
            if (pType == null) {
                try {
                    AuxiliaryFinder
                            .findGenericClassDeclarationForParameterizedType(
                                    (InterimParameterizedType) throwns[i],
                                    startPoint);
                } catch (Throwable e) {
                    throw new TypeNotPresentException(
                            ((InterimParameterizedType) throwns[i]).rawType.classTypeName
                                    .substring(1).replace('/', '.'), e);
                }
                // check the correspondence of the formal parameter
                // number and the actual argument number:
                AuxiliaryChecker.checkArgsNumber(
                        (InterimParameterizedType) throwns[i],
                        startPoint); // the MalformedParameterizedTypeException may raise here
                try {
                    pType = new ParameterizedTypeImpl(
                            AuxiliaryCreator.createTypeArgs(
                                    (InterimParameterizedType) throwns[i],
                                    startPoint),
                            AuxiliaryCreator.createRawType(
                                    (InterimParameterizedType) throwns[i],
                                    startPoint),
                            AuxiliaryCreator.createOwnerType(
                                    (InterimParameterizedType) throwns[i],
                                    startPoint));
                } catch (ClassNotFoundException e) {
                    throw new TypeNotPresentException(e.getMessage(), e);
                }
                ParameterizedTypeRepository.registerParameterizedType(
                        pType,
                        (InterimParameterizedType) throwns[i],
                        ((InterimParameterizedType) throwns[i]).signature,
                        startPoint);
            }
            genericExceptionTypes[i] = (Type) pType;
        } else if (throwns[i] instanceof InterimClassType) {
            try {
                genericExceptionTypes[i] = (Type) AuxiliaryLoader
                        .findClass(((InterimClassType) throwns[i]).classTypeName
                                .substring(
                                        (((InterimClassType) throwns[i]).classTypeName
                                                .charAt(0) == 'L' ? 1 : 0)).replace('/', '.'),
                                startPoint); // XXX: should we propagate the class loader of initial user's request (Field.getGenericType()) or use this one?
            } catch (ClassNotFoundException e) {
                throw new TypeNotPresentException(
                        ((InterimClassType) throwns[i]).classTypeName
                                .substring(
                                        (((InterimClassType) throwns[i]).classTypeName
                                                .charAt(0) == 'L' ? 1 : 0)).replace('/', '.'), e);
            } catch (ExceptionInInitializerError e) {
                // NOTE(review): deliberately ignored in the original code.
            } catch (LinkageError e) {
                // NOTE(review): deliberately ignored in the original code.
            }
        } else if (throwns[i] instanceof InterimTypeVariable) {
            String tvName = ((InterimTypeVariable) throwns[i]).typeVariableName;
            TypeVariable variable = TypeVariableRepository
                    .findTypeVariable(tvName, startPoint);
            if (variable == null) {
                variable = AuxiliaryFinder.findTypeVariable(tvName,
                        startPoint);
                if (variable == null) {
                    genericExceptionTypes[i] = (Type) null;
                    break;
                }
            }
            genericExceptionTypes[i] = (Type) variable;
        } else {
            // Internal Error
        }
    }
    return genericExceptionTypes;
}

/**
 * Resolves the generic parameter types of the given method.
 * NOTE(review): this method is truncated at the end of the visible chunk;
 * the trailing expression continues beyond this view.
 */
public static Type[] getGenericParameterTypes(Method method, String signature) {
    // So, here it can be ParameterizedType or TypeVariable or ordinary
    // reference class type elements.
    Type[] genericParameterTypes = null;
    Object startPoint = method;
    String methSignature = AuxiliaryUtil.toUTF8(signature); // getting this method signature
    if (methSignature == null) {
        return method.getParameterTypes();
    }
    InterimMethodGenericDecl methGenDecl = (InterimMethodGenericDecl) Parser
            .parseSignature(methSignature,
                    SignatureKind.METHOD_SIGNATURE,
                    (GenericDeclaration) startPoint); // GenericSignatureFormatError can be thrown here
    InterimType[] methodParameters = methGenDecl.methodParameters;
    if (methodParameters == null) {
        return new Type[0];
    }
    int l = methodParameters.length;
    genericParameterTypes = new Type[l];
    for (int i = 0; i < l; i++) {
        if (methodParameters[i] instanceof InterimParameterizedType) {
            ParameterizedType pType = ParameterizedTypeRepository
                    .findParameterizedType(
                            (InterimParameterizedType) methodParameters[i],
                            ((InterimParameterizedType) methodParameters[i]).signature,
                            startPoint);
            if (pType == null) {
                try {
                    AuxiliaryFinder
                            .findGenericClassDeclarationForParameterizedType(
                                    (InterimParameterizedType) methodParameters[i],
                                    startPoint);
                } catch (Throwable e) {
                    throw new TypeNotPresentException(
                            ((InterimParameterizedType) methodParameters[i]).rawType.classTypeName
                                    .substring(1).replace('/', '.'), e);
                }
                // check the correspondence of the formal parameter number
                // and the actual argument number:
                AuxiliaryChecker.checkArgsNumber(
                        (InterimParameterizedType) methodParameters[i],
                        startPoint); // the MalformedParameterizedTypeException may raise here
                try {
                    pType = new ParameterizedTypeImpl(
                            AuxiliaryCreator.createTypeArgs(
                                    (InterimParameterizedType) methodParameters[i],
                                    startPoint),
                            AuxiliaryCreator.createRawType(
                                    (InterimParameterizedType) methodParameters[i],
                                    startPoint),
                            AuxiliaryCreator.createOwnerType(
                                    (InterimParameterizedType) methodParameters[i],
                                    startPoint));
                } catch (ClassNotFoundException e) {
                    throw new TypeNotPresentException(e.getMessage(), e);
                }
                ParameterizedTypeRepository.registerParameterizedType(
                        pType,
                        (InterimParameterizedType) methodParameters[i],
                        ((InterimParameterizedType) methodParameters[i]).signature,
                        startPoint);
            }
            genericParameterTypes[i] = (Type) pType;
        } else if (methodParameters[i] instanceof InterimClassType) {
            try {
                genericParameterTypes[i] = (Type) AuxiliaryLoader
                        .findClass(((InterimClassType) methodParameters[i]).classTypeName
                                .substring(
                                        (((InterimClassType) methodParameters[i]).classTypeName
                                                .charAt(0) == 'L' ? 1 : 0))
                                .replace('/', '.'), startPoint); // XXX: should we propagate the class loader of initial user's request (Field.getGenericType()) or use this one?
            } catch (ClassNotFoundException e) {
                throw new TypeNotPresentException(
                        ((InterimClassType) methodParameters[i]).classTypeName
                                .substring(
                                        (((InterimClassType) methodParameters[i]).classTypeName
                                                .charAt(0) == 'L' ?
1 : 0)) .replace('/', '.'), e); } catch (ExceptionInInitializerError e) { } catch (LinkageError e) { } } else if (methodParameters[i] instanceof InterimTypeVariable) { String tvName = ((InterimTypeVariable) methodParameters[i]).typeVariableName; TypeVariable variable = TypeVariableRepository .findTypeVariable(tvName, startPoint); if (variable == null) { variable = AuxiliaryFinder.findTypeVariable(tvName, startPoint); if (variable == null) { genericParameterTypes[i] = (Type) null; continue; } } genericParameterTypes[i] = (Type) variable; } else if (methodParameters[i] instanceof InterimGenericArrayType) { genericParameterTypes[i] = AuxiliaryCreator .createGenericArrayType( (InterimGenericArrayType) methodParameters[i], startPoint); } else { // Internal Error } } return genericParameterTypes; } /** * ################################################################################ * for j.l.Class * ################################################################################ */ @SuppressWarnings("unchecked") public static TypeVariable[] getTypeParameters(Class c, String rawSignature) { TypeVariable[] typeParameters = null; //So, here it can be only TypeVariable elements. 
Object startPoint = c; String signature = AuxiliaryUtil.toUTF8(rawSignature); // getting this class signature if (signature == null) { return typeParameters = new TypeVariable[0]; } InterimClassGenericDecl decl = (InterimClassGenericDecl) Parser.parseSignature(signature, SignatureKind.CLASS_SIGNATURE, (GenericDeclaration)startPoint); // GenericSignatureFormatError can be thrown here InterimTypeParameter[] pTypeParameters = decl.typeParameters; if (pTypeParameters == null) { return typeParameters = new TypeVariable[0]; } int l = pTypeParameters.length; typeParameters = new TypeVariable[l]; for (int i = 0; i < l; i++) { String tvName = pTypeParameters[i].typeParameterName; TypeVariable variable = new TypeVariableImpl((GenericDeclaration)c, tvName, decl.typeParameters[i]); TypeVariableRepository.registerTypeVariable(variable, tvName, startPoint); typeParameters[i] = variable; } return typeParameters; } public static Type getGenericSuperClass(Class c, String rawSignature) { Type genericSuperclass = null; Object startPoint = (Object) c; // It should be this class itself // because, for example, superclass may // be a parameterized type with // parameters which are the generic // parameters of this class String signature = AuxiliaryUtil.toUTF8(rawSignature); // getting this class signature if (signature == null) { return genericSuperclass = c.getSuperclass(); } InterimClassGenericDecl decl = (InterimClassGenericDecl) Parser .parseSignature(signature, SignatureKind.CLASS_SIGNATURE, (GenericDeclaration) startPoint); // GenericSignatureFormatError // can be thrown // here InterimType superClassType = decl.superClass; if (superClassType == null) { return genericSuperclass = c.getSuperclass(); } if (superClassType instanceof InterimParameterizedType) { ParameterizedType pType = ParameterizedTypeRepository .findParameterizedType( (InterimParameterizedType) superClassType, ((InterimParameterizedType) superClassType).signature, startPoint); if (pType == null) { try { 
AuxiliaryFinder .findGenericClassDeclarationForParameterizedType( (InterimParameterizedType) superClassType, startPoint); } catch (Throwable e) { throw new TypeNotPresentException( ((InterimParameterizedType) superClassType).rawType.classTypeName .substring(1).replace('/', '.'), e); } // check the correspondence of the formal parameter number and // the actual argument number: AuxiliaryChecker.checkArgsNumber( (InterimParameterizedType) superClassType, startPoint); // the // MalformedParameterizedTypeException // may // raise // here try { pType = new ParameterizedTypeImpl(AuxiliaryCreator .createTypeArgs( (InterimParameterizedType) superClassType, startPoint), AuxiliaryCreator .createRawType( (InterimParameterizedType) superClassType, startPoint), AuxiliaryCreator .createOwnerType( (InterimParameterizedType) superClassType, startPoint)); } catch (ClassNotFoundException e) { throw new TypeNotPresentException(e.getMessage(), e); } ParameterizedTypeRepository.registerParameterizedType(pType, (InterimParameterizedType) superClassType, signature, startPoint); } genericSuperclass = (Type) pType; } else if (superClassType instanceof InterimClassType) { try { genericSuperclass = (Type) c .getClass() .getClassLoader() //FIXME: any potential issue to change findClass->loadClass .loadClass( AuxiliaryFinder .transform(((InterimClassType) superClassType).classTypeName .substring(1).replace('/', '.'))); // XXX: should we propagate the class loader of initial user's request (Field.getGenericType()) or use this one? 
} catch (ClassNotFoundException e) { throw new TypeNotPresentException( ((InterimClassType) superClassType).classTypeName .substring(1).replace('/', '.'), e); } catch (ExceptionInInitializerError e) { } catch (LinkageError e) { } } else { // Internal Error } return genericSuperclass; } @SuppressWarnings("unchecked") public static Type[] getGenericInterfaces(Class c, String rawSignature){ Type[] genericInterfaces = null; //So, here it can be only ParameterizedType or ordinary reference class type elements. if (c.isArray()) { return genericInterfaces = new Type[]{Cloneable.class, Serializable.class}; } if (genericInterfaces == null) { Object startPoint = c; // It should be this class itself because, for example, an interface may be a parameterized type with parameters which are the generic parameters of this class String signature = AuxiliaryUtil.toUTF8(rawSignature); // getting this class signature if (signature == null) { return genericInterfaces = c.getInterfaces(); } InterimClassGenericDecl decl = (InterimClassGenericDecl) Parser.parseSignature(signature, SignatureKind.CLASS_SIGNATURE, (GenericDeclaration)startPoint); //GenericSignatureFormatError can be thrown here InterimType[] superInterfaces = decl.superInterfaces; if (superInterfaces == null) { return genericInterfaces = c.getInterfaces(); } int l = superInterfaces.length; genericInterfaces = new Type[l]; for (int i = 0; i < l; i++) { if (superInterfaces[i] instanceof InterimParameterizedType) { ParameterizedType pType = ParameterizedTypeRepository.findParameterizedType((InterimParameterizedType) superInterfaces[i], ((InterimParameterizedType) superInterfaces[i]).signature, startPoint); if (pType == null) { try { AuxiliaryFinder.findGenericClassDeclarationForParameterizedType((InterimParameterizedType) superInterfaces[i], startPoint); } catch(Throwable e) { throw new TypeNotPresentException(((InterimParameterizedType) superInterfaces[i]).rawType.classTypeName.substring(1).replace('/', '.'), e); } //check the 
correspondence of the formal parameter number and the actual argument number: AuxiliaryChecker.checkArgsNumber((InterimParameterizedType) superInterfaces[i], startPoint); // the MalformedParameterizedTypeException may raise here try { pType = new ParameterizedTypeImpl(AuxiliaryCreator.createTypeArgs((InterimParameterizedType) superInterfaces[i], startPoint), AuxiliaryCreator.createRawType((InterimParameterizedType) superInterfaces[i], startPoint), AuxiliaryCreator.createOwnerType((InterimParameterizedType) superInterfaces[i], startPoint)); } catch(ClassNotFoundException e) { throw new TypeNotPresentException(e.getMessage(), e); } ParameterizedTypeRepository.registerParameterizedType(pType, (InterimParameterizedType) superInterfaces[i], signature, startPoint); } genericInterfaces[i] = (Type) pType; } else if (superInterfaces[i] instanceof InterimClassType) { try { if(c.getClass().getClassLoader() != null){ //FIXME: any potential issue to change findClass->loadClass genericInterfaces[i] = (Type) c.getClass().getClassLoader().loadClass(AuxiliaryFinder.transform(((InterimClassType)superInterfaces[i]).classTypeName.substring(1).replace('/', '.'))); // XXX: should we propagate the class loader of initial user's request (Field.getGenericType()) or use this one? } else { genericInterfaces[i] = (Type) AuxiliaryLoader.findClass(AuxiliaryFinder.transform(((InterimClassType)superInterfaces[i]).classTypeName.substring(1).replace('/', '.')), startPoint); // XXX: should we propagate the class loader of initial user's request (Field.getGenericType()) or use this one? } } catch (ClassNotFoundException e) { throw new TypeNotPresentException(((InterimClassType)superInterfaces[i]).classTypeName.substring(1).replace('/', '.'), e); } catch (ExceptionInInitializerError e) { } catch (LinkageError e) { } } else { // Internal Error } } } return genericInterfaces; } }
/*
 * Copyright (C) 2008 ZXing authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.acker.simplezxing.camera;

import android.content.Context;
import android.graphics.Point;
import android.graphics.Rect;
import android.hardware.Camera;
import android.os.Handler;
import android.view.SurfaceHolder;

import com.acker.simplezxing.camera.open.OpenCamera;
import com.acker.simplezxing.camera.open.OpenCameraInterface;
import com.google.zxing.PlanarYUVLuminanceSource;

import java.io.IOException;

/**
 * This object wraps the Camera service object and expects to be the only one talking to it. The
 * implementation encapsulates the steps needed to take preview-sized images, which are used for
 * both preview and decoding.
 *
 * @author dswitkin@google.com (Daniel Switkin)
 */
/**
 * @date 2016-11-18 16:17
 * @auther GuoJinyu
 * @description modified
 */
public final class CameraManager {

    private static final String TAG = CameraManager.class.getSimpleName(); // kept although logging is commented out below

    // Minimum size of the scanning frame, in screen pixels.
    private static final int MIN_FRAME_WIDTH = 240;
    private static final int MIN_FRAME_HEIGHT = 240;
    // private static final int MAX_FRAME_WIDTH = 1200; // = 5/8 * 1920
    // private static final int MAX_FRAME_HEIGHT = 675; // = 5/8 * 1080
    private static final int MAX_FRAME_WIDTH = 1920; // = 3/4 * 2560
    private static final int MAX_FRAME_HEIGHT = 1080; // = 3/4 * 1440

    private final CameraConfigurationManager configManager;
    /**
     * Preview frames are delivered here, which we pass on to the registered handler. Make sure to
     * clear the handler so it will only receive one message.
     */
    private final PreviewCallback previewCallback;
    private OpenCamera camera;                 // null while the driver is closed
    private AutoFocusManager autoFocusManager; // created on startPreview, torn down on stopPreview
    private Rect framingRect;                  // lazily computed; reset when the driver closes
    private Rect framingRectInPreview;         // framingRect translated to preview coordinates
    private boolean initialized;               // true once camera parameters were read
    private boolean previewing;                // true while the preview is running
    // Framing-rect size requested before the driver was initialized; applied in openDriver().
    private int requestedFramingRectWidth;
    private int requestedFramingRectHeight;
    // When true, buildLuminanceSource() uses the whole frame instead of the framing rect.
    private boolean needFullScreen;

    public CameraManager(Context context, boolean needExposure, boolean needFullScreen) {
        this.configManager = new CameraConfigurationManager(context, needExposure);
        previewCallback = new PreviewCallback(configManager);
        this.needFullScreen = needFullScreen;
    }

    /**
     * Targets 3/4 of the given screen dimension, clamped to [hardMin, hardMax].
     */
    private static int findDesiredDimensionInRange(int resolution, int hardMin, int hardMax) {
        //int dim = 5 * resolution / 8; // Target 5/8 of each dimension
        int dim = 3 * resolution / 4; // Target 3/4 of each dimension
        if (dim < hardMin) {
            return hardMin;
        }
        if (dim > hardMax) {
            return hardMax;
        }
        return dim;
    }

    /**
     * Opens the camera driver and initializes the hardware parameters.
     *
     * @param holder The surface object which the camera will draw preview frames into.
     * @throws IOException Indicates the camera driver failed to open.
     */
    public synchronized void openDriver(SurfaceHolder holder) throws IOException {
        OpenCamera theCamera = camera;
        if (theCamera == null) {
            theCamera = OpenCameraInterface.open(OpenCameraInterface.NO_REQUESTED_CAMERA);
            if (theCamera == null) {
                throw new IOException("Camera.open() failed to return object from driver");
            }
            camera = theCamera;
        }

        if (!initialized) {
            initialized = true;
            configManager.initFromCameraParameters(theCamera);
            // Apply a framing-rect size that was requested before init completed.
            if (requestedFramingRectWidth > 0 && requestedFramingRectHeight > 0) {
                setManualFramingRect(requestedFramingRectWidth, requestedFramingRectHeight);
                requestedFramingRectWidth = 0;
                requestedFramingRectHeight = 0;
            }
        }

        Camera cameraObject = theCamera.getCamera();
        Camera.Parameters parameters = cameraObject.getParameters();
        String parametersFlattened = parameters == null ? null : parameters.flatten(); // Save these, temporarily
        try {
            configManager.setDesiredCameraParameters(theCamera, false);
        } catch (RuntimeException re) {
            // Driver failed
            //Log.w(TAG, "Camera rejected parameters. Setting only minimal safe-mode parameters");
            //Log.i(TAG, "Resetting to saved camera params: " + parametersFlattened);
            // Reset: restore the saved parameters, then retry in safe mode.
            if (parametersFlattened != null) {
                parameters = cameraObject.getParameters();
                parameters.unflatten(parametersFlattened);
                try {
                    cameraObject.setParameters(parameters);
                    configManager.setDesiredCameraParameters(theCamera, true);
                } catch (RuntimeException re2) {
                    // Well, darn. Give up
                    //Log.w(TAG, "Camera rejected even safe-mode parameters! No configuration");
                }
            }
        }
        cameraObject.setPreviewDisplay(holder);
    }

    public synchronized boolean isOpen() {
        return camera != null;
    }

    /**
     * Closes the camera driver if still in use.
     */
    public synchronized void closeDriver() {
        if (camera != null) {
            camera.getCamera().release();
            camera = null;
            // Make sure to clear these each time we close the camera, so that any scanning rect
            // requested by intent is forgotten.
            framingRect = null;
            framingRectInPreview = null;
        }
    }

    /**
     * Asks the camera hardware to begin drawing preview frames to the screen.
     */
    public synchronized void startPreview() {
        OpenCamera theCamera = camera;
        if (theCamera != null && !previewing) {
            theCamera.getCamera().startPreview();
            previewing = true;
            autoFocusManager = new AutoFocusManager(theCamera.getCamera());
        }
    }

    /**
     * Tells the camera to stop drawing preview frames.
     */
    public synchronized void stopPreview() {
        if (autoFocusManager != null) {
            autoFocusManager.stop();
            autoFocusManager = null;
        }
        if (camera != null && previewing) {
            camera.getCamera().stopPreview();
            previewCallback.setHandler(null, 0);
            previewing = false;
        }
    }

    /**
     * Convenience method for {@link com.acker.simplezxing.activity.CaptureActivity}
     *
     * @param newSetting if {@code true}, light should be turned on if currently off. And vice versa.
     */
    public synchronized void setTorch(boolean newSetting) {
        OpenCamera theCamera = camera;
        if (theCamera != null) {
            if (newSetting != configManager.getTorchState(theCamera.getCamera())) {
                // Autofocus must be paused while the torch state changes, then restarted.
                boolean wasAutoFocusManager = autoFocusManager != null;
                if (wasAutoFocusManager) {
                    autoFocusManager.stop();
                    autoFocusManager = null;
                }
                configManager.setTorch(theCamera.getCamera(), newSetting);
                if (wasAutoFocusManager) {
                    autoFocusManager = new AutoFocusManager(theCamera.getCamera());
                    autoFocusManager.start();
                }
            }
        }
    }

    /**
     * A single preview frame will be returned to the handler supplied. The data will arrive as byte[]
     * in the message.obj field, with width and height encoded as message.arg1 and message.arg2,
     * respectively.
     *
     * @param handler The handler to send the message to.
     * @param message The what field of the message to be sent.
     */
    public synchronized void requestPreviewFrame(Handler handler, int message) {
        OpenCamera theCamera = camera;
        if (theCamera != null && previewing) {
            previewCallback.setHandler(handler, message);
            theCamera.getCamera().setOneShotPreviewCallback(previewCallback);
        }
    }

    /**
     * Calculates the framing rect which the UI should draw to show the user where to place the
     * barcode. This target helps with alignment as well as forces the user to hold the device
     * far enough away to ensure the image will be in focus.
     *
     * @return The rectangle to draw on screen in window coordinates.
     */
    public synchronized Rect getFramingRect() {
        if (framingRect == null) {
            if (camera == null) {
                return null;
            }
            Point screenResolution = configManager.getScreenResolution();
            if (screenResolution == null) {
                // Called early, before init even finished
                return null;
            }
            int width = findDesiredDimensionInRange(screenResolution.x, MIN_FRAME_WIDTH, MAX_FRAME_WIDTH);
            int height = findDesiredDimensionInRange(screenResolution.y, MIN_FRAME_HEIGHT, MAX_FRAME_HEIGHT);
            // Keep the frame square-ish: never taller than it is wide.
            if (width < height) {
                height = width;
            }
            int leftOffset = (screenResolution.x - width) / 2;
            int topOffset = (screenResolution.y - height) / 2;
            framingRect = new Rect(leftOffset, topOffset, leftOffset + width, topOffset + height);
            //Log.d(TAG, "Calculated framing rect: " + framingRect);
        }
        return framingRect;
    }

    /**
     * Like {@link #getFramingRect} but coordinates are in terms of the preview frame,
     * not UI / screen.
     *
     * @return {@link Rect} expressing barcode scan area in terms of the preview size
     */
    public synchronized Rect getFramingRectInPreview() {
        if (framingRectInPreview == null) {
            Rect framingRect = getFramingRect();
            if (framingRect == null) {
                return null;
            }
            Rect rect = new Rect(framingRect);
            Point cameraResolution = configManager.getCameraResolution();
            Point screenResolution = configManager.getScreenResolution();
            if (cameraResolution == null || screenResolution == null) {
                // Called early, before init even finished
                return null;
            }
            // In portrait the camera sensor is rotated 90° relative to the screen,
            // so x scales by cameraResolution.y and vice versa.
            if (screenResolution.x < screenResolution.y) {
                // portrait
                rect.left = rect.left * cameraResolution.y / screenResolution.x;
                rect.right = rect.right * cameraResolution.y / screenResolution.x;
                rect.top = rect.top * cameraResolution.x / screenResolution.y;
                rect.bottom = rect.bottom * cameraResolution.x / screenResolution.y;
            } else {
                // landscape
                rect.left = rect.left * cameraResolution.x / screenResolution.x;
                rect.right = rect.right * cameraResolution.x / screenResolution.x;
                rect.top = rect.top * cameraResolution.y / screenResolution.y;
                rect.bottom = rect.bottom * cameraResolution.y / screenResolution.y;
            }
            framingRectInPreview = rect;
        }
        return framingRectInPreview;
    }

    /**
     * Allows third party apps to specify the scanning rectangle dimensions, rather than determine
     * them automatically based on screen resolution.
     *
     * @param width  The width in pixels to scan.
     * @param height The height in pixels to scan.
     */
    private synchronized void setManualFramingRect(int width, int height) {
        if (initialized) {
            Point screenResolution = configManager.getScreenResolution();
            if (width > screenResolution.x) {
                width = screenResolution.x;
            }
            if (height > screenResolution.y) {
                height = screenResolution.y;
            }
            int leftOffset = (screenResolution.x - width) / 2;
            int topOffset = (screenResolution.y - height) / 2;
            framingRect = new Rect(leftOffset, topOffset, leftOffset + width, topOffset + height);
            //Log.d(TAG, "Calculated manual framing rect: " + framingRect);
            framingRectInPreview = null;
        } else {
            // Driver not initialized yet: remember the request, applied in openDriver().
            requestedFramingRectWidth = width;
            requestedFramingRectHeight = height;
        }
    }

    /**
     * A factory method to build the appropriate LuminanceSource object based on the format
     * of the preview buffers, as described by Camera.Parameters.
     *
     * @param data   A preview frame.
     * @param width  The width of the image.
     * @param height The height of the image.
     * @return A PlanarYUVLuminanceSource instance.
     */
    public PlanarYUVLuminanceSource buildLuminanceSource(byte[] data, int width, int height) {
        if (needFullScreen) {
            return new PlanarYUVLuminanceSource(data, width, height, 0, 0, width, height, false);
        } else {
            Rect rect = getFramingRectInPreview();
            if (rect == null) {
                return null;
            }
            // Go ahead and assume it's YUV rather than die.
            return new PlanarYUVLuminanceSource(data, width, height, rect.left, rect.top,
                    rect.width(), rect.height(), false);
        }
    }

}
package main; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.io.Writer; import java.net.HttpURLConnection; import java.net.MalformedURLException; import java.net.URL; import java.sql.ResultSet; import java.sql.SQLException; import java.util.Calendar; import java.util.Properties; import java.util.concurrent.TimeUnit; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.net.URLEncoder; import com.google.gson.JsonParser; import com.google.gson.JsonObject; import com.google.gson.JsonArray; public class GeonameSetting { public void main() throws Throwable { agtDB agtdb = new agtDB(); Properties prop = new Properties(); InputStream input = null; int geonameNum = 0; String excepLocation = ""; String insertSQL = ""; int agtNum = 0; String userName = ""; try { input = new FileInputStream("config.properties"); prop.load(input); geonameNum = Integer.parseInt(prop.getProperty("GeonameNum")); excepLocation = prop.getProperty("locationExceptionFile"); userName = prop.getProperty("geoNameUser"); } catch (IOException ex) { ex.printStackTrace(); } finally { if (input != null) { try { input.close(); } catch (IOException e) { e.printStackTrace(); } } } tableSetting(); agtdb.runSql2("TRUNCATE location;"); agtdb.runSql2("TRUNCATE geonames;"); locationTableSetting (excepLocation); engColumParse (); insertSQL = "SELECT COUNT(*) FROM agt"; ResultSet rsetAgt= agtdb.runSqlResultSet(insertSQL); rsetAgt.next(); agtNum = rsetAgt.getInt(1); int i =1,k=0, startNum = 1, endNum = 1; insertSQL = "SELECT SISN FROM location"; ResultSet rsetLocation = agtdb.runSqlResultSet(insertSQL); rsetLocation.last(); int[] locationSISN = new int[rsetLocation.getRow()]; rsetLocation.first(); for (i=0;i<locationSISN.length;i++){ locationSISN[i] = 
rsetLocation.getInt(1); rsetLocation.next(); } i = 1; while(i<=agtNum){ int j = 1; startNum = i; while (j<=geonameNum){ if (locationSISN[k]==i){ i++;k++; }else { j++; i++; } } endNum = i; insertSQL = "SELECT * FROM agt WHERE SISN > " + startNum +" AND SISN < " + (endNum+1) ; try { agtdb = new agtDB(); setGeonameTable(userName,insertSQL); agtdb.finalize(); if ((endNum+1)<agtNum){ TimeUnit.DAYS.sleep(1); } } catch(InterruptedException ex) { Thread.currentThread().interrupt(); } } } public void tableSetting () throws Throwable { //Location table create agtDB agtdb = new agtDB(); String createTableSQL = "CREATE TABLE IF NOT EXISTS location (" + "SISN INT(11) NOT NULL, " + "name VARCHAR(100), " + "scope VARCHAR(2000), " + "coor VARCHAR(100)," + "lat DECIMAL(12,6)," + "lng DECIMAL(12,6)" + ") ENGINE=InnoDB DEFAULT CHARSET=utf8;"; agtdb.runSql2(createTableSQL); //Geoname table create createTableSQL = "CREATE TABLE IF NOT EXISTS geonames (" + "id INT(11) NOT NULL," + "name VARCHAR(100)," + "lat DECIMAL(12,6), " + "lng DECIMAL(12,6)," + "geoYN VARCHAR(2)" + ") ENGINE=InnoDB DEFAULT CHARSET=utf8;"; agtdb.runSql2(createTableSQL); agtdb.finalize(); } public void locationTableSetting (String excepLocation) throws Throwable { agtDB agtdb = new agtDB(); String insertSQL; insertSQL = "SELECT * FROM agt"; ResultSet rsetAgt= agtdb.runSqlResultSet(insertSQL); ResultSet rsetScope; int rowNum; String scopeDes; String desWords[],latWords[],lngWords[]; String engDesc; StringBuilder content = new StringBuilder(); String contentResult=""; Pattern pattern = Pattern.compile("\\d+\\s\\d+\\s(\\d+\\s)?[NS],\\s\\d+\\s\\d+\\s(\\d+\\s)?[WE]."); Matcher matcher = pattern.matcher(""); Pattern patlng = Pattern.compile("\\d+\\s\\d+\\s(\\d+\\s)?[WE]"); Pattern patlat = Pattern.compile("\\d+\\s\\d+\\s(\\d+\\s)?[NS]"); Matcher matlng = patlng.matcher(""); Matcher matlat = patlat.matcher(""); String coor,longitude,latitude; Float lng,lat; File file = new File(excepLocation); Writer out = new 
BufferedWriter(new OutputStreamWriter(new FileOutputStream(file), "UTF8")); while(rsetAgt.next()){ engDesc = rsetAgt.getString("ENG_DESC"); if (engDesc== null){ }else{ insertSQL="SELECT * FROM agt_eng_scope WHERE SISN = " + rsetAgt.getInt("SISN"); //System.out.println(insertSQL); //System.out.println(rsetAgt.getString("ENG_DESC")); rsetScope = agtdb.runSqlResultSet(insertSQL); rowNum=0; if (rsetScope.next()){ rsetScope.last(); rowNum=rsetScope.getRow(); rsetScope.first(); if (rowNum>1){ do{ scopeDes=rsetScope.getString("ENG_SCOPE"); desWords=scopeDes.trim().split(" "); matcher = pattern.matcher(scopeDes); if (desWords[0].contains("Location")){ if (matcher.find()){ coor = matcher.group(); matlng = patlng.matcher(coor); matlng.find(); longitude = matlng.group(); matlat = patlat.matcher(coor); matlat.find(); latitude = matlat.group(); latWords = latitude.trim().split(" "); lngWords = longitude.trim().split(" "); scopeDes=scopeDes.replace("\"", "\\\""); if (latWords.length==4){ lat = latWords[3].contains("S")? -1*((((Float.parseFloat(latWords[2])/60)+Float.parseFloat(latWords[1]))/60)+Float.parseFloat(latWords[0])) : ((((Float.parseFloat(latWords[2])/60)+Float.parseFloat(latWords[1]))/60)+Float.parseFloat(latWords[0])); if (lngWords.length==4){ lng = lngWords[3].contains("W")? -1*((((Float.parseFloat(lngWords[2])/60)+Float.parseFloat(lngWords[1]))/60)+Float.parseFloat(lngWords[0])) : ((((Float.parseFloat(lngWords[2])/60)+Float.parseFloat(lngWords[1]))/60)+Float.parseFloat(lngWords[0])); insertSQL= "INSERT INTO location"+"(SISN,name,scope,coor,lat,lng)"+"VALUES"+ "("+ rsetAgt.getInt("SISN") +","+"\""+ rsetAgt.getString("ENG_DESC") +"\""+","+"\""+ scopeDes +"\""+","+"\""+ coor +"\""+","+ lat +","+ lng +")"; //System.out.println(insertSQL); agtdb.runSql(insertSQL); break; }else if (lngWords.length==3){ lng = lngWords[2].contains("W")? 
-1*((Float.parseFloat(lngWords[1])/60)+Float.parseFloat(lngWords[0])) : ((Float.parseFloat(lngWords[1])/60)+Float.parseFloat(lngWords[0])); insertSQL= "INSERT INTO location"+"(SISN,name,scope,coor,lat,lng)"+"VALUES"+ "("+ rsetAgt.getInt("SISN") +","+"\""+ rsetAgt.getString("ENG_DESC") +"\""+","+"\""+ scopeDes +"\""+","+"\""+ coor +"\""+","+ lat +","+ lng +")"; //System.out.println(insertSQL); agtdb.runSql(insertSQL); break; } }else if (latWords.length==3) { lat = latWords[2].contains("S")? -1*((Float.parseFloat(latWords[1])/60)+Float.parseFloat(latWords[0])) : ((Float.parseFloat(latWords[1])/60)+Float.parseFloat(latWords[0])); if (lngWords.length==4){ lng = lngWords[3].contains("W")? -1*((((Float.parseFloat(lngWords[2])/60)+Float.parseFloat(lngWords[1]))/60)+Float.parseFloat(lngWords[0])) : ((((Float.parseFloat(lngWords[2])/60)+Float.parseFloat(lngWords[1]))/60)+Float.parseFloat(lngWords[0])); insertSQL= "INSERT INTO location"+"(SISN,name,scope,coor,lat,lng)"+"VALUES"+ "("+ rsetAgt.getInt("SISN") +","+"\""+ rsetAgt.getString("ENG_DESC") +"\""+","+"\""+ scopeDes +"\""+","+"\""+ coor +"\""+","+ lat +","+ lng +")"; //System.out.println(insertSQL); agtdb.runSql(insertSQL); break; }else if (lngWords.length==3){ lng = lngWords[2].contains("W")? 
-1*((Float.parseFloat(lngWords[1])/60)+Float.parseFloat(lngWords[0])) : ((Float.parseFloat(lngWords[1])/60)+Float.parseFloat(lngWords[0])); insertSQL= "INSERT INTO location"+"(SISN,name,scope,coor,lat,lng)"+"VALUES"+ "("+ rsetAgt.getInt("SISN") +","+"\""+ rsetAgt.getString("ENG_DESC") +"\""+","+"\""+ scopeDes +"\""+","+"\""+ coor +"\""+","+ lat +","+ lng +")"; //System.out.println(insertSQL); agtdb.runSql(insertSQL); break; } } }else{ scopeDes=scopeDes.replace("\"", "\\\""); insertSQL= "INSERT INTO location"+"(SISN,name,scope)"+"VALUES"+ "("+ rsetAgt.getInt("SISN") +","+ rsetAgt.getString("ENG_DESC") +","+ scopeDes +","+ ")"; agtdb.runSql(insertSQL); content.append("SISN = "+rsetAgt.getInt("SISN")+"\n"+"Name = "+rsetAgt.getString("ENG_DESC")+"\n"+"ENG_SCOPE = "+rsetScope.getString("ENG_SCOPE")+"\n"+"\n"); } } }while(rsetScope.next()); }else{ scopeDes=rsetScope.getString("ENG_SCOPE"); desWords=scopeDes.trim().split(" "); if (desWords[0].contains("Location")){ matcher = pattern.matcher(scopeDes); if (matcher.find()){ coor = matcher.group(); matlng = patlng.matcher(coor); matlng.find(); longitude = matlng.group(); matlat = patlat.matcher(coor); matlat.find(); latitude = matlat.group(); latWords = latitude.trim().split(" "); lngWords = longitude.trim().split(" "); scopeDes=scopeDes.replace("\"", "\\\""); if (latWords.length==4){ lat = latWords[3].contains("S")? -1*((((Float.parseFloat(latWords[2])/60)+Float.parseFloat(latWords[1]))/60)+Float.parseFloat(latWords[0])) : ((((Float.parseFloat(latWords[2])/60)+Float.parseFloat(latWords[1]))/60)+Float.parseFloat(latWords[0])); if (lngWords.length==4){ lng = lngWords[3].contains("W")? 
-1*((((Float.parseFloat(lngWords[2])/60)+Float.parseFloat(lngWords[1]))/60)+Float.parseFloat(lngWords[0])) : ((((Float.parseFloat(lngWords[2])/60)+Float.parseFloat(lngWords[1]))/60)+Float.parseFloat(lngWords[0])); insertSQL= "INSERT INTO location"+"(SISN,name,scope,coor,lat,lng)"+"VALUES"+ "("+ rsetAgt.getInt("SISN") +","+"\""+ rsetAgt.getString("ENG_DESC") +"\""+","+"\""+ scopeDes +"\""+","+"\""+ coor +"\""+","+ lat +","+ lng +")"; //System.out.println(insertSQL); agtdb.runSql(insertSQL); }else if (lngWords.length==3){ lng = lngWords[2].contains("W")? -1*((Float.parseFloat(lngWords[1])/60)+Float.parseFloat(lngWords[0])) : ((Float.parseFloat(lngWords[1])/60)+Float.parseFloat(lngWords[0])); insertSQL= "INSERT INTO location"+"(SISN,name,scope,coor,lat,lng)"+"VALUES"+ "("+ rsetAgt.getInt("SISN") +","+"\""+ rsetAgt.getString("ENG_DESC") +"\""+","+"\""+ scopeDes +"\""+","+"\""+ coor +"\""+","+ lat +","+ lng +")"; //System.out.println(insertSQL); agtdb.runSql(insertSQL); } }else if (latWords.length==3) { lat = latWords[2].contains("S")? -1*((Float.parseFloat(latWords[1])/60)+Float.parseFloat(latWords[0])) : ((Float.parseFloat(latWords[1])/60)+Float.parseFloat(latWords[0])); if (lngWords.length==4){ lng = lngWords[3].contains("W")? -1*((((Float.parseFloat(lngWords[2])/60)+Float.parseFloat(lngWords[1]))/60)+Float.parseFloat(lngWords[0])) : ((((Float.parseFloat(lngWords[2])/60)+Float.parseFloat(lngWords[1]))/60)+Float.parseFloat(lngWords[0])); insertSQL= "INSERT INTO location"+"(SISN,name,scope,coor,lat,lng)"+"VALUES"+ "("+ rsetAgt.getInt("SISN") +","+"\""+ rsetAgt.getString("ENG_DESC") +"\""+","+"\""+ scopeDes +"\""+","+"\""+ coor +"\""+","+ lat +","+ lng +")"; //System.out.println(insertSQL); agtdb.runSql(insertSQL); }else if (lngWords.length==3){ lng = lngWords[2].contains("W")? 
-1*((Float.parseFloat(lngWords[1])/60)+Float.parseFloat(lngWords[0])) : ((Float.parseFloat(lngWords[1])/60)+Float.parseFloat(lngWords[0])); insertSQL= "INSERT INTO location"+"(SISN,name,scope,coor,lat,lng)"+"VALUES"+ "("+ rsetAgt.getInt("SISN") +","+"\""+ rsetAgt.getString("ENG_DESC") +"\""+","+"\""+ scopeDes +"\""+","+"\""+ coor +"\""+","+ lat +","+ lng +")"; //System.out.println(insertSQL); agtdb.runSql(insertSQL); } } }else{ scopeDes=scopeDes.replace("\"", "\\\""); insertSQL= "INSERT INTO location"+"(SISN,name,scope)"+"VALUES"+ "("+ rsetAgt.getInt("SISN") +","+"\""+ rsetAgt.getString("ENG_DESC") +"\""+","+"\""+ scopeDes +"\""+ ")"; //System.out.println(insertSQL); agtdb.runSql(insertSQL); content.append("SISN = "+rsetAgt.getInt("SISN")+"\n"+"Name = "+rsetAgt.getString("ENG_DESC")+"\n"+"ENG_SCOPE = "+rsetScope.getString("ENG_SCOPE")+"\n"+"\n"); } } } } } } contentResult=content.toString(); out.append(contentResult); out.flush(); out.close(); agtdb.finalize(); System.out.println("Location Table First Part Done"); } public void engColumParse () throws Throwable { agtDB agtdb = new agtDB(); String insertSQL; insertSQL = "SELECT SISN,ENG_DESC FROM agt"; ResultSet rsetAgt= agtdb.runSqlResultSet(insertSQL); ResultSet rsetScope; String scopeDes; String latWords[],lngWords[]; Pattern pattern = Pattern.compile("\\d+\\s\\d+\\s(\\d+\\s)?[NS],\\s\\d+\\s\\d+\\s(\\d+\\s)?[WE]"); Matcher matcher = pattern.matcher(""); Pattern patlng = Pattern.compile("\\d+\\s\\d+\\s(\\d+\\s)?[WE]"); Pattern patlat = Pattern.compile("\\d+\\s\\d+\\s(\\d+\\s)?[NS]"); Matcher matlng = patlng.matcher(""); Matcher matlat = patlat.matcher(""); String coor,longitude,latitude; Float lng,lat; System.out.println("Parse coordinate in Eng_colum Start"); while(rsetAgt.next()){ insertSQL="SELECT * FROM location WHERE SISN = " + rsetAgt.getInt("SISN"); rsetScope = agtdb.runSqlResultSet(insertSQL); scopeDes=rsetAgt.getString("ENG_DESC"); //System.out.println(scopeDes); if (scopeDes != null){ matcher = 
pattern.matcher(scopeDes); if (matcher.find() && rsetScope.next()==false){ //count++; coor = matcher.group(); matlng = patlng.matcher(coor); matlng.find(); longitude = matlng.group(); matlat = patlat.matcher(coor); matlat.find(); latitude = matlat.group(); latWords = latitude.trim().split(" "); lngWords = longitude.trim().split(" "); //System.out.println(rsetAgt.getInt("SISN") +" "+ scopeDes +" " + coor); if (latWords.length==4){ lat = latWords[3].contains("S")? -1*((((Float.parseFloat(latWords[2])/60)+Float.parseFloat(latWords[1]))/60)+Float.parseFloat(latWords[0])) : ((((Float.parseFloat(latWords[2])/60)+Float.parseFloat(latWords[1]))/60)+Float.parseFloat(latWords[0])); if (lngWords.length==4){ lng = lngWords[3].contains("W")? -1*((((Float.parseFloat(lngWords[2])/60)+Float.parseFloat(lngWords[1]))/60)+Float.parseFloat(lngWords[0])) : ((((Float.parseFloat(lngWords[2])/60)+Float.parseFloat(lngWords[1]))/60)+Float.parseFloat(lngWords[0])); insertSQL= "INSERT INTO location"+"(SISN,name,scope,coor,lat,lng)"+"VALUES"+ "("+ rsetAgt.getInt("SISN") +","+"\""+ rsetAgt.getString("ENG_DESC") +"\""+","+"\""+ scopeDes +"\""+","+"\""+ coor +"\""+","+ lat +","+ lng +")"; //System.out.println(insertSQL); agtdb.runSql(insertSQL); }else if (lngWords.length==3){ lng = lngWords[2].contains("W")? -1*((Float.parseFloat(lngWords[1])/60)+Float.parseFloat(lngWords[0])) : ((Float.parseFloat(lngWords[1])/60)+Float.parseFloat(lngWords[0])); insertSQL= "INSERT INTO location"+"(SISN,name,scope,coor,lat,lng)"+"VALUES"+ "("+ rsetAgt.getInt("SISN") +","+"\""+ rsetAgt.getString("ENG_DESC") +"\""+","+"\""+ scopeDes +"\""+","+"\""+ coor +"\""+","+ lat +","+ lng +")"; //System.out.println(insertSQL); agtdb.runSql(insertSQL); } }else if (latWords.length==3) { lat = latWords[2].contains("S")? -1*((Float.parseFloat(latWords[1])/60)+Float.parseFloat(latWords[0])) : ((Float.parseFloat(latWords[1])/60)+Float.parseFloat(latWords[0])); if (lngWords.length==4){ lng = lngWords[3].contains("W")? 
-1*((((Float.parseFloat(lngWords[2])/60)+Float.parseFloat(lngWords[1]))/60)+Float.parseFloat(lngWords[0])) : ((((Float.parseFloat(lngWords[2])/60)+Float.parseFloat(lngWords[1]))/60)+Float.parseFloat(lngWords[0])); insertSQL= "INSERT INTO location"+"(SISN,name,scope,coor,lat,lng)"+"VALUES"+ "("+ rsetAgt.getInt("SISN") +","+"\""+ rsetAgt.getString("ENG_DESC") +"\""+","+"\""+ scopeDes +"\""+","+"\""+ coor +"\""+","+ lat +","+ lng +")"; //System.out.println(insertSQL); agtdb.runSql(insertSQL); }else if (lngWords.length==3){ lng = lngWords[2].contains("W")? -1*((Float.parseFloat(lngWords[1])/60)+Float.parseFloat(lngWords[0])) : ((Float.parseFloat(lngWords[1])/60)+Float.parseFloat(lngWords[0])); insertSQL= "INSERT INTO location"+"(SISN,name,scope,coor,lat,lng)"+"VALUES"+ "("+ rsetAgt.getInt("SISN") +","+"\""+ rsetAgt.getString("ENG_DESC") +"\""+","+"\""+ scopeDes +"\""+","+"\""+ coor +"\""+","+ lat +","+ lng +")"; //System.out.println(insertSQL); agtdb.runSql(insertSQL); } } } } } System.out.println("Location Table Second Part done"); agtdb.finalize(); } public void setGeonameTable (String userName, String insertSQL) throws SQLException, IOException { agtDB agtdb = new agtDB(); JsonObject firstObj = new JsonObject(); float lat; float lng; String geoUrl; ResultSet rsetAgt= agtdb.runSqlResultSet(insertSQL); ResultSet rsetScope; String output; int resultCount; JsonObject geoObject=new JsonObject(); JsonArray geonames=new JsonArray(); String engDesc; System.out.println("Geoname setting Start"); File logfile = new File("log.txt"); Writer logout = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(logfile,true), "UTF8")); logout.append(Calendar.getInstance().getTime() + " Geoname setting Start \n"); logout.flush(); while(rsetAgt.next()){ //System.out.println(rsetAgt.getInt("SISN")); engDesc = rsetAgt.getString("ENG_DESC"); if (engDesc== null){ insertSQL= "INSERT INTO geonames"+"(id,geoYN)"+"VALUES"+ "("+ rsetAgt.getInt("SISN") +","+"\""+"O"+"\""+")"; 
agtdb.runSql(insertSQL); }else{ insertSQL="SELECT * FROM location WHERE SISN = " + rsetAgt.getInt("SISN"); //System.out.println(insertSQL); //System.out.println(rsetAgt.getString("ENG_DESC")); rsetScope = agtdb.runSqlResultSet(insertSQL); geoUrl = "http://api.geonames.org/searchJSON?q=" + URLEncoder.encode(rsetAgt.getString("ENG_DESC").replace("region",""), "UTF-8") + "&username="+userName; if (rsetScope.next()){ insertSQL= "INSERT INTO geonames"+"(id,name,lat,lng,geoYN)"+"VALUES"+ "("+ rsetAgt.getInt("SISN") +","+"\""+ rsetAgt.getString("ENG_DESC")+"\""+","+rsetScope.getFloat("lat")+","+rsetScope.getFloat("lng")+","+"\""+"N"+"\""+")"; //System.out.println(rsetAgt.getString("ENG_DESC")); agtdb.runSql(insertSQL); }else{ geoUrl = "http://api.geonames.org/searchJSON?q=" + URLEncoder.encode(rsetAgt.getString("ENG_DESC").replace("region",""), "UTF-8") + "&username="+userName; //System.out.println(geoUrl); output = connectToPage(geoUrl); geoObject=(JsonObject)new JsonParser().parse(output); resultCount = geoObject.get("totalResultsCount").getAsInt(); geonames = geoObject.get("geonames").getAsJsonArray(); if (resultCount == 0){ insertSQL= "INSERT INTO geonames"+"(id,name,geoYN)"+"VALUES"+ "("+ rsetAgt.getInt("SISN") +","+"\""+ rsetAgt.getString("ENG_DESC")+"\""+","+"\""+"O"+"\""+")"; //System.out.println(rsetAgt.getString("ENG_DESC")); agtdb.runSql(insertSQL); }else { firstObj=geonames.get(0).getAsJsonObject(); lat=firstObj.get("lat").getAsFloat(); lng=firstObj.get("lng").getAsFloat(); insertSQL= "INSERT INTO geonames"+"(id,name,lat,lng,geoYN)"+"VALUES"+ "("+ rsetAgt.getInt("SISN") +","+"\""+ rsetAgt.getString("ENG_DESC")+"\""+","+lat+","+lng+","+"\""+"Y"+"\""+")"; //System.out.println(rsetAgt.getString("ENG_DESC")); agtdb.runSql(insertSQL); } } } } System.out.println("Geoname Setting Done"); logout.append(Calendar.getInstance().getTime() + " Geoname Setting Done \n"); logout.flush(); logout.close(); } public static String connectToPage(String pageURL){ try { URL url = 
new URL(pageURL); //System.out.println(url); HttpURLConnection conn = (HttpURLConnection) url.openConnection(); conn.setRequestMethod("GET"); conn.setRequestProperty("Accept", "application/json"); if (conn.getResponseCode() != 200) { throw new RuntimeException("Failed : HTTP error code : " + conn.getResponseCode()); } BufferedReader br = new BufferedReader(new InputStreamReader( (conn.getInputStream()))); String output; output = br.readLine(); //System.out.println("Output from Server .... "); //System.out.println(output); conn.disconnect(); return output; } catch (MalformedURLException e) { e.printStackTrace(); return ("MalformedURLEXception e"); } catch (IOException e) { e.printStackTrace(); return ("IOException e"); } } }
/** * Copyright (c) 2000-present Liferay, Inc. All rights reserved. * * This library is free software; you can redistribute it and/or modify it under * the terms of the GNU Lesser General Public License as published by the Free * Software Foundation; either version 2.1 of the License, or (at your option) * any later version. * * This library is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS * FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more * details. */ package org.oep.dossiermgt.model.impl; import com.liferay.portal.kernel.util.StringBundler; import com.liferay.portal.kernel.util.StringPool; import com.liferay.portal.model.CacheModel; import org.oep.dossiermgt.model.DossierFolder; import java.io.Externalizable; import java.io.IOException; import java.io.ObjectInput; import java.io.ObjectOutput; import java.util.Date; /** * The cache model class for representing DossierFolder in entity cache. 
*
* @author trungdk
* @see DossierFolder
* @generated
*/
public class DossierFolderCacheModel implements CacheModel<DossierFolder>,
    Externalizable {

    /** Renders every cached attribute as a "{key=value, ...}" string for debugging. */
    @Override
    public String toString() {
        // 17 attributes: 17 labels + 17 values + closing brace = 35 appends
        StringBundler sb = new StringBundler(35);

        sb.append("{uuid=").append(uuid);
        sb.append(", dossierFolderId=").append(dossierFolderId);
        sb.append(", userId=").append(userId);
        sb.append(", groupId=").append(groupId);
        sb.append(", companyId=").append(companyId);
        sb.append(", createDate=").append(createDate);
        sb.append(", modifiedDate=").append(modifiedDate);
        sb.append(", folderName=").append(folderName);
        sb.append(", parentDossierFolderId=").append(parentDossierFolderId);
        sb.append(", sequenceNo=").append(sequenceNo);
        sb.append(", procedureFilter=").append(procedureFilter);
        sb.append(", statusFilter=").append(statusFilter);
        sb.append(", tagFilter=").append(tagFilter);
        sb.append(", filterByOrganization=").append(filterByOrganization);
        sb.append(", filterByUser=").append(filterByUser);
        sb.append(", orderBy=").append(orderBy);
        sb.append(", counting=").append(counting);
        sb.append("}");

        return sb.toString();
    }

    /**
     * Rehydrates a DossierFolder entity from this cache model. Null strings become
     * {@code StringPool.BLANK}; {@code Long.MIN_VALUE} timestamps become null dates.
     */
    @Override
    public DossierFolder toEntityModel() {
        DossierFolderImpl dossierFolderImpl = new DossierFolderImpl();

        dossierFolderImpl.setUuid((uuid == null) ? StringPool.BLANK : uuid);
        dossierFolderImpl.setDossierFolderId(dossierFolderId);
        dossierFolderImpl.setUserId(userId);
        dossierFolderImpl.setGroupId(groupId);
        dossierFolderImpl.setCompanyId(companyId);
        dossierFolderImpl.setCreateDate(
            (createDate == Long.MIN_VALUE) ? null : new Date(createDate));
        dossierFolderImpl.setModifiedDate(
            (modifiedDate == Long.MIN_VALUE) ? null : new Date(modifiedDate));
        dossierFolderImpl.setFolderName(
            (folderName == null) ? StringPool.BLANK : folderName);
        dossierFolderImpl.setParentDossierFolderId(parentDossierFolderId);
        dossierFolderImpl.setSequenceNo(sequenceNo);
        dossierFolderImpl.setProcedureFilter(
            (procedureFilter == null) ? StringPool.BLANK : procedureFilter);
        dossierFolderImpl.setStatusFilter(
            (statusFilter == null) ? StringPool.BLANK : statusFilter);
        dossierFolderImpl.setTagFilter(
            (tagFilter == null) ? StringPool.BLANK : tagFilter);
        dossierFolderImpl.setFilterByOrganization(filterByOrganization);
        dossierFolderImpl.setFilterByUser(filterByUser);
        dossierFolderImpl.setOrderBy((orderBy == null) ? StringPool.BLANK : orderBy);
        dossierFolderImpl.setCounting(counting);

        dossierFolderImpl.resetOriginalValues();

        return dossierFolderImpl;
    }

    /**
     * Deserializes the model; the read order must mirror {@link #writeExternal} exactly.
     */
    @Override
    public void readExternal(ObjectInput objectInput) throws IOException {
        uuid = objectInput.readUTF();
        dossierFolderId = objectInput.readLong();
        userId = objectInput.readLong();
        groupId = objectInput.readLong();
        companyId = objectInput.readLong();
        createDate = objectInput.readLong();
        modifiedDate = objectInput.readLong();
        folderName = objectInput.readUTF();
        parentDossierFolderId = objectInput.readLong();
        sequenceNo = objectInput.readInt();
        procedureFilter = objectInput.readUTF();
        statusFilter = objectInput.readUTF();
        tagFilter = objectInput.readUTF();
        filterByOrganization = objectInput.readInt();
        filterByUser = objectInput.readInt();
        orderBy = objectInput.readUTF();
        counting = objectInput.readInt();
    }

    /**
     * Serializes the model; null strings are written as {@code StringPool.BLANK}
     * because {@code writeUTF} rejects null.
     */
    @Override
    public void writeExternal(ObjectOutput objectOutput) throws IOException {
        objectOutput.writeUTF((uuid == null) ? StringPool.BLANK : uuid);
        objectOutput.writeLong(dossierFolderId);
        objectOutput.writeLong(userId);
        objectOutput.writeLong(groupId);
        objectOutput.writeLong(companyId);
        objectOutput.writeLong(createDate);
        objectOutput.writeLong(modifiedDate);
        objectOutput.writeUTF((folderName == null) ? StringPool.BLANK : folderName);
        objectOutput.writeLong(parentDossierFolderId);
        objectOutput.writeInt(sequenceNo);
        objectOutput.writeUTF(
            (procedureFilter == null) ? StringPool.BLANK : procedureFilter);
        objectOutput.writeUTF((statusFilter == null) ? StringPool.BLANK : statusFilter);
        objectOutput.writeUTF((tagFilter == null) ? StringPool.BLANK : tagFilter);
        objectOutput.writeInt(filterByOrganization);
        objectOutput.writeInt(filterByUser);
        objectOutput.writeUTF((orderBy == null) ? StringPool.BLANK : orderBy);
        objectOutput.writeInt(counting);
    }

    public String uuid;
    public long dossierFolderId;
    public long userId;
    public long groupId;
    public long companyId;
    public long createDate;
    public long modifiedDate;
    public String folderName;
    public long parentDossierFolderId;
    public int sequenceNo;
    public String procedureFilter;
    public String statusFilter;
    public String tagFilter;
    public int filterByOrganization;
    public int filterByUser;
    public String orderBy;
    public int counting;
}
package edu.isi.pegasus.planner.catalog.transformation.client; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; import edu.isi.pegasus.common.util.Currently; import edu.isi.pegasus.common.util.XMLWriter; import edu.isi.pegasus.planner.catalog.classes.SysInfo; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.catalog.transformation.classes.Container; import edu.isi.pegasus.planner.catalog.transformation.classes.Container.MountPoint; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import edu.isi.pegasus.planner.catalog.transformation.classes.TransformationCatalogKeywords; import edu.isi.pegasus.planner.catalog.transformation.classes.TransformationStore; import edu.isi.pegasus.planner.classes.Profile; import edu.isi.pegasus.planner.dax.Executable; import edu.isi.pegasus.planner.dax.Executable.ARCH; import edu.isi.pegasus.planner.dax.Executable.OS; import edu.isi.pegasus.planner.dax.PFN; import java.io.BufferedWriter; import java.io.IOException; import java.io.OutputStreamWriter; import java.io.Writer; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; /** * This is a utility class for converting transformation catalog into different formats. 
*
* @author prasanth@isi.edu
* @version $Revision $
*/
public class TCFormatUtility {

    /**
     * Converts the transformations into multi line text format.
     *
     * @param mTCStore the transformation store
     * @return the text format
     */
    public static String toTextFormat(TransformationStore mTCStore) {
        String newline = System.getProperty("line.separator", "\r\n");
        String indent = "";
        StringBuffer buf = new StringBuffer();
        String newIndent = indent + "\t";

        // header identifying the dump and its creation time
        buf.append(
                "# multiple line text-based transformation catalog: "
                        + Currently.iso8601(false, true, true, new Date()));
        buf.append(newline);

        // traverse through all the logical transformations in the catalog
        for (Iterator i = mTCStore.getTransformations(null, null).iterator(); i.hasNext(); ) {
            // transformation is the complete name comprised of namespace,name,version
            String transformation = (String) i.next();

            buf.append(indent);
            buf.append("tr ");
            buf.append(transformation);
            buf.append(" {");
            buf.append(newline);

            // write out all the entries for the transformation, one per site
            for (TransformationCatalogEntry entry :
                    mTCStore.getEntries(transformation, (String) null)) {
                buf.append(toText(entry, newline, newIndent));
            }

            buf.append(indent);
            buf.append("}");
            buf.append(newline);
            buf.append(newline);
        }
        return buf.toString();
    }

    /**
     * Converts the transformation catalog entry object to the multi line textual
     * representation. e.g.
     *
     * <p>site wind { profile env "me" "with" profile condor "more" "test" pfn "/path/to/keg" arch
     * "x86" os "linux" osrelease "fc" osversion "4" type "STAGEABLE" }
     *
     * @param entry the transformation catalog entry
     * @param newline the newline characters
     * @param indent the indentation to use
     * @return the textual description
     */
    private static String toText(TransformationCatalogEntry entry, String newline, String indent) {
        StringBuffer sb = new StringBuffer();
        indent = (indent != null && indent.length() > 0) ? indent : "";
        String newIndent = indent + "\t";

        sb.append(indent);
        sb.append("site").append(" ").append(entry.getResourceId()).append(" {").append(newline);

        // list out all the profiles
        List<Profile> profiles = entry.getProfiles();
        if (profiles != null) {
            for (Profile p : profiles) {
                sb.append(newIndent)
                        .append("profile")
                        .append(" ")
                        .append(p.getProfileNamespace())
                        .append(" ")
                        .append(quote(p.getProfileKey()))
                        .append(" ")
                        .append(quote(p.getProfileValue()))
                        .append(" ")
                        .append(newline);
            }
        }

        // write out the pfn
        addLineToText(sb, newIndent, newline, "pfn", entry.getPhysicalTransformation());

        // write out sysinfo; optional fields are emitted only when present
        SysInfo s = entry.getSysInfo();
        SysInfo.Architecture arch = s.getArchitecture();
        if (arch != null) {
            addLineToText(sb, newIndent, newline, "arch", arch.toString());
        }
        SysInfo.OS os = s.getOS();
        if (os != null) {
            addLineToText(sb, newIndent, newline, "os", os.toString());
        }
        String osrelease = s.getOSRelease();
        if (osrelease != null && osrelease.length() > 0) {
            addLineToText(sb, newIndent, newline, "osrelease", osrelease);
        }
        String osversion = s.getOSVersion();
        if (osversion != null && osversion.length() > 0) {
            addLineToText(sb, newIndent, newline, "osversion", osversion);
        }
        String glibc = s.getGlibc();
        if (glibc != null && glibc.length() > 0) {
            addLineToText(sb, newIndent, newline, "glibc", glibc);
        }

        // write out the type
        addLineToText(sb, newIndent, newline, "type", entry.getType().toString());

        sb.append(indent).append("}").append(newline);
        return sb.toString();
    }

    /**
     * Convenience method to add a line to the internal textual representation.
     *
     * @param sb the StringBuffer to which contents are to be added.
     * @param newIndent the indentation
     * @param newline the newline character
     * @param key the key
     * @param value the value
     */
    private static void addLineToText(
            StringBuffer sb, String newIndent, String newline, String key, String value) {
        sb.append(newIndent).append(key).append(" ").append(quote(value)).append(newline);
    }

    /**
     * Quotes a String.
     *
     * @param str the String to be quoted.
     * @return quoted version
     */
    private static String quote(String str) {
        // NOTE(review): embedded double quotes in str are not escaped
        StringBuffer sb = new StringBuffer();
        sb.append("\"").append(str).append("\"");
        return sb.toString();
    }

    /**
     * Prints the transformations in XML format to stdout.
     *
     * @param tStore the transformation store
     */
    // Note : xml format ignores logical profiles associated with a transformation.
    public static void printXMLFormat(TransformationStore tStore) {
        BufferedWriter pw = new BufferedWriter(new OutputStreamWriter(System.out));
        XMLWriter writer = new XMLWriter(pw);

        for (TransformationCatalogEntry entry : tStore.getEntries(null, (TCType) null)) {
            Executable exec =
                    new Executable(
                            entry.getLogicalNamespace(),
                            entry.getLogicalName(),
                            entry.getLogicalVersion());
            exec.setArchitecture(ARCH.valueOf(entry.getSysInfo().getArchitecture().toString()));
            exec.setOS(OS.valueOf(entry.getSysInfo().getOS().toString()));
            exec.setOSVersion(entry.getSysInfo().getOSVersion());
            exec.setGlibc(entry.getSysInfo().getGlibc());
            exec.setInstalled(entry.getType().equals(TCType.INSTALLED));

            PFN pfn = new PFN(entry.getPhysicalTransformation(), entry.getResourceId());
            if (entry.getProfiles() != null) {
                for (Profile profile : ((List<Profile>) entry.getProfiles())) {
                    pfn.addProfile(
                            profile.getProfileNamespace(),
                            profile.getProfileKey(),
                            profile.getProfileValue());
                }
            }
            exec.addPhysicalFile(pfn);
            exec.toXML(writer);
        }
        writer.close();
    }

    /**
     * This method is used to convert the incoming format to YAML format.
     *
     * @param mTCStore store which contains the populated result
     * @param out Writer object to write the object to the file.
     */
    @SuppressWarnings("unchecked")
    public static void toYAMLFormat(TransformationStore mTCStore, Writer out) {
        ObjectMapper mapper = new ObjectMapper(new YAMLFactory());
        try {
            List<Object> transformationData = new LinkedList<Object>();
            List<TransformationCatalogEntry> entries = mTCStore.getAllEntries();

            // this holds the transformations
            Map<String, Object> transformationMap = new LinkedHashMap<>();
            List<Map<String, Object>> transformations = new LinkedList<Map<String, Object>>();
            transformationMap.put("transformations", transformations);

            // this holds the containers
            Map<String, Object> containerMap = new LinkedHashMap<>();
            List<Map<String, Object>> containers = new LinkedList<Map<String, Object>>();
            containerMap.put("cont", containers);

            Set<Container> containerInfo = new HashSet<Container>();
            Set<String> containerNames = new HashSet<>();

            // Transformation entries are granular at individual site level, so the basic
            // transformation information must not be populated again and again.
            for (TransformationCatalogEntry entry : entries) {
                // reuse the map if this logical name was already populated
                Map<String, Object> entryMap = getEntryMap(transformations, entry.getLogicalName());
                Container container = entry.getContainer();

                // collect each distinct container (keyed by name) for the second pass
                if (container != null && !containerNames.contains(container.getName())) {
                    containerInfo.add(container);
                    containerNames.add(container.getName());
                }

                // first sighting: record the basic namespace / name / version info
                if (entryMap.isEmpty()) {
                    String nameSpace = entry.getLogicalNamespace();
                    if (nameSpace != null) {
                        entryMap.put(
                                TransformationCatalogKeywords.NAMESPACE.getReservedName(),
                                nameSpace);
                    }
                    String name = entry.getLogicalName();
                    if (name != null) {
                        entryMap.put(TransformationCatalogKeywords.NAME.getReservedName(), name);
                    }
                    String version = entry.getLogicalVersion();
                    if (version != null) {
                        // NOTE(review): assumes a numeric version string — non-numeric
                        // versions throw NumberFormatException here; confirm upstream
                        Double versionDouble = Double.valueOf(version);
                        entryMap.put(
                                TransformationCatalogKeywords.VERSION.getReservedName(),
                                versionDouble);
                    }
                }

                List<Profile> profiles = entry.getProfiles();

                Object siteData =
                        entryMap.get(TransformationCatalogKeywords.SITES.getReservedName());
                if (siteData == null) {
                    siteData = new LinkedList<Map<String, Object>>();
                    entryMap.put(TransformationCatalogKeywords.SITES.getReservedName(), siteData);
                }
                LinkedList<Map<String, Object>> siteList =
                        (LinkedList<Map<String, Object>>) siteData;

                // populate site information along with profile/meta information
                siteList.add(
                        buildSite(
                                entry.getResourceId(),
                                entry.getPhysicalTransformation(),
                                container,
                                entry.getSysInfo(),
                                entry.getType(),
                                profiles));

                entryMap.put(TransformationCatalogKeywords.SITES.getReservedName(), siteList);
                transformations.add(entryMap);
            }

            // Second pass: the container section is populated separately from the
            // containers collected above.
            for (Container container : containerInfo) {
                HashMap<String, Object> containerData = new HashMap<>();
                containerData.put(
                        TransformationCatalogKeywords.NAME.getReservedName(), container.getName());
                if (container.getImageSite() != null) {
                    containerData.put(
                            TransformationCatalogKeywords.CONTAINER_IMAGE_SITE.getReservedName(),
                            container.getImageSite());
                }
                if (container.getImageDefinitionURL() != null) {
                    containerData.put(
                            TransformationCatalogKeywords.CONTAINER_DOCKERFILE.getReservedName(),
                            container.getImageDefinitionURL());
                }
                if (container.getImageURL() != null) {
                    containerData.put(
                            TransformationCatalogKeywords.CONTAINER_IMAGE.getReservedName(),
                            container.getImageURL().getURL());
                }
                if (container.getMountPoints() != null && !container.getMountPoints().isEmpty()) {
                    // mount points serialize as "source:destination[:options]"
                    List<String> mountPoints = new LinkedList<>();
                    for (MountPoint point : container.getMountPoints()) {
                        String mountPoint =
                                point.getSourceDirectory()
                                        + ":"
                                        + point.getDestinationDirectory();
                        if (point.getMountOptions() != null) {
                            mountPoint += ":" + point.getMountOptions();
                        }
                        mountPoints.add(mountPoint);
                    }
                    containerData.put(
                            TransformationCatalogKeywords.CONTAINER_MOUNT.getReservedName(),
                            mountPoints);
                }
                containerData.put(
                        TransformationCatalogKeywords.TYPE.getReservedName(), container.getType());

                List<Profile> profiles = container.getProfiles();
                if (profiles != null) {
                    List<Map<String, Map<String, Object>>> profileData = buildProfiles(profiles);
                    if (profileData != null && profileData.size() > 0) {
                        containerData.put(
                                TransformationCatalogKeywords.PROFILES.getReservedName(),
                                profileData);
                    }
                    Map<String, Object> metaData = buildMeta(profiles);
                    if (metaData != null && metaData.size() > 0) {
                        containerData.put(
                                TransformationCatalogKeywords.METADATA.getReservedName(), metaData);
                    }
                }
                containers.add(containerData);
            }

            // only emit the sections that actually have content
            if (transformations.size() > 0) {
                transformationData.add(transformationMap);
            }
            if (containerInfo.size() > 0) {
                transformationData.add(containerMap);
            }
            mapper.writeValue(out, transformationData);
        } catch (IOException e) {
            // NOTE(review): failure is only logged; callers receive no error signal
            e.printStackTrace();
        }
    }

    /**
     * Returns the already-populated entry map for a logical name, detaching it from the
     * list so the caller can re-append it after updating; otherwise returns a fresh
     * empty map.
     */
    private static Map<String, Object> getEntryMap(
            List<Map<String, Object>> transformations, String logicalName) {
        for (Map<String, Object> transformation : transformations) {
            if (transformation
                    .get(TransformationCatalogKeywords.NAME.getReservedName())
                    .equals(logicalName)) {
                transformations.remove(transformation);
                return transformation;
            }
        }
        return new LinkedHashMap<String, Object>();
    }

    /**
     * This is used to build the site related information
     *
     * @param resourceId - The id of the site.
     * @param pfn - The PFN of the corresponding site.
     * @param container - Container info of the site.
     * @param sysInfo - System Info like Architecture, OS.
     * @param type - TCType of the site.
     * @param profiles - Profiles to be added to the site.
     * @return Map&lt;String, Object&gt; - Object representing the site information.
     */
    private static Map<String, Object> buildSite(
            String resourceId,
            String pfn,
            Container container,
            SysInfo sysInfo,
            TCType type,
            List<Profile> profiles) {
        Map<String, Object> siteInfo = new LinkedHashMap<>();
        if (resourceId != null) {
            siteInfo.put(TransformationCatalogKeywords.NAME.getReservedName(), resourceId);
        }
        if (sysInfo.getArchitecture() != null) {
            siteInfo.put(
                    TransformationCatalogKeywords.SITE_ARCHITECTURE.getReservedName(),
                    sysInfo.getArchitecture());
        }
        if (sysInfo.getOS() != null) {
            siteInfo.put(TransformationCatalogKeywords.SITE_OS.getReservedName(), sysInfo.getOS());
        }
        if (container != null) {
            siteInfo.put(
                    TransformationCatalogKeywords.SITE_CONTAINER_NAME.getReservedName(),
                    container.getName());
        }
        if (sysInfo.getOSRelease() != null && !sysInfo.getOSRelease().equals("")) {
            siteInfo.put(
                    TransformationCatalogKeywords.SITE_OS_RELEASE.getReservedName(),
                    sysInfo.getOSRelease());
        }
        if (sysInfo.getOSVersion() != null && !sysInfo.getOSVersion().equals("")) {
            // NOTE(review): assumes an integral OS version — "7.2" would throw here
            siteInfo.put(
                    TransformationCatalogKeywords.SITE_OS_VERSION.getReservedName(),
                    Integer.parseInt(sysInfo.getOSVersion()));
        }
        if (pfn != null) {
            siteInfo.put(TransformationCatalogKeywords.SITE_PFN.getReservedName(), pfn);
        }
        if (type != null) {
            siteInfo.put(TransformationCatalogKeywords.TYPE.getReservedName(), type);
        }
        if (profiles != null) {
            List<Map<String, Map<String, Object>>> profileData = buildProfiles(profiles);
            if (profileData != null && profileData.size() > 0) {
                siteInfo.put(TransformationCatalogKeywords.PROFILES.getReservedName(), profileData);
            }
            Map<String, Object> metaData = buildMeta(profiles);
            if (metaData != null && metaData.size() > 0) {
                siteInfo.put(TransformationCatalogKeywords.METADATA.getReservedName(), metaData);
            }
        }
        return siteInfo;
    }

    /**
     * This helper method is used to build the profiles from the existing profile.
     * Profiles may also carry meta entries; those are omitted here (see {@link #buildMeta}).
     *
     * @param profiles - List of profiles.
     * @return List&lt;Map&lt;String, Map&lt;String, Object&gt;&gt;&gt; because of the following
     *     format: profile: - env: APP_HOME: "/tmp/mukund" JAVA_HOME: "/bin/java.1.6" me: "with" -
     *     condor: more: "test"
     */
    private static List<Map<String, Map<String, Object>>> buildProfiles(List<Profile> profiles) {
        List<Map<String, Map<String, Object>>> profileList =
                new LinkedList<Map<String, Map<String, Object>>>();
        for (Profile profile : profiles) {
            String nameSpace = profile.getProfileNamespace();
            if (!nameSpace.contains("meta")) {
                String key = profile.getProfileKey();
                String value = profile.getProfileValue();
                getMapForProfile(nameSpace, profileList).put(key, value);
            }
        }
        return profileList;
    }

    /**
     * This method extracts and builds the meta data information.
     *
     * @param profiles - List of profiles.
     * @return Map&lt;String, Object&gt; - Simple key value information of meta.
     */
    private static Map<String, Object> buildMeta(List<Profile> profiles) {
        Map<String, Object> metaMap = new HashMap<String, Object>();
        for (Profile profile : profiles) {
            String nameSpace = profile.getProfileNamespace();
            if (nameSpace.contains("meta")) {
                metaMap.put(profile.getProfileKey(), profile.getProfileValue());
            }
        }
        return metaMap;
    }

    /**
     * Returns the key/value map registered for a namespace, creating and registering a
     * fresh one if no entry in the list has it yet.
     *
     * <p>BUG FIX: the previous version returned from the first loop iteration
     * unconditionally, so only the first element of {@code profileList} was ever
     * inspected and a third namespace produced duplicate entries; the trailing
     * {@code return null} was unreachable dead code.
     */
    private static Map<String, Object> getMapForProfile(
            String nameSpace, List<Map<String, Map<String, Object>>> profileList) {
        // return the existing map for this namespace, wherever it sits in the list
        for (Map<String, Map<String, Object>> maps : profileList) {
            Map<String, Object> existing = maps.get(nameSpace);
            if (existing != null) {
                return existing;
            }
        }
        // not present anywhere: register a fresh map for the namespace
        Map<String, Object> keyValueMap = new HashMap<>();
        Map<String, Map<String, Object>> maps = new HashMap<>();
        maps.put(nameSpace, keyValueMap);
        profileList.add(maps);
        return keyValueMap;
    }
}
/* * Copyright 2020 Google LLC. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.mlkit.vision.demo.java.facedetector; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.Paint; import android.graphics.PointF; import com.google.mlkit.vision.demo.GraphicOverlay; import com.google.mlkit.vision.demo.GraphicOverlay.Graphic; import com.google.mlkit.vision.face.Face; import com.google.mlkit.vision.face.FaceContour; import com.google.mlkit.vision.face.FaceLandmark; import com.google.mlkit.vision.face.FaceLandmark.LandmarkType; import java.util.Locale; /** * Graphic instance for rendering face position, contour, and landmarks within the associated * graphic overlay view. 
*/
public class FaceGraphic extends Graphic {
  // Radius (px, overlay coordinates) of the dots drawn at the face center and contour points.
  private static final float FACE_POSITION_RADIUS = 8.0f;
  // Text size for all textual annotations (ID, probabilities, Euler angles).
  private static final float ID_TEXT_SIZE = 30.0f;
  // Vertical offset used when labeling the eye landmarks.
  private static final float ID_Y_OFFSET = 40.0f;
  private static final float BOX_STROKE_WIDTH = 5.0f;
  // Must match COLORS.length; used to map a tracking id onto a color pair.
  private static final int NUM_COLORS = 10;
  private static final int[][] COLORS =
      new int[][] {
        // {Text color, background color}
        {Color.BLACK, Color.WHITE},
        {Color.WHITE, Color.MAGENTA},
        {Color.BLACK, Color.LTGRAY},
        {Color.WHITE, Color.RED},
        {Color.WHITE, Color.BLUE},
        {Color.WHITE, Color.DKGRAY},
        {Color.BLACK, Color.CYAN},
        {Color.BLACK, Color.YELLOW},
        {Color.WHITE, Color.BLACK},
        {Color.BLACK, Color.GREEN}
      };

  private final Paint facePositionPaint;
  // One paint per color pair, indexed by colorID: text, bounding box stroke, label fill.
  private final Paint[] idPaints;
  private final Paint[] boxPaints;
  private final Paint[] labelPaints;
  // volatile: the face may be swapped by the detector thread while the UI thread draws.
  private volatile Face face;

  /**
   * Creates a graphic for one detected face and pre-builds the per-color paints so that
   * {@link #draw(Canvas)} allocates nothing.
   */
  FaceGraphic(GraphicOverlay overlay, Face face) {
    super(overlay);

    this.face = face;
    final int selectedColor = Color.WHITE;

    facePositionPaint = new Paint();
    facePositionPaint.setColor(selectedColor);

    int numColors = COLORS.length;
    idPaints = new Paint[numColors];
    boxPaints = new Paint[numColors];
    labelPaints = new Paint[numColors];
    for (int i = 0; i < numColors; i++) {
      idPaints[i] = new Paint();
      idPaints[i].setColor(COLORS[i][0] /* text color */);
      idPaints[i].setTextSize(ID_TEXT_SIZE);
      boxPaints[i] = new Paint();
      boxPaints[i].setColor(COLORS[i][1] /* background color */);
      boxPaints[i].setStyle(Paint.Style.STROKE);
      boxPaints[i].setStrokeWidth(BOX_STROKE_WIDTH);
      labelPaints[i] = new Paint();
      labelPaints[i].setColor(COLORS[i][1] /* background color */);
      labelPaints[i].setStyle(Paint.Style.FILL);
    }
  }

  /** Draws the face annotations for position on the supplied canvas. */
  @Override
  public void draw(Canvas canvas) {
    // Snapshot the volatile field once so the whole pass renders a consistent face.
    Face face = this.face;
    if (face == null) {
      return;
    }

    // Draws a circle at the position of the detected face, with the face's track id below.
    float x = translateX(face.getBoundingBox().centerX());
    float y = translateY(face.getBoundingBox().centerY());
    canvas.drawCircle(x, y, FACE_POSITION_RADIUS, facePositionPaint);

    // Calculate positions.
    float left = x - scale(face.getBoundingBox().width() / 2.0f);
    float top = y - scale(face.getBoundingBox().height() / 2.0f);
    float right = x + scale(face.getBoundingBox().width() / 2.0f);
    float bottom = y + scale(face.getBoundingBox().height() / 2.0f);
    float lineHeight = ID_TEXT_SIZE + BOX_STROKE_WIDTH;
    // yLabelOffset grows more negative for every text line that will be stacked
    // ABOVE the bounding box; it starts one line up when there is an ID to show.
    float yLabelOffset = (face.getTrackingId() == null) ? 0 : -lineHeight;

    // Decide color based on face ID
    int colorID = (face.getTrackingId() == null) ? 0 : Math.abs(face.getTrackingId() % NUM_COLORS);

    // Calculate width and height of label box
    float textWidth = idPaints[colorID].measureText("ID: " + face.getTrackingId());
    if (face.getSmilingProbability() != null) {
      yLabelOffset -= lineHeight;
      textWidth =
          Math.max(
              textWidth,
              idPaints[colorID].measureText(
                  String.format(Locale.US, "Happiness: %.2f", face.getSmilingProbability())));
    }
    if (face.getLeftEyeOpenProbability() != null) {
      yLabelOffset -= lineHeight;
      textWidth =
          Math.max(
              textWidth,
              idPaints[colorID].measureText(
                  String.format(
                      Locale.US, "Left eye open: %.2f", face.getLeftEyeOpenProbability())));
    }
    if (face.getRightEyeOpenProbability() != null) {
      yLabelOffset -= lineHeight;
      textWidth =
          Math.max(
              textWidth,
              idPaints[colorID].measureText(
                  String.format(
                      Locale.US, "Right eye open: %.2f", face.getRightEyeOpenProbability())));
    }

    // Reserve three more lines for the EulerX/Y/Z angle labels.
    yLabelOffset = yLabelOffset - 3 * lineHeight;
    textWidth =
        Math.max(
            textWidth,
            idPaints[colorID].measureText(
                String.format(Locale.US, "EulerX: %.2f", face.getHeadEulerAngleX())));
    textWidth =
        Math.max(
            textWidth,
            idPaints[colorID].measureText(
                String.format(Locale.US, "EulerY: %.2f", face.getHeadEulerAngleY())));
    textWidth =
        Math.max(
            textWidth,
            idPaints[colorID].measureText(
                String.format(Locale.US, "EulerZ: %.2f", face.getHeadEulerAngleZ())));

    // Draw labels
    // Filled background panel sized to the widest label, sitting above the box top.
    canvas.drawRect(
        left - BOX_STROKE_WIDTH,
        top + yLabelOffset,
        left + textWidth + (2 * BOX_STROKE_WIDTH),
        top,
        labelPaints[colorID]);
    yLabelOffset += ID_TEXT_SIZE;
    canvas.drawRect(left, top, right, bottom, boxPaints[colorID]);
    if (face.getTrackingId() != null) {
      canvas.drawText("ID: " + face.getTrackingId(), left, top + yLabelOffset, idPaints[colorID]);
      yLabelOffset += lineHeight;
    }

    // Draws all face contours.
    for (FaceContour contour : face.getAllContours()) {
      for (PointF point : contour.getPoints()) {
        canvas.drawCircle(
            translateX(point.x), translateY(point.y), FACE_POSITION_RADIUS, facePositionPaint);
      }
    }

    // Draws smiling and left/right eye open probabilities.
    if (face.getSmilingProbability() != null) {
      canvas.drawText(
          "Smiling: " + String.format(Locale.US, "%.2f", face.getSmilingProbability()),
          left,
          top + yLabelOffset,
          idPaints[colorID]);
      yLabelOffset += lineHeight;
    }

    FaceLandmark leftEye = face.getLandmark(FaceLandmark.LEFT_EYE);
    if (face.getLeftEyeOpenProbability() != null) {
      canvas.drawText(
          "Left eye open: " + String.format(Locale.US, "%.2f", face.getLeftEyeOpenProbability()),
          left,
          top + yLabelOffset,
          idPaints[colorID]);
      yLabelOffset += lineHeight;
    }
    if (leftEye != null) {
      // Center the "Left Eye" caption horizontally on the landmark position.
      float leftEyeLeft =
          translateX(leftEye.getPosition().x) - idPaints[colorID].measureText("Left Eye") / 2.0f;
      canvas.drawRect(
          leftEyeLeft - BOX_STROKE_WIDTH,
          translateY(leftEye.getPosition().y) + ID_Y_OFFSET - ID_TEXT_SIZE,
          leftEyeLeft + idPaints[colorID].measureText("Left Eye") + BOX_STROKE_WIDTH,
          translateY(leftEye.getPosition().y) + ID_Y_OFFSET + BOX_STROKE_WIDTH,
          labelPaints[colorID]);
      canvas.drawText(
          "Left Eye",
          leftEyeLeft,
          translateY(leftEye.getPosition().y) + ID_Y_OFFSET,
          idPaints[colorID]);
    }

    FaceLandmark rightEye = face.getLandmark(FaceLandmark.RIGHT_EYE);
    if (face.getRightEyeOpenProbability() != null) {
      canvas.drawText(
          "Right eye open: " + String.format(Locale.US, "%.2f", face.getRightEyeOpenProbability()),
          left,
          top + yLabelOffset,
          idPaints[colorID]);
      yLabelOffset += lineHeight;
    }
    if (rightEye != null) {
      float rightEyeLeft =
          translateX(rightEye.getPosition().x) - idPaints[colorID].measureText("Right Eye") / 2.0f;
      canvas.drawRect(
          rightEyeLeft - BOX_STROKE_WIDTH,
          translateY(rightEye.getPosition().y) + ID_Y_OFFSET - ID_TEXT_SIZE,
          rightEyeLeft + idPaints[colorID].measureText("Right Eye") + BOX_STROKE_WIDTH,
          translateY(rightEye.getPosition().y) + ID_Y_OFFSET + BOX_STROKE_WIDTH,
          labelPaints[colorID]);
      canvas.drawText(
          "Right Eye",
          rightEyeLeft,
          translateY(rightEye.getPosition().y) + ID_Y_OFFSET,
          idPaints[colorID]);
    }

    canvas.drawText(
        "EulerX: " + face.getHeadEulerAngleX(), left, top + yLabelOffset, idPaints[colorID]);
    yLabelOffset += lineHeight;
    canvas.drawText(
        "EulerY: " + face.getHeadEulerAngleY(), left, top + yLabelOffset, idPaints[colorID]);
    yLabelOffset += lineHeight;
    canvas.drawText(
        "EulerZ: " + face.getHeadEulerAngleZ(), left, top + yLabelOffset, idPaints[colorID]);

    // Draw facial landmarks
    drawFaceLandmark(canvas, FaceLandmark.LEFT_EYE);
    drawFaceLandmark(canvas, FaceLandmark.RIGHT_EYE);
    drawFaceLandmark(canvas, FaceLandmark.LEFT_CHEEK);
    drawFaceLandmark(canvas, FaceLandmark.RIGHT_CHEEK);
  }

  /** Draws a dot at the given landmark's position, if the face reported that landmark. */
  // NOTE(review): this reads the volatile `face` field directly (unlike draw(), which
  // snapshots it into a local), so it may observe a newer face mid-frame.
  private void drawFaceLandmark(Canvas canvas, @LandmarkType int landmarkType) {
    FaceLandmark faceLandmark = face.getLandmark(landmarkType);
    if (faceLandmark != null) {
      canvas.drawCircle(
          translateX(faceLandmark.getPosition().x),
          translateY(faceLandmark.getPosition().y),
          FACE_POSITION_RADIUS,
          facePositionPaint);
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.cache.query; import org.apache.geode.cache.Cache; import org.apache.geode.cache.CacheFactory; import org.apache.geode.cache.Region; import org.apache.geode.cache.query.internal.DefaultQuery; import org.apache.geode.cache.query.internal.QueryObserver; import org.apache.geode.cache.query.internal.index.CompactRangeIndex; import org.apache.geode.cache.query.internal.index.IndexStore.IndexStoreEntry; import org.apache.geode.cache.query.internal.index.PrimaryKeyIndex; import org.apache.geode.cache.query.internal.index.RangeIndex; import org.apache.geode.internal.cache.LocalRegion.NonTXEntry; import org.apache.geode.internal.cache.RegionEntry; import org.apache.geode.internal.cache.persistence.query.CloseableIterator; import org.apache.geode.pdx.PdxInstance; import org.apache.geode.pdx.PdxInstanceFactory; import org.apache.geode.pdx.internal.PdxInstanceFactoryImpl; import org.apache.geode.pdx.internal.PdxInstanceImpl; import org.apache.geode.pdx.internal.PdxString; import org.apache.geode.test.junit.categories.IntegrationTest; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; import java.util.*; import static 
org.apache.geode.distributed.ConfigurationProperties.MCAST_PORT;
import static org.junit.Assert.*;

/**
 * Integration tests verifying that OQL queries over PDX-serialized domain objects work when
 * string fields are held internally as {@link PdxString} — with no index, with compact-range,
 * range, and primary-key indexes, with heterogeneous (PDX + plain Java) region contents, and
 * while region entries are flagged as update-in-progress.
 */
@Category(IntegrationTest.class)
public class PdxStringQueryJUnitTest {
  private Cache c;
  private Region r;
  private String regName = "exampleRegion";
  QueryService qs;
  QueryObserver observer; // NOTE(review): appears unused in this class — candidate for removal.

  // These constants double as indices into the 3-element query arrays in
  // executeQueriesValidateResults(): slot 0 = plain region queries, slot 1 = range-index
  // (positions join) variants, slot 2 = primary-key-friendly variants. NO_INDEX and
  // INDEX_TYPE_COMPACTRANGE are both 0 because both cases run the same plain queries.
  private static final int NO_INDEX = 0;
  private static final int INDEX_TYPE_COMPACTRANGE = 0;
  private static final int INDEX_TYPE_PRIMARYKEY = 2;
  private static final int INDEX_TYPE_RANGE = 1;

  /** Creates a standalone (mcast-port 0) cache, the example region, and the query service. */
  @Before
  public void setUp() {
    this.c = new CacheFactory().set(MCAST_PORT, "0").create();
    r = c.createRegionFactory().create(regName);
    qs = c.getQueryService();
  }

  @After
  public void tearDown() {
    this.c.close();
  }

  /** Queries against pure PdxInstance values, no index. */
  @Test
  public void testQueriesPdxInstances() throws Exception {
    putPdxInstances();
    executeQueriesValidateResults(NO_INDEX);
    r.clear();
  }

  /** Queries against a mix of PdxInstance and plain Java TestObject values, no index. */
  @Test
  public void testQueriesHeterogenousObjects() throws Exception {
    putHeterogeneousObjects();
    executeQueriesValidateResults(NO_INDEX);
    r.clear();
  }

  /** Compact-range index on secId: index keys must be stored as PdxString. */
  @Test
  public void testQueriesWithCompactRangeIndexPdxInstances() throws Exception {
    Index index = qs.createIndex("index1", "secId", "/exampleRegion");
    assertTrue(index instanceof CompactRangeIndex);
    putPdxInstances();
    CloseableIterator<IndexStoreEntry> indexIterator = null;
    try {
      indexIterator = ((CompactRangeIndex) index).getIndexStorage().iterator(null);
      while (indexIterator.hasNext()) {
        assertTrue(indexIterator.next().getDeserializedKey() instanceof PdxString);
      }
    } finally {
      if (indexIterator != null) {
        indexIterator.close();
      }
    }
    executeQueriesValidateResults(INDEX_TYPE_COMPACTRANGE);
    r.clear();
  }

  /** Same as above, but with every region entry flagged update-in-progress. */
  @Test
  public void testQueriesWithCompactRangeIndexPdxInstancesREUpdateInProgress() throws Exception {
    Index index = qs.createIndex("index1", "secId", "/exampleRegion");
    assertTrue(index instanceof CompactRangeIndex);
    putPdxInstancesWithREUpdateInProgress();
    CloseableIterator<IndexStoreEntry> indexIterator = null;
    try {
      indexIterator = ((CompactRangeIndex) index).getIndexStorage().iterator(null);
      while (indexIterator.hasNext()) {
        assertTrue(indexIterator.next().getDeserializedKey() instanceof PdxString);
      }
    } finally {
      if (indexIterator != null) {
        indexIterator.close();
      }
    }
    executeQueriesValidateResults(INDEX_TYPE_COMPACTRANGE);
    r.clear();
  }

  // NOTE(review): unlike its sibling tests, this one never creates the index even though it
  // passes INDEX_TYPE_COMPACTRANGE — it effectively runs the slot-0 queries unindexed.
  @Test
  public void testQueriesWithCompactRangeIndexHeterogenousObjects() throws Exception {
    putHeterogeneousObjects();
    executeQueriesValidateResults(INDEX_TYPE_COMPACTRANGE);
    r.clear();
  }

  /** Range index over the positions join: keys in the value-to-entries map must be PdxString. */
  @Test
  public void testQueriesWithRangeIndex() throws Exception {
    Index index = qs.createIndex("index2", "p.secId", "/exampleRegion p, p.positions.values");
    assertTrue(index instanceof RangeIndex);
    PdxInstanceFactory pf = PdxInstanceFactoryImpl.newCreator("Portfolio", false);
    pf.writeInt("ID", 111);
    pf.writeString("secId", "IBM");
    pf.writeString("status", "active");
    HashMap positions = new HashMap();
    positions.put("price", "50");
    positions.put("price", "60"); // overwrites "50" — HashMap keeps one entry per key
    pf.writeObject("positions", positions);
    PdxInstance pi = pf.create();
    r.put("IBM", pi);
    positions = new HashMap();
    positions.put("price", "100");
    positions.put("price", "120");
    r.put("YHOO", new TestObject(222, "YHOO", positions, "inactive"));
    pf = PdxInstanceFactoryImpl.newCreator("Portfolio", false);
    pf.writeInt("ID", 333);
    pf.writeString("secId", "GOOGL");
    pf.writeString("status", "active");
    positions = new HashMap();
    positions.put("price", "130");
    positions.put("price", "150");
    pf.writeObject("positions", positions);
    pi = pf.create();
    positions = new HashMap();
    positions.put("price", "200");
    positions.put("price", "220");
    r.put("VMW", new TestObject(111, "VMW", positions, "inactive"));
    r.put("GOOGL", pi);
    Map map = ((RangeIndex) index).getValueToEntriesMap();
    for (Object key : map.keySet()) {
      assertTrue(key instanceof PdxString);
    }
    executeQueriesValidateResults(INDEX_TYPE_RANGE);
    qs.removeIndex(index);
    r.clear();
  }

  /** Range-index variant with entries flagged update-in-progress before querying. */
  @Test
  public void testQueriesWithRangeIndexWithREUpdateInProgress() throws Exception {
    Index index = qs.createIndex("index2", "p.secId", "/exampleRegion p, p.positions.values");
    assertTrue(index instanceof RangeIndex);
    PdxInstanceFactory pf = PdxInstanceFactoryImpl.newCreator("Portfolio", false);
    pf.writeInt("ID", 111);
    pf.writeString("secId", "IBM");
    pf.writeString("status", "active");
    HashMap positions = new HashMap();
    positions.put("price", "50");
    positions.put("price", "60");
    pf.writeObject("positions", positions);
    PdxInstance pi = pf.create();
    r.put("IBM", pi);
    positions = new HashMap();
    positions.put("price", "100");
    positions.put("price", "120");
    r.put("YHOO", new TestObject(222, "YHOO", positions, "inactive"));
    pf = PdxInstanceFactoryImpl.newCreator("Portfolio", false);
    pf.writeInt("ID", 333);
    pf.writeString("secId", "GOOGL");
    pf.writeString("status", "active");
    positions = new HashMap();
    positions.put("price", "130");
    positions.put("price", "150");
    pf.writeObject("positions", positions);
    pi = pf.create();
    positions = new HashMap();
    positions.put("price", "200");
    positions.put("price", "220");
    r.put("VMW", new TestObject(111, "VMW", positions, "inactive"));
    r.put("GOOGL", pi);
    makeREUpdateInProgress();
    Map map = ((RangeIndex) index).getValueToEntriesMap();
    for (Object key : map.keySet()) {
      assertTrue(key instanceof PdxString);
    }
    DefaultQuery.setPdxReadSerialized(true);
    executeQueriesValidateResults(INDEX_TYPE_RANGE);
    qs.removeIndex(index);
    r.clear();
  }

  /** Primary-key index; exercised against both pure-PDX and heterogeneous contents. */
  @Test
  public void testQueriesWithPrimaryKeyIndex() throws Exception {
    Index index = qs.createKeyIndex("index3", "secId", "/exampleRegion");
    assertTrue(index instanceof PrimaryKeyIndex);
    putPdxInstances();
    executeQueriesValidateResults(INDEX_TYPE_PRIMARYKEY);
    r.clear();
    putHeterogeneousObjects();
    executeQueriesValidateResults(INDEX_TYPE_PRIMARYKEY);
    qs.removeIndex(index);
    r.clear();
  }

  /** String methods (toLowerCase, startsWith) must work on PdxString-backed fields. */
  @Test
  public void testStringMethods() throws Exception {
    putPdxInstances();
    String queries[] = {"select secId from /exampleRegion where secId.toLowerCase() = 'ibm'",
        "select secId from /exampleRegion where secId.startsWith('I')"};
    for (int i = 0; i < queries.length; i++) {
      SelectResults res = (SelectResults) qs.newQuery(queries[i]).execute();
      assertEquals("Incorrect result size returned for query. " + queries[i], 1, res.size());
      validateStringResult("IBM", res.iterator().next());
    }
    r.clear();
  }

  /** Populates four PdxInstance portfolios keyed by secId (IBM, YHOO, GOOGL, VMW). */
  public void putPdxInstances() throws Exception {
    PdxInstanceFactory pf = PdxInstanceFactoryImpl.newCreator("Portfolio", false);
    pf.writeInt("ID", 111);
    pf.writeString("status", "active");
    pf.writeString("secId", "IBM");
    PdxInstance pi = pf.create();
    r.put("IBM", pi);
    pf = PdxInstanceFactoryImpl.newCreator("Portfolio", false);
    pf.writeInt("ID", 222);
    pf.writeString("status", "inactive");
    pf.writeString("secId", "YHOO");
    pi = pf.create();
    r.put("YHOO", pi);
    pf = PdxInstanceFactoryImpl.newCreator("Portfolio", false);
    pf.writeInt("ID", 333);
    pf.writeString("status", "active");
    pf.writeString("secId", "GOOGL");
    pi = pf.create();
    r.put("GOOGL", pi);
    pf = PdxInstanceFactoryImpl.newCreator("Portfolio", false);
    pf.writeInt("ID", 111);
    pf.writeString("status", "inactive");
    pf.writeString("secId", "VMW");
    pi = pf.create();
    r.put("VMW", pi);
  }

  /** Same data as putPdxInstances(), then flags every region entry update-in-progress. */
  public void putPdxInstancesWithREUpdateInProgress() throws Exception {
    PdxInstanceFactory pf = PdxInstanceFactoryImpl.newCreator("Portfolio", false);
    pf.writeInt("ID", 111);
    pf.writeString("status", "active");
    pf.writeString("secId", "IBM");
    PdxInstance pi = pf.create();
    r.put("IBM", pi);
    pf = PdxInstanceFactoryImpl.newCreator("Portfolio", false);
    pf.writeInt("ID", 222);
    pf.writeString("status", "inactive");
    pf.writeString("secId", "YHOO");
    pi = pf.create();
    r.put("YHOO", pi);
    pf = PdxInstanceFactoryImpl.newCreator("Portfolio", false);
    pf.writeInt("ID", 333);
    pf.writeString("status", "active");
    pf.writeString("secId", "GOOGL");
    pi = pf.create();
    r.put("GOOGL", pi);
    pf = PdxInstanceFactoryImpl.newCreator("Portfolio", false);
    pf.writeInt("ID", 111);
    pf.writeString("status", "inactive");
    pf.writeString("secId", "VMW");
    pi = pf.create();
    r.put("VMW", pi);
    makeREUpdateInProgress();
  }

  /** Marks every region entry's underlying RegionEntry as update-in-progress. */
  public void makeREUpdateInProgress() {
    Iterator entryItr = r.entrySet().iterator();
    while (entryItr.hasNext()) {
      Region.Entry nonTxEntry = (Region.Entry) entryItr.next();
      RegionEntry entry = ((NonTXEntry) nonTxEntry).getRegionEntry();
      entry.setUpdateInProgress(true);
      assertTrue(entry.isUpdateInProgress());
    }
  }

  /** Populates a mix of PdxInstance (IBM, VMW) and plain TestObject (YHOO, GOOGL) values. */
  public void putHeterogeneousObjects() throws Exception {
    PdxInstanceFactory pf = PdxInstanceFactoryImpl.newCreator("Portfolio", false);
    pf.writeInt("ID", 111);
    pf.writeString("secId", "IBM");
    pf.writeString("status", "active");
    PdxInstance pi = pf.create();
    r.put("IBM", pi);
    r.put("YHOO", new TestObject(222, "YHOO", "inactive"));
    r.put("GOOGL", new TestObject(333, "GOOGL", "active"));
    pf = PdxInstanceFactoryImpl.newCreator("Portfolio", false);
    pf.writeInt("ID", 111);
    pf.writeString("secId", "VMW");
    pf.writeString("status", "inactive");
    pi = pf.create();
    r.put("VMW", pi);
  }

  /**
   * Runs the full battery of OQL queries and asserts result sizes/contents. Each query comes
   * in three variants selected by {@code indexType} (see the constant comments above). PDX
   * read-serialized mode is switched on for the duration and restored at the end.
   */
  private void executeQueriesValidateResults(int indexType) throws Exception {
    DefaultQuery.setPdxReadSerialized(true);
    // count(*)
    String[] query = {"select count(*) from /exampleRegion",
        "select count(*) from /exampleRegion p, p.positions.values v",
        "select count(*) from /exampleRegion"};
    SelectResults res = (SelectResults) qs.newQuery(query[indexType]).execute();
    assertEquals(4, res.iterator().next());
    // equality on a string field
    query = new String[] {"select secId from /exampleRegion where secId = 'IBM'",
        "select p.secId from /exampleRegion p, p.positions.values v where p.secId = 'IBM'",
        "select secId from /exampleRegion where secId = 'IBM'"};
    res = (SelectResults) qs.newQuery(query[indexType]).execute();
    assertEquals(1, res.size());
    validateStringResult("IBM", res.iterator().next());
    // equality against an ELEMENT() subquery
    query = new String[] {
        "select p.secId from /exampleRegion p where p.secId = ELEMENT(select e.secId from /exampleRegion e where e.secId = 'IBM') ",
        "select p.secId from /exampleRegion p, p.positions.values v where p.secId = ELEMENT(select p1.secId from /exampleRegion p1, p.positions.values v1 where p1.secId = 'IBM')",
        "select p.secId from /exampleRegion p where p.secId = ELEMENT(select e.secId from /exampleRegion e where e.secId = 'IBM' )"};
    res = (SelectResults) qs.newQuery(query[indexType]).execute();
    assertEquals(1, res.size());
    validateStringResult("IBM", res.iterator().next());
    // LIKE with no wildcard
    query = new String[] {"select secId from /exampleRegion where secId LIKE 'VMW'",
        "select p.secId from /exampleRegion p, p.positions.values v where p.secId LIKE 'VMW'",
        "select secId from /exampleRegion where secId LIKE 'VMW'"};
    res = (SelectResults) qs.newQuery(query[indexType]).execute();
    assertEquals(1, res.size());
    validateStringResult("VMW", res.iterator().next());
    // LIKE with trailing wildcard
    query = new String[] {"select secId from /exampleRegion where secId LIKE 'VM%'",
        "select p.secId from /exampleRegion p, p.positions.values v where p.secId LIKE 'VM%'",
        "select secId from /exampleRegion where secId LIKE 'VM%'"};
    res = (SelectResults) qs.newQuery(query[indexType]).execute();
    assertEquals(1, res.size());
    validateStringResult("VMW", res.iterator().next());
    // IN SET of string literals
    query = new String[] {"select secId from /exampleRegion where secId IN SET ('YHOO', 'VMW')",
        "select p.secId from /exampleRegion p, p.positions.values v where p.secId IN SET ('YHOO', 'VMW')",
        "select secId from /exampleRegion where secId IN SET ('YHOO', 'VMW')"};
    res = (SelectResults) qs.newQuery(query[indexType]).execute();
    assertEquals(2, res.size());
    List secIdsList = new ArrayList();
    secIdsList.add("VMW");
    secIdsList.add("YHOO");
    Iterator iter = res.iterator();
    while (iter.hasNext()) {
      validateResult(secIdsList, iter.next());
    }
    // IN against a subquery
    query = new String[] {
        "select p.secId from /exampleRegion p where p.secId IN (select e.secId from /exampleRegion e where e.secId ='YHOO' or e.secId = 'VMW')",
        "select p.secId from /exampleRegion p, p.positions.values v where p.secId IN (select e.secId from /exampleRegion e where e.secId ='YHOO' or e.secId = 'VMW')",
        "select p.secId from /exampleRegion p where p.secId IN (select e.secId from /exampleRegion e where e.secId ='YHOO' or e.secId = 'VMW')"};
    res = (SelectResults) qs.newQuery(query[indexType]).execute();
    assertEquals(2, res.size());
    secIdsList = new ArrayList();
    secIdsList.add("VMW");
    secIdsList.add("YHOO");
    iter = res.iterator();
    while (iter.hasNext()) {
      validateResult(secIdsList, iter.next());
    }
    // struct projection (secId, status)
    query = new String[] {"select secId, status from /exampleRegion where secId = 'IBM'",
        "select p.secId, p.status from /exampleRegion p, p.positions.values v where p.secId = 'IBM'",
        "select secId, status from /exampleRegion where secId = 'IBM'"};
    res = (SelectResults) qs.newQuery(query[indexType]).execute();
    assertEquals(1, res.size());
    secIdsList = new ArrayList();
    secIdsList.add("active");
    secIdsList.add("IBM");
    Struct rs = (Struct) res.iterator().next();
    Object o1 = rs.getFieldValues()[0];
    Object o2 = rs.getFieldValues()[1];
    validateResult(secIdsList, o1);
    validateResult(secIdsList, o2);
    // less-than comparison
    query = new String[] {"select secId from /exampleRegion where secId < 'YHOO'",
        "select p.secId from /exampleRegion p, p.positions.values v where p.secId < 'YHOO'",
        "select secId from /exampleRegion where secId < 'YHOO'"};
    res = (SelectResults) qs.newQuery(query[indexType]).execute();
    assertEquals(3, res.size());
    iter = res.iterator();
    secIdsList.clear();
    secIdsList.add("VMW");
    secIdsList.add("GOOGL");
    secIdsList.add("IBM");
    while (iter.hasNext()) {
      validateResult(secIdsList, iter.next());
    }
    // same comparison with the literal on the left
    query = new String[] {"select secId from /exampleRegion where 'YHOO' > secId",
        "select p.secId from /exampleRegion p, p.positions.values v where 'YHOO' > p.secId",
        "select secId from /exampleRegion where 'YHOO' > secId"};
    res = (SelectResults) qs.newQuery(query[indexType]).execute();
    assertEquals(3, res.size());
    iter = res.iterator();
    secIdsList.clear();
    secIdsList.add("VMW");
    secIdsList.add("GOOGL");
    secIdsList.add("IBM");
    while (iter.hasNext()) {
      validateResult(secIdsList, iter.next());
    }
    // greater-than comparison
    query = new String[] {"select secId from /exampleRegion where secId > 'IBM'",
        "select p.secId from /exampleRegion p, p.positions.values v where p.secId > 'IBM'",
        "select secId from /exampleRegion where secId > 'IBM'"};
    res = (SelectResults) qs.newQuery(query[indexType]).execute();
    assertEquals(2, res.size());
    iter = res.iterator();
    secIdsList.clear();
    secIdsList.add("VMW");
    secIdsList.add("YHOO");
    while (iter.hasNext()) {
      validateResult(secIdsList, iter.next());
    }
    // OR across a string and an int predicate
    query = new String[] {"select secId from /exampleRegion where secId > 'IBM' or ID=333",
        "select p.secId from /exampleRegion p, p.positions.values v where p.secId > 'IBM' or p.ID=333",
        "select secId from /exampleRegion where secId = 'VMW' or secId = 'YHOO' or secId = 'GOOGL'"};
    res = (SelectResults) qs.newQuery(query[indexType]).execute();
    assertEquals(3, res.size());
    iter = res.iterator();
    secIdsList.clear();
    secIdsList.add("VMW");
    secIdsList.add("YHOO");
    secIdsList.add("GOOGL");
    while (iter.hasNext()) {
      validateResult(secIdsList, iter.next());
    }
    // AND range predicate
    query = new String[] {"select secId from /exampleRegion where secId > 'IBM' and secId < 'YHOO'",
        "select p.secId from /exampleRegion p, p.positions.values v where p.secId > 'IBM' and p.secId < 'YHOO'",
        "select secId from /exampleRegion where secId > 'IBM' and secId < 'YHOO'"};
    res = (SelectResults) qs.newQuery(query[indexType]).execute();
    assertEquals(1, res.size());
    iter = res.iterator();
    secIdsList.clear();
    secIdsList.add("VMW");
    while (iter.hasNext()) {
      validateResult(secIdsList, iter.next());
    }
    // int-field equality (both IBM and VMW have ID 111)
    query = new String[] {"select secId from /exampleRegion where ID = 111",
        "select p.secId from /exampleRegion p, p.positions.values v where p.ID = 111",
        "select secId from /exampleRegion where secId = 'VMW' or secId = 'IBM'"};
    res = (SelectResults) qs.newQuery(query[indexType]).execute();
    assertEquals(2, res.size());
    iter = res.iterator();
    secIdsList.clear();
    secIdsList.add("VMW");
    secIdsList.add("IBM");
    while (iter.hasNext()) {
      validateResult(secIdsList, iter.next());
    }
    // distinct
    query = new String[] {"select distinct ID from /exampleRegion where ID = 111",
        "select distinct p.ID from /exampleRegion p, p.positions.values v where p.ID = 111",
        "select distinct secId from /exampleRegion where secId = 'VMW'"};
    res = (SelectResults) qs.newQuery(query[indexType]).execute();
    assertEquals(1, res.size());
    // limit
    query = new String[] {"select ID from /exampleRegion where ID = 111 limit 1",
        "select p.ID from /exampleRegion p, p.positions.values v where p.ID = 111 limit 1",
        "select secId from /exampleRegion where secId = 'VMW' limit 1"};
    res = (SelectResults) qs.newQuery(query[indexType]).execute();
    assertEquals(1, res.size());
    // order by on the string field
    query = new String[] {"select distinct secId from /exampleRegion order by secId",
        "select distinct p.secId from /exampleRegion p, p.positions.values order by p.secId",
        "select distinct secId from /exampleRegion order by secId"};
    res = (SelectResults) qs.newQuery(query[indexType]).execute();
    assertEquals(4, res.size());
    iter = res.iterator();
    String[] secIds = {"GOOGL", "IBM", "VMW", "YHOO"};
    int i = 0;
    while (iter.hasNext()) {
      validateStringResult(secIds[i++], iter.next());
    }
    // order by on whole objects; results may be PdxInstanceImpl or plain TestObject
    query = new String[] {"select distinct * from /exampleRegion order by secId",
        "select distinct * from /exampleRegion p, p.positions.values v order by p.secId",
        "select distinct * from /exampleRegion order by secId"};
    res = (SelectResults) qs.newQuery(query[indexType]).execute();
    assertEquals(4, res.size());
    iter = res.iterator();
    secIds = new String[] {"GOOGL", "IBM", "VMW", "YHOO"};
    i = 0;
    while (iter.hasNext()) {
      Object o = iter.next();
      if (o instanceof PdxInstanceImpl) {
        validateStringResult(secIds[i++], ((PdxInstanceImpl) o).getField("secId"));
      } else if (o instanceof TestObject) {
        validateStringResult(secIds[i++], ((TestObject) o).getSecId());
      }
    }
    // order by with limit
    query = new String[] {"select distinct secId from /exampleRegion order by secId limit 2",
        "select distinct p.secId from /exampleRegion p, p.positions.values v order by p.secId limit 2",
        "select distinct secId from /exampleRegion order by secId limit 2"};
    res = (SelectResults) qs.newQuery(query[indexType]).execute();
    assertEquals(2, res.size());
    iter = res.iterator();
    secIds = new String[] {"GOOGL", "IBM"};
    i = 0;
    while (iter.hasNext()) {
      validateStringResult(secIds[i++], iter.next());
    }
    // NOT on an equality predicate
    query = new String[] {"select secId from /exampleRegion where NOT (secId = 'VMW')",
        "select p.secId from /exampleRegion p, p.positions.values v where NOT (p.secId = 'VMW')",
        "select secId from /exampleRegion where NOT (secId = 'VMW')"};
    res = (SelectResults) qs.newQuery(query[indexType]).execute();
    assertEquals(3, res.size());
    iter = res.iterator();
    secIdsList.clear();
    secIdsList.add("YHOO");
    secIdsList.add("IBM");
    secIdsList.add("GOOGL");
    while (iter.hasNext()) {
      validateResult(secIdsList, iter.next());
    }
    // NOT ... IN SET
    query = new String[] {"select secId from /exampleRegion p where NOT (p.ID IN SET(111, 222)) ",
        "select p.secId from /exampleRegion p, p.positions.values v where NOT (p.ID IN SET(111, 222)) ",
        "select secId from /exampleRegion where NOT (secId IN SET('VMW','IBM','YHOO'))"};
    res = (SelectResults) qs.newQuery(query[indexType]).execute();
    assertEquals(1, res.size());
    iter = res.iterator();
    secIdsList.clear();
    secIdsList.add("GOOGL");
    while (iter.hasNext()) {
      validateResult(secIdsList, iter.next());
    }
    // bind parameter $1 — a plain java.lang.String must match PdxString values
    query = new String[] {"select secId from /exampleRegion where secId = $1",
        "select p.secId from /exampleRegion p, p.positions.values v where p.secId = $1",
        "select secId from /exampleRegion where secId = $1"};
    res = (SelectResults) qs.newQuery(query[indexType]).execute(new Object[] {"IBM"});
    assertEquals(1, res.size());
    validateStringResult("IBM", res.iterator().next());
    // bind parameters $1/$2 in a range predicate
    query = new String[] {"select secId from /exampleRegion where secId > $1 and secId < $2",
        "select p.secId from /exampleRegion p, p.positions.values v where p.secId > $1 and p.secId < $2",
        "select secId from /exampleRegion where secId > $1 and secId < $2"};
    res = (SelectResults) qs.newQuery(query[indexType]).execute(new Object[] {"IBM", "YHOO"});
    assertEquals(1, res.size());
    iter = res.iterator();
    secIdsList.clear();
    secIdsList.add("VMW");
    while (iter.hasNext()) {
      validateResult(secIdsList, iter.next());
    }
    DefaultQuery.setPdxReadSerialized(false);
  }

  /** Asserts equality across any combination of String and PdxString operands. */
  private void validateStringResult(Object str1, Object str2) {
    if (str1 instanceof String && str2 instanceof PdxString) {
      assertEquals(str1, str2.toString());
    } else if (str1 instanceof PdxString && str2 instanceof String) {
      assertEquals(str1.toString(), str2);
    } else if ((str1 instanceof PdxString && str2 instanceof PdxString)
        || (str1 instanceof String && str2 instanceof String)) {
      assertEquals(str1, str2);
    } else {
      fail("Not String or PdxString objects");
    }
  }

  /** Asserts membership in {@code list}, converting PdxString results to String first. */
  private void validateResult(List list, Object str2) {
    if (str2 instanceof PdxString) {
      str2 = str2.toString();
    }
    assertTrue(list.contains(str2));
  }

  /** Plain (non-PDX) domain object used to build heterogeneous region contents. */
  public static class TestObject {
    private int ID;
    private String secId;
    private String status;
    private Map positions;

    public TestObject(int id, String secId, String status) {
      this.ID = id;
      this.secId = secId;
      this.status = status;
    }

    public TestObject(int id, String secId, Map positions, String status) {
      this.ID = id;
      this.secId = secId;
      this.positions = positions;
      this.status = status;
    }

    public int getID() {
      return ID;
    }

    public void setID(int iD) {
      ID = iD;
    }

    public String getSecId() {
      return secId;
    }

    public void setSecId(String secId) {
      this.secId = secId;
    }

    public Map getPositions() {
      return positions;
    }

    public void setPositions(Map positions) {
      this.positions = positions;
    }

    public String getStatus() {
      return status;
    }

    public void setStatus(String status) {
      this.status = status;
    }
  }
}
package com.vladsch.flexmark.html; import com.vladsch.flexmark.ast.HtmlBlock; import com.vladsch.flexmark.ast.HtmlInline; import com.vladsch.flexmark.html.renderer.*; import com.vladsch.flexmark.util.ast.Document; import com.vladsch.flexmark.util.ast.IRender; import com.vladsch.flexmark.util.ast.Node; import com.vladsch.flexmark.util.builder.BuilderBase; import com.vladsch.flexmark.util.data.*; import com.vladsch.flexmark.util.dependency.DependencyResolver; import com.vladsch.flexmark.util.format.TrackedOffset; import com.vladsch.flexmark.util.format.TrackedOffsetUtils; import com.vladsch.flexmark.util.html.Attributes; import com.vladsch.flexmark.util.html.MutableAttributes; import com.vladsch.flexmark.util.misc.Extension; import com.vladsch.flexmark.util.misc.Pair; import com.vladsch.flexmark.util.sequence.Escaping; import com.vladsch.flexmark.util.sequence.LineAppendable; import com.vladsch.flexmark.util.sequence.TagRange; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.*; /** * Renders a tree of nodes to HTML. * <p> * Start with the {@link #builder} method to configure the renderer. 
Example:
 * <pre><code>
 * HtmlRenderer renderer = HtmlRenderer.builder().escapeHtml(true).build();
 * renderer.render(node);
 * </code></pre>
 */
@SuppressWarnings("WeakerAccess")
public class HtmlRenderer implements IRender {
    final public static DataKey<String> SOFT_BREAK = new DataKey<>("SOFT_BREAK", "\n");
    final public static DataKey<String> HARD_BREAK = new DataKey<>("HARD_BREAK", "<br />\n");

    // Optional raw HTML used to open/close strong emphasis, emphasis and code
    // spans; when null the default tags are used.
    final public static NullableDataKey<String> STRONG_EMPHASIS_STYLE_HTML_OPEN = new NullableDataKey<>("STRONG_EMPHASIS_STYLE_HTML_OPEN");
    final public static NullableDataKey<String> STRONG_EMPHASIS_STYLE_HTML_CLOSE = new NullableDataKey<>("STRONG_EMPHASIS_STYLE_HTML_CLOSE");
    final public static NullableDataKey<String> EMPHASIS_STYLE_HTML_OPEN = new NullableDataKey<>("EMPHASIS_STYLE_HTML_OPEN");
    final public static NullableDataKey<String> EMPHASIS_STYLE_HTML_CLOSE = new NullableDataKey<>("EMPHASIS_STYLE_HTML_CLOSE");
    final public static NullableDataKey<String> CODE_STYLE_HTML_OPEN = new NullableDataKey<>("CODE_STYLE_HTML_OPEN");
    final public static NullableDataKey<String> CODE_STYLE_HTML_CLOSE = new NullableDataKey<>("CODE_STYLE_HTML_CLOSE");
    final public static NullableDataKey<String> INLINE_CODE_SPLICE_CLASS = new NullableDataKey<>("INLINE_CODE_SPLICE_CLASS");

    final public static DataKey<Boolean> PERCENT_ENCODE_URLS = SharedDataKeys.PERCENT_ENCODE_URLS;
    final public static DataKey<Integer> INDENT_SIZE = SharedDataKeys.INDENT_SIZE;

    // Escape options cascade: each more specific key defaults to the value of
    // its parent key unless explicitly set.
    final public static DataKey<Boolean> ESCAPE_HTML = new DataKey<>("ESCAPE_HTML", false);
    final public static DataKey<Boolean> ESCAPE_HTML_BLOCKS = new DataKey<>("ESCAPE_HTML_BLOCKS", ESCAPE_HTML);
    final public static DataKey<Boolean> ESCAPE_HTML_COMMENT_BLOCKS = new DataKey<>("ESCAPE_HTML_COMMENT_BLOCKS", ESCAPE_HTML_BLOCKS);
    // FIX: the key id here was a copy/paste of "ESCAPE_HTML_BLOCKS", making this
    // key's reported name collide with ESCAPE_HTML_BLOCKS above. Option lookups
    // go by key identity, so only the name string needed correcting.
    final public static DataKey<Boolean> ESCAPE_INLINE_HTML = new DataKey<>("ESCAPE_INLINE_HTML", ESCAPE_HTML);
    final public static DataKey<Boolean> ESCAPE_INLINE_HTML_COMMENTS = new DataKey<>("ESCAPE_INLINE_HTML_COMMENTS",
ESCAPE_INLINE_HTML);

    // Suppression options cascade the same way as the escape options: each
    // more specific key defaults to the value of its parent key.
    final public static DataKey<Boolean> SUPPRESS_HTML = new DataKey<>("SUPPRESS_HTML", false);
    final public static DataKey<Boolean> SUPPRESS_HTML_BLOCKS = new DataKey<>("SUPPRESS_HTML_BLOCKS", SUPPRESS_HTML);
    final public static DataKey<Boolean> SUPPRESS_HTML_COMMENT_BLOCKS = new DataKey<>("SUPPRESS_HTML_COMMENT_BLOCKS", SUPPRESS_HTML_BLOCKS);
    final public static DataKey<Boolean> SUPPRESS_INLINE_HTML = new DataKey<>("SUPPRESS_INLINE_HTML", SUPPRESS_HTML);
    final public static DataKey<Boolean> SUPPRESS_INLINE_HTML_COMMENTS = new DataKey<>("SUPPRESS_INLINE_HTML_COMMENTS", SUPPRESS_INLINE_HTML);

    final public static DataKey<Boolean> SOURCE_WRAP_HTML = new DataKey<>("SOURCE_WRAP_HTML", false);
    final public static DataKey<Boolean> SOURCE_WRAP_HTML_BLOCKS = new DataKey<>("SOURCE_WRAP_HTML_BLOCKS", SOURCE_WRAP_HTML);

    // Header id generation options; shared keys so other renderers/formatters
    // see the same configuration.
    final public static DataKey<Boolean> HEADER_ID_GENERATOR_RESOLVE_DUPES = SharedDataKeys.HEADER_ID_GENERATOR_RESOLVE_DUPES;
    final public static DataKey<String> HEADER_ID_GENERATOR_TO_DASH_CHARS = SharedDataKeys.HEADER_ID_GENERATOR_TO_DASH_CHARS;
    final public static DataKey<String> HEADER_ID_GENERATOR_NON_DASH_CHARS = SharedDataKeys.HEADER_ID_GENERATOR_NON_DASH_CHARS;
    final public static DataKey<Boolean> HEADER_ID_GENERATOR_NO_DUPED_DASHES = SharedDataKeys.HEADER_ID_GENERATOR_NO_DUPED_DASHES;
    final public static DataKey<Boolean> HEADER_ID_GENERATOR_NON_ASCII_TO_LOWERCASE = SharedDataKeys.HEADER_ID_GENERATOR_NON_ASCII_TO_LOWERCASE;
    final public static DataKey<Boolean> HEADER_ID_REF_TEXT_TRIM_LEADING_SPACES = SharedDataKeys.HEADER_ID_REF_TEXT_TRIM_LEADING_SPACES;
    final public static DataKey<Boolean> HEADER_ID_REF_TEXT_TRIM_TRAILING_SPACES = SharedDataKeys.HEADER_ID_REF_TEXT_TRIM_TRAILING_SPACES;
    final public static DataKey<Boolean> HEADER_ID_ADD_EMOJI_SHORTCUT = SharedDataKeys.HEADER_ID_ADD_EMOJI_SHORTCUT;
    final public static DataKey<Boolean> RENDER_HEADER_ID = SharedDataKeys.RENDER_HEADER_ID;
    final public static DataKey<Boolean>
GENERATE_HEADER_ID = SharedDataKeys.GENERATE_HEADER_ID;
    final public static DataKey<Boolean> DO_NOT_RENDER_LINKS = SharedDataKeys.DO_NOT_RENDER_LINKS;

    // Fenced code info-string to CSS class mapping.
    final public static DataKey<String> FENCED_CODE_LANGUAGE_CLASS_PREFIX = new DataKey<>("FENCED_CODE_LANGUAGE_CLASS_PREFIX", "language-"); // prefix to add to unmapped info strings
    final public static DataKey<HashMap<String, String>> FENCED_CODE_LANGUAGE_CLASS_MAP = new DataKey<>("FENCED_CODE_LANGUAGE_CLASS_MAP", HashMap::new); // info to language class mapping
    final public static DataKey<String> FENCED_CODE_NO_LANGUAGE_CLASS = new DataKey<>("FENCED_CODE_NO_LANGUAGE_CLASS", "");
    final public static DataKey<String> FENCED_CODE_LANGUAGE_DELIMITERS = new DataKey<>("FENCED_CODE_LANGUAGE_DELIMITERS", " \t");

    final public static DataKey<String> SOURCE_POSITION_ATTRIBUTE = new DataKey<>("SOURCE_POSITION_ATTRIBUTE", "");
    final public static DataKey<Boolean> SOURCE_POSITION_PARAGRAPH_LINES = new DataKey<>("SOURCE_POSITION_PARAGRAPH_LINES", false);

    // Renderer type tag used by extensions to decide what to register
    // (e.g. "HTML", "JIRA", "YOUTRACK" — see HtmlRendererExtension.extend).
    final public static DataKey<String> TYPE = new DataKey<>("TYPE", "HTML");
    final public static DataKey<ArrayList<TagRange>> TAG_RANGES = new DataKey<>("TAG_RANGES", ArrayList::new);
    final public static DataKey<Boolean> RECHECK_UNDEFINED_REFERENCES = new DataKey<>("RECHECK_UNDEFINED_REFERENCES", false);
    final public static DataKey<Boolean> OBFUSCATE_EMAIL = new DataKey<>("OBFUSCATE_EMAIL", false);
    final public static DataKey<Boolean> OBFUSCATE_EMAIL_RANDOM = new DataKey<>("OBFUSCATE_EMAIL_RANDOM", true);
    final public static DataKey<Boolean> HTML_BLOCK_OPEN_TAG_EOL = new DataKey<>("HTML_BLOCK_OPEN_TAG_EOL", true);
    final public static DataKey<Boolean> HTML_BLOCK_CLOSE_TAG_EOL = new DataKey<>("HTML_BLOCK_CLOSE_TAG_EOL", true);
    final public static DataKey<Boolean> UNESCAPE_HTML_ENTITIES = new DataKey<>("UNESCAPE_HTML_ENTITIES", true);
    final public static DataKey<String> AUTOLINK_WWW_PREFIX = new DataKey<>("AUTOLINK_WWW_PREFIX", "http://");

    // regex for suppressed link prefixes
    final public
static DataKey<String> SUPPRESSED_LINKS = new DataKey<>("SUPPRESSED_LINKS", "javascript:.*");
    final public static DataKey<Boolean> NO_P_TAGS_USE_BR = new DataKey<>("NO_P_TAGS_USE_BR", false);
    // When true, EmbeddedAttributeProvider is registered automatically in the
    // HtmlRenderer constructor below.
    final public static DataKey<Boolean> EMBEDDED_ATTRIBUTE_PROVIDER = new DataKey<>("EMBEDDED_ATTRIBUTE_PROVIDER", true);

    /**
     * output control for FormattingAppendable, see {@link LineAppendable#setOptions(int)}
     */
    final public static DataKey<Integer> FORMAT_FLAGS = new DataKey<>("RENDERER_FORMAT_FLAGS", LineAppendable.F_TRIM_LEADING_WHITESPACE);
    final public static DataKey<Integer> MAX_TRAILING_BLANK_LINES = SharedDataKeys.RENDERER_MAX_TRAILING_BLANK_LINES;
    final public static DataKey<Integer> MAX_BLANK_LINES = SharedDataKeys.RENDERER_MAX_BLANK_LINES;

    // Use LineFormattingAppendable values instead,
    // NOTE: ALLOW_LEADING_WHITESPACE is now inverted and named F_TRIM_LEADING_WHITESPACE
    @Deprecated final public static int CONVERT_TABS = LineAppendable.F_CONVERT_TABS;
    @Deprecated final public static int COLLAPSE_WHITESPACE = LineAppendable.F_COLLAPSE_WHITESPACE;
    @Deprecated final public static int SUPPRESS_TRAILING_WHITESPACE = LineAppendable.F_TRIM_TRAILING_WHITESPACE;
    @Deprecated final public static int PASS_THROUGH = LineAppendable.F_PASS_THROUGH;
    // @Deprecated final public static int ALLOW_LEADING_WHITESPACE = LineAppendable.F_TRIM_LEADING_WHITESPACE;
    @Deprecated final public static int FORMAT_ALL = LineAppendable.F_FORMAT_ALL;

    /**
     * Stores pairs of equivalent renderer types to allow extensions to resolve types not known to them
     * <p>
     * Pair contains: rendererType, equivalentType
     */
    final public static DataKey<List<Pair<String, String>>> RENDERER_TYPE_EQUIVALENCE = new DataKey<>("RENDERER_TYPE_EQUIVALENCE", Collections.emptyList());

    // Use LineFormattingAppendable values instead
    @Deprecated final public static int FORMAT_CONVERT_TABS = LineAppendable.F_CONVERT_TABS;
    @Deprecated final public static int FORMAT_COLLAPSE_WHITESPACE = LineAppendable.F_COLLAPSE_WHITESPACE;
    @Deprecated
final public static int FORMAT_SUPPRESS_TRAILING_WHITESPACE = LineAppendable.F_TRIM_TRAILING_WHITESPACE;
    @Deprecated final public static int FORMAT_ALL_OPTIONS = LineAppendable.F_FORMAT_ALL;

    // Experimental, not tested
    final public static DataKey<List<TrackedOffset>> TRACKED_OFFSETS = new DataKey<>("TRACKED_OFFSETS", Collections.emptyList());

    // now not final only to allow disposal of resources
    final List<AttributeProviderFactory> attributeProviderFactories;
    final List<DelegatingNodeRendererFactoryWrapper> nodeRendererFactories;
    final List<LinkResolverFactory> linkResolverFactories;
    final HeaderIdGeneratorFactory htmlIdGeneratorFactory;
    final HtmlRendererOptions htmlOptions;
    final DataHolder options;

    /**
     * Snapshots the builder's configuration as immutable options and resolves
     * the registered factories (node renderers, attribute providers, link
     * resolvers) into dependency order.
     */
    HtmlRenderer(@NotNull Builder builder) {
        this.options = builder.toImmutable();
        this.htmlOptions = new HtmlRendererOptions(this.options);
        this.htmlIdGeneratorFactory = builder.htmlIdGeneratorFactory;

        // resolve renderer dependencies
        List<DelegatingNodeRendererFactoryWrapper> nodeRenderers = new ArrayList<>(builder.nodeRendererFactories.size());

        // iterate in reverse so factories added later are wrapped first
        for (int i = builder.nodeRendererFactories.size() - 1; i >= 0; i--) {
            NodeRendererFactory nodeRendererFactory = builder.nodeRendererFactories.get(i);
            nodeRenderers.add(new DelegatingNodeRendererFactoryWrapper(nodeRenderers, nodeRendererFactory));
        }

        // Add as last. This means clients can override the rendering of core nodes if they want by default
        CoreNodeRenderer.Factory nodeRendererFactory = new CoreNodeRenderer.Factory();
        nodeRenderers.add(new DelegatingNodeRendererFactoryWrapper(nodeRenderers, nodeRendererFactory));
        nodeRendererFactories = DependencyResolver.resolveFlatDependencies(nodeRenderers, null, dependent -> dependent.getFactory().getClass());

        // HACK: but for now works
        boolean addEmbedded = !builder.attributeProviderFactories.containsKey(EmbeddedAttributeProvider.Factory.getClass());
        List<AttributeProviderFactory> values = new ArrayList<>(builder.attributeProviderFactories.values());

        if (addEmbedded && EMBEDDED_ATTRIBUTE_PROVIDER.get(options)) {
            // add it first so the rest can override it if needed
            values.add(0, EmbeddedAttributeProvider.Factory);
        }

        this.attributeProviderFactories = DependencyResolver.resolveFlatDependencies(values, null, null);
        this.linkResolverFactories = DependencyResolver.resolveFlatDependencies(builder.linkResolverFactories, null, null);
    }

    /**
     * Create a new builder for configuring an {@link HtmlRenderer}.
     *
     * @return a builder
     */
    public static @NotNull Builder builder() {
        return new Builder();
    }

    /**
     * Create a new builder for configuring an {@link HtmlRenderer}.
*
 * @param options initialization options
 * @return a builder
 */
    public static @NotNull Builder builder(@Nullable DataHolder options) {
        return new Builder(options);
    }

    /**
     * @return the immutable options this renderer was built with
     */
    @NotNull
    @Override
    public DataHolder getOptions() {
        return options;
    }

    /**
     * Render a node to the appendable
     *
     * @param node   node to render
     * @param output appendable to use for the output
     */
    public void render(@NotNull Node node, @NotNull Appendable output) {
        render(node, output, htmlOptions.maxTrailingBlankLines);
    }

    /**
     * Render a node to the appendable
     *
     * @param node                  node to render
     * @param output                appendable to use for the output
     * @param maxTrailingBlankLines maximum trailing blank lines to keep in the output
     */
    public void render(@NotNull Node node, @NotNull Appendable output, int maxTrailingBlankLines) {
        HtmlWriter htmlWriter = new HtmlWriter(output, htmlOptions.indentSize, htmlOptions.formatFlags, !htmlOptions.htmlBlockOpenTagEol, !htmlOptions.htmlBlockCloseTagEol);
        MainNodeRenderer renderer = new MainNodeRenderer(options, htmlWriter, node.getDocument());

        // Pre-generate header ids when rendering a subtree; rendering a full
        // Document generates them as part of the document render itself.
        if (renderer.htmlIdGenerator != HtmlIdGenerator.NULL && !(node instanceof Document)) {
            renderer.htmlIdGenerator.generateIds(node.getDocument());
        }

        renderer.render(node);
        htmlWriter.appendToSilently(output, htmlOptions.maxBlankLines, maxTrailingBlankLines);

        // resolve any unresolved tracked offsets that are outside elements which resolve their own
        TrackedOffsetUtils.resolveTrackedOffsets(node.getChars(), htmlWriter, TRACKED_OFFSETS.get(renderer.getDocument()), maxTrailingBlankLines, SharedDataKeys.RUNNING_TESTS.get(options));
        renderer.dispose();
    }

    /**
     * Render the tree of nodes to HTML.
     *
     * @param node the root node
     * @return the rendered HTML.
*/ @NotNull public String render(@NotNull Node node) { StringBuilder sb = new StringBuilder(); render(node, sb); return sb.toString(); } static public boolean isCompatibleRendererType(@NotNull MutableDataHolder options, @NotNull String supportedRendererType) { String rendererType = HtmlRenderer.TYPE.get(options); return isCompatibleRendererType(options, rendererType, supportedRendererType); } static public boolean isCompatibleRendererType(@NotNull MutableDataHolder options, @NotNull String rendererType, @NotNull String supportedRendererType) { if (rendererType.equals(supportedRendererType)) { return true; } List<Pair<String, String>> equivalence = RENDERER_TYPE_EQUIVALENCE.get(options); for (Pair<String, String> pair : equivalence) { if (rendererType.equals(pair.getFirst())) { if (supportedRendererType.equals(pair.getSecond())) { return true; } } } return false; } @SuppressWarnings("UnusedReturnValue") static public @NotNull MutableDataHolder addRenderTypeEquivalence(@NotNull MutableDataHolder options, @NotNull String rendererType, @NotNull String supportedRendererType) { if (!isCompatibleRendererType(options, rendererType, supportedRendererType)) { // need to add List<Pair<String, String>> equivalence = RENDERER_TYPE_EQUIVALENCE.get(options); ArrayList<Pair<String, String>> newEquivalence = new ArrayList<>(equivalence); newEquivalence.add(new Pair<>(rendererType, supportedRendererType)); options.set(RENDERER_TYPE_EQUIVALENCE, newEquivalence); } return options; } /** * Builder for configuring an {@link HtmlRenderer}. See methods for default configuration. 
*/
    public static class Builder extends BuilderBase<Builder> implements RendererBuilder {
        // Keyed by factory class so re-adding the same provider type replaces it.
        Map<Class<?>, AttributeProviderFactory> attributeProviderFactories = new LinkedHashMap<>();
        List<NodeRendererFactory> nodeRendererFactories = new ArrayList<>();
        List<LinkResolverFactory> linkResolverFactories = new ArrayList<>();
        HeaderIdGeneratorFactory htmlIdGeneratorFactory = null;

        public Builder() {
            super();
        }

        public Builder(@Nullable DataHolder options) {
            super(options);
            loadExtensions();
        }

        /**
         * Removes a previously registered extension point, dispatching on the
         * api point's runtime type.
         */
        @Override
        protected void removeApiPoint(@NotNull Object apiPoint) {
            if (apiPoint instanceof AttributeProviderFactory) this.attributeProviderFactories.remove(apiPoint.getClass());
            else if (apiPoint instanceof NodeRendererFactory) this.nodeRendererFactories.remove(apiPoint);
            else if (apiPoint instanceof LinkResolverFactory) this.linkResolverFactories.remove(apiPoint);
            else if (apiPoint instanceof HeaderIdGeneratorFactory) this.htmlIdGeneratorFactory = null;
            else {
                throw new IllegalStateException("Unknown data point type: " + apiPoint.getClass().getName());
            }
        }

        /**
         * Gives renderer extensions a chance to adjust shared options before
         * their extension points are registered.
         */
        @Override
        protected void preloadExtension(@NotNull Extension extension) {
            if (extension instanceof HtmlRendererExtension) {
                HtmlRendererExtension htmlRendererExtension = (HtmlRendererExtension) extension;
                htmlRendererExtension.rendererOptions(this);
            } else if (extension instanceof RendererExtension) {
                RendererExtension htmlRendererExtension = (RendererExtension) extension;
                htmlRendererExtension.rendererOptions(this);
            }
        }

        /**
         * Registers an extension's extension points; returns false for
         * extension types this builder does not recognize.
         */
        @Override
        protected boolean loadExtension(@NotNull Extension extension) {
            if (extension instanceof HtmlRendererExtension) {
                HtmlRendererExtension htmlRendererExtension = (HtmlRendererExtension) extension;
                htmlRendererExtension.extend(this, TYPE.get(this));
                return true;
            } else if (extension instanceof RendererExtension) {
                RendererExtension htmlRendererExtension = (RendererExtension) extension;
                htmlRendererExtension.extend(this, TYPE.get(this));
                return true;
            }
            return false;
        }

        /**
         * @return the configured {@link
HtmlRenderer}
         */
        @NotNull
        public HtmlRenderer build() {
            return new HtmlRenderer(this);
        }

        /**
         * The HTML to use for rendering a softbreak, defaults to {@code "\n"} (meaning the rendered result doesn't have
         * a line break).
         * <p>
         * Set it to {@code "<br>"} (or {@code "<br />"} to make them hard breaks.
         * <p>
         * Set it to {@code " "} to ignore line wrapping in the source.
         *
         * @param softBreak HTML for softbreak
         * @return {@code this}
         */
        public @NotNull Builder softBreak(@NotNull String softBreak) {
            this.set(SOFT_BREAK, softBreak);
            return this;
        }

        /**
         * The size of the indent to use for hierarchical elements, default 0, means no indent, also fastest rendering
         *
         * @param indentSize number of spaces per indent
         * @return {@code this}
         */
        public @NotNull Builder indentSize(int indentSize) {
            this.set(INDENT_SIZE, indentSize);
            return this;
        }

        /**
         * Whether {@link HtmlInline} and {@link HtmlBlock} should be escaped, defaults to {@code false}.
         * <p>
         * Note that {@link HtmlInline} is only a tag itself, not the text between an opening tag and a closing tag. So
         * markup in the text will be parsed as normal and is not affected by this option.
         *
         * @param escapeHtml true for escaping, false for preserving raw HTML
         * @return {@code this}
         */
        public @NotNull Builder escapeHtml(boolean escapeHtml) {
            this.set(ESCAPE_HTML, escapeHtml);
            return this;
        }

        /**
         * @return true when this builder's configured renderer type is compatible with the given type
         */
        public boolean isRendererType(@NotNull String supportedRendererType) {
            String rendererType = HtmlRenderer.TYPE.get(this);
            return HtmlRenderer.isCompatibleRendererType(this, rendererType, supportedRendererType);
        }

        /**
         * Whether URLs of link or images should be percent-encoded, defaults to {@code false}.
         * <p>
         * If enabled, the following is done:
         * <ul>
         * <li>Existing percent-encoded parts are preserved (e.g.
"%20" is kept as "%20")</li> * <li>Reserved characters such as "/" are preserved, except for "[" and "]" (see encodeURI in JS)</li> * <li>Unreserved characters such as "a" are preserved</li> * <li>Other characters such umlauts are percent-encoded</li> * </ul> * * @param percentEncodeUrls true to percent-encode, false for leaving as-is * @return {@code this} */ public @NotNull Builder percentEncodeUrls(boolean percentEncodeUrls) { this.set(PERCENT_ENCODE_URLS, percentEncodeUrls); return this; } /** * Add an attribute provider for adding/changing HTML attributes to the rendered tags. * * @param attributeProviderFactory the attribute provider factory to add * @return {@code this} */ public @NotNull Builder attributeProviderFactory(@NotNull AttributeProviderFactory attributeProviderFactory) { this.attributeProviderFactories.put(attributeProviderFactory.getClass(), attributeProviderFactory); addExtensionApiPoint(attributeProviderFactory); return this; } /** * Add a factory for instantiating a node renderer (done when rendering). This allows to override the rendering * of node types or define rendering for custom node types. * <p> * If multiple node renderers for the same node type are created, the one from the factory that was added first * "wins". (This is how the rendering for core node types can be overridden; the default rendering comes last.) * * @param nodeRendererFactory the factory for creating a node renderer * @return {@code this} */ public @NotNull Builder nodeRendererFactory(@NotNull NodeRendererFactory nodeRendererFactory) { this.nodeRendererFactories.add(nodeRendererFactory); addExtensionApiPoint(nodeRendererFactory); return this; } /** * Add a factory for instantiating a node renderer (done when rendering). This allows to override the rendering * of node types or define rendering for custom node types. * <p> * If multiple node renderers for the same node type are created, the one from the factory that was added first * "wins". 
(This is how the rendering for core node types can be overridden; the default rendering comes last.)
         *
         * @param linkResolverFactory the factory for creating a node renderer
         * @return {@code this}
         */
        public @NotNull Builder linkResolverFactory(@NotNull LinkResolverFactory linkResolverFactory) {
            this.linkResolverFactories.add(linkResolverFactory);
            addExtensionApiPoint(linkResolverFactory);
            return this;
        }

        /**
         * Add a factory for resolving URI to content
         *
         * @param contentResolverFactory the factory for creating a node renderer
         * @return {@code this}
         */
        @Override
        public @NotNull Builder contentResolverFactory(@NotNull UriContentResolverFactory contentResolverFactory) {
            // NOTE(review): declared by RendererBuilder but not supported by this builder.
            throw new IllegalStateException("Not implemented");
        }

        /**
         * Add a factory for generating the header id attribute from the header's text
         *
         * @param htmlIdGeneratorFactory the factory for generating header tag id attributes
         * @return {@code this}
         */
        @NotNull
        public Builder htmlIdGeneratorFactory(@NotNull HeaderIdGeneratorFactory htmlIdGeneratorFactory) {
            //noinspection VariableNotUsedInsideIf
            if (this.htmlIdGeneratorFactory != null) {
                // NOTE(review): the message reports the incoming factory's class, not the
                // previously-set this.htmlIdGeneratorFactory — confirm which was intended.
                throw new IllegalStateException("custom header id factory is already set to " + htmlIdGeneratorFactory.getClass().getName());
            }
            this.htmlIdGeneratorFactory = htmlIdGeneratorFactory;
            addExtensionApiPoint(htmlIdGeneratorFactory);
            return this;
        }
    }

    /**
     * Extension for {@link HtmlRenderer}.
     * <p>
     * This should be implemented by all extensions that have HtmlRenderer extension code.
     * <p>
     * Each extension will have its {@link HtmlRendererExtension#extend(Builder, String)} method called.
     * and should call back on the builder argument to register all extension points
     */
    public interface HtmlRendererExtension extends Extension {
        /**
         * This method is called first on all extensions so that they can adjust the options that must be
         * common to all extensions.
* * @param options option set that will be used for the builder */ void rendererOptions(@NotNull MutableDataHolder options); /** * Called to give each extension to register extension points that it contains * * @param htmlRendererBuilder builder to call back for extension point registration * @param rendererType type of rendering being performed. For now "HTML", "JIRA" or "YOUTRACK" * @see Builder#attributeProviderFactory(AttributeProviderFactory) * @see Builder#nodeRendererFactory(NodeRendererFactory) * @see Builder#linkResolverFactory(LinkResolverFactory) * @see Builder#htmlIdGeneratorFactory(HeaderIdGeneratorFactory) */ void extend(@NotNull Builder htmlRendererBuilder, @NotNull String rendererType); } private class MainNodeRenderer extends NodeRendererSubContext implements NodeRendererContext, Disposable { private Document document; private Map<Class<?>, NodeRenderingHandlerWrapper> renderers; private List<PhasedNodeRenderer> phasedRenderers; private LinkResolver[] myLinkResolvers; private Set<RenderingPhase> renderingPhases; private DataHolder options; private RenderingPhase phase; HtmlIdGenerator htmlIdGenerator; private HashMap<LinkType, HashMap<String, ResolvedLink>> resolvedLinkMap = new HashMap<>(); private AttributeProvider[] attributeProviders; @Override public void dispose() { document = null; renderers = null; phasedRenderers = null; for (LinkResolver linkResolver : myLinkResolvers) { if (linkResolver instanceof Disposable) ((Disposable) linkResolver).dispose(); } myLinkResolvers = null; renderingPhases = null; options = null; if (htmlIdGenerator instanceof Disposable) ((Disposable) htmlIdGenerator).dispose(); htmlIdGenerator = null; resolvedLinkMap = null; for (AttributeProvider attributeProvider : attributeProviders) { if (attributeProvider instanceof Disposable) ((Disposable) attributeProvider).dispose(); } attributeProviders = null; } MainNodeRenderer(DataHolder options, HtmlWriter htmlWriter, Document document) { super(htmlWriter); this.options = 
new ScopedDataSet(document, options); this.document = document; this.renderers = new HashMap<>(32); this.renderingPhases = new HashSet<>(RenderingPhase.values().length); this.phasedRenderers = new ArrayList<>(nodeRendererFactories.size()); this.myLinkResolvers = new LinkResolver[linkResolverFactories.size()]; this.doNotRenderLinksNesting = htmlOptions.doNotRenderLinksInDocument ? 0 : 1; this.htmlIdGenerator = htmlIdGeneratorFactory != null ? htmlIdGeneratorFactory.create(this) : (!(htmlOptions.renderHeaderId || htmlOptions.generateHeaderIds) ? HtmlIdGenerator.NULL : new HeaderIdGenerator.Factory().create(this)); htmlWriter.setContext(this); for (int i = nodeRendererFactories.size() - 1; i >= 0; i--) { NodeRendererFactory nodeRendererFactory = nodeRendererFactories.get(i); NodeRenderer nodeRenderer = nodeRendererFactory.apply(this.getOptions()); Set<NodeRenderingHandler<?>> renderingHandlers = nodeRenderer.getNodeRenderingHandlers(); assert (renderingHandlers != null); for (NodeRenderingHandler<?> nodeType : renderingHandlers) { // Overwrite existing renderer NodeRenderingHandlerWrapper handlerWrapper = new NodeRenderingHandlerWrapper(nodeType, renderers.get(nodeType.getNodeType())); renderers.put(nodeType.getNodeType(), handlerWrapper); } if (nodeRenderer instanceof PhasedNodeRenderer) { Set<RenderingPhase> renderingPhases = ((PhasedNodeRenderer) nodeRenderer).getRenderingPhases(); assert (renderingPhases != null); this.renderingPhases.addAll(renderingPhases); this.phasedRenderers.add((PhasedNodeRenderer) nodeRenderer); } } for (int i = 0; i < linkResolverFactories.size(); i++) { myLinkResolvers[i] = linkResolverFactories.get(i).apply(this); } this.attributeProviders = new AttributeProvider[attributeProviderFactories.size()]; for (int i = 0; i < attributeProviderFactories.size(); i++) { attributeProviders[i] = attributeProviderFactories.get(i).apply(this); } } @NotNull @Override public Node getCurrentNode() { return renderingNode; } @NotNull @Override public 
ResolvedLink resolveLink(@NotNull LinkType linkType, @NotNull CharSequence url, Attributes attributes, Boolean urlEncode) { HashMap<String, ResolvedLink> resolvedLinks = resolvedLinkMap.computeIfAbsent(linkType, k -> new HashMap<>()); String urlSeq = String.valueOf(url); ResolvedLink resolvedLink = resolvedLinks.get(urlSeq); if (resolvedLink == null) { resolvedLink = new ResolvedLink(linkType, urlSeq, attributes); if (!urlSeq.isEmpty()) { Node currentNode = getCurrentNode(); for (LinkResolver linkResolver : myLinkResolvers) { resolvedLink = linkResolver.resolveLink(currentNode, this, resolvedLink); if (resolvedLink.getStatus() != LinkStatus.UNKNOWN) break; } if (urlEncode == null && htmlOptions.percentEncodeUrls || urlEncode != null && urlEncode) { resolvedLink = resolvedLink.withUrl(Escaping.percentEncodeUrl(resolvedLink.getUrl())); } } resolvedLinks.put(urlSeq, resolvedLink); } return resolvedLink; } @Override public String getNodeId(@NotNull Node node) { String id = htmlIdGenerator.getId(node); if (attributeProviderFactories.size() != 0) { MutableAttributes attributes = new MutableAttributes(); if (id != null) attributes.replaceValue("id", id); for (AttributeProvider attributeProvider : attributeProviders) { attributeProvider.setAttributes(this.renderingNode, AttributablePart.ID, attributes); } id = attributes.getValue("id"); } return id; } @NotNull @Override public DataHolder getOptions() { return options; } @NotNull @Override public HtmlRendererOptions getHtmlOptions() { return htmlOptions; } @NotNull @Override public Document getDocument() { return document; } @NotNull @Override public RenderingPhase getRenderingPhase() { return phase; } @NotNull @Override public String encodeUrl(@NotNull CharSequence url) { if (htmlOptions.percentEncodeUrls) { return Escaping.percentEncodeUrl(url); } else { return String.valueOf(url); } } @Override public @NotNull MutableAttributes extendRenderingNodeAttributes(@NotNull AttributablePart part, Attributes attributes) { 
MutableAttributes attr = attributes != null ? attributes.toMutable() : new MutableAttributes(); for (AttributeProvider attributeProvider : attributeProviders) { attributeProvider.setAttributes(this.renderingNode, part, attr); } return attr; } @Override public @NotNull MutableAttributes extendRenderingNodeAttributes(@NotNull Node node, @NotNull AttributablePart part, Attributes attributes) { MutableAttributes attr = attributes != null ? attributes.toMutable() : new MutableAttributes(); for (AttributeProvider attributeProvider : attributeProviders) { attributeProvider.setAttributes(node, part, attr); } return attr; } @Override public void render(@NotNull Node node) { renderNode(node, this); } @Override public void delegateRender() { renderByPreviousHandler(this); } void renderByPreviousHandler(NodeRendererSubContext subContext) { if (subContext.renderingNode != null) { NodeRenderingHandlerWrapper nodeRenderer = subContext.renderingHandlerWrapper.myPreviousRenderingHandler; if (nodeRenderer != null) { Node oldNode = subContext.renderingNode; int oldDoNotRenderLinksNesting = subContext.doNotRenderLinksNesting; NodeRenderingHandlerWrapper prevWrapper = subContext.renderingHandlerWrapper; try { subContext.renderingHandlerWrapper = nodeRenderer; nodeRenderer.myRenderingHandler.render(oldNode, subContext, subContext.htmlWriter); } finally { subContext.renderingNode = oldNode; subContext.doNotRenderLinksNesting = oldDoNotRenderLinksNesting; subContext.renderingHandlerWrapper = prevWrapper; } } } else { throw new IllegalStateException("renderingByPreviousHandler called outside node rendering code"); } } @NotNull @Override public NodeRendererContext getSubContext(boolean inheritIndent) { HtmlWriter htmlWriter = new HtmlWriter(getHtmlWriter(), inheritIndent); htmlWriter.setContext(this); //noinspection ReturnOfInnerClass return new SubNodeRenderer(this, htmlWriter, false); } @NotNull @Override public NodeRendererContext getDelegatedSubContext(boolean inheritIndent) { 
        HtmlWriter htmlWriter = new HtmlWriter(getHtmlWriter(), inheritIndent);
        htmlWriter.setContext(this);
        //noinspection ReturnOfInnerClass
        return new SubNodeRenderer(this, htmlWriter, true);
    }

    // Renders a single node. A Document node drives the multi-phase rendering loop
    // (phased renderers first, then the BODY-phase handler for the document itself);
    // any other node is dispatched to the handler registered for its concrete class.
    // Mutable state on subContext is saved and restored around each handler call so
    // nested rendering cannot leak renderingNode/handler/link-nesting state.
    void renderNode(Node node, NodeRendererSubContext subContext) {
        if (node instanceof Document) {
            // here we render multiple phases
            int oldDoNotRenderLinksNesting = subContext.getDoNotRenderLinksNesting();
            int documentDoNotRenderLinksNesting = getHtmlOptions().doNotRenderLinksInDocument ? 1 : 0;
            this.htmlIdGenerator.generateIds(document);

            for (RenderingPhase phase : RenderingPhase.values()) {
                // BODY always runs; other phases only if some renderer registered for them
                if (phase != RenderingPhase.BODY && !renderingPhases.contains(phase)) {
                    continue;
                }
                this.phase = phase;
                // here we render multiple phases
                // go through all renderers that want this phase
                for (PhasedNodeRenderer phasedRenderer : phasedRenderers) {
                    if (Objects.requireNonNull(phasedRenderer.getRenderingPhases()).contains(phase)) {
                        subContext.doNotRenderLinksNesting = documentDoNotRenderLinksNesting;
                        subContext.renderingNode = node;
                        phasedRenderer.renderDocument(subContext, subContext.htmlWriter, (Document) node, phase);
                        subContext.renderingNode = null;
                        subContext.doNotRenderLinksNesting = oldDoNotRenderLinksNesting;
                    }
                }

                if (getRenderingPhase() == RenderingPhase.BODY) {
                    NodeRenderingHandlerWrapper nodeRenderer = renderers.get(node.getClass());
                    if (nodeRenderer != null) {
                        subContext.doNotRenderLinksNesting = documentDoNotRenderLinksNesting;
                        NodeRenderingHandlerWrapper prevWrapper = subContext.renderingHandlerWrapper;
                        try {
                            subContext.renderingNode = node;
                            subContext.renderingHandlerWrapper = nodeRenderer;
                            nodeRenderer.myRenderingHandler.render(node, subContext, subContext.htmlWriter);
                        } finally {
                            // restore even if the handler throws
                            subContext.renderingHandlerWrapper = prevWrapper;
                            subContext.renderingNode = null;
                            subContext.doNotRenderLinksNesting = oldDoNotRenderLinksNesting;
                        }
                    }
                }
            }
        } else {
            NodeRenderingHandlerWrapper nodeRenderer = renderers.get(node.getClass());
            if (nodeRenderer != null) {
                Node oldNode = this.renderingNode;
                int oldDoNotRenderLinksNesting = subContext.doNotRenderLinksNesting;
                NodeRenderingHandlerWrapper prevWrapper = subContext.renderingHandlerWrapper;
                try {
                    subContext.renderingNode = node;
                    subContext.renderingHandlerWrapper = nodeRenderer;
                    nodeRenderer.myRenderingHandler.render(node, subContext, subContext.htmlWriter);
                } finally {
                    // restore even if the handler throws
                    subContext.renderingNode = oldNode;
                    subContext.doNotRenderLinksNesting = oldDoNotRenderLinksNesting;
                    subContext.renderingHandlerWrapper = prevWrapper;
                }
            }
        }
    }

    public void renderChildren(@NotNull Node parent) {
        renderChildrenNode(parent, this);
    }

    // Renders each child of parent in order. The next sibling is captured before
    // rendering so handlers that detach/replace the current node do not break iteration.
    @SuppressWarnings("WeakerAccess")
    protected void renderChildrenNode(Node parent, NodeRendererSubContext subContext) {
        Node node = parent.getFirstChild();
        while (node != null) {
            Node next = node.getNext();
            renderNode(node, subContext);
            node = next;
        }
    }

    // Per-writer rendering context that delegates all queries and rendering back to the
    // owning MainNodeRenderer while keeping its own HtmlWriter and link-nesting state.
    @SuppressWarnings("WeakerAccess")
    private class SubNodeRenderer extends NodeRendererSubContext implements NodeRendererContext {
        final private MainNodeRenderer myMainNodeRenderer;

        public SubNodeRenderer(MainNodeRenderer mainNodeRenderer, HtmlWriter htmlWriter, boolean inheritCurrentHandler) {
            super(htmlWriter);
            myMainNodeRenderer = mainNodeRenderer;
            doNotRenderLinksNesting = mainNodeRenderer.getHtmlOptions().doNotRenderLinksInDocument ? 1 : 0;
            if (inheritCurrentHandler) {
                // inherit the in-flight node/handler so delegateRender() keeps working
                renderingNode = mainNodeRenderer.renderingNode;
                renderingHandlerWrapper = mainNodeRenderer.renderingHandlerWrapper;
            }
        }

        @Override public String getNodeId(@NotNull Node node) {return myMainNodeRenderer.getNodeId(node);}

        @NotNull @Override public DataHolder getOptions() {return myMainNodeRenderer.getOptions();}

        @NotNull @Override public HtmlRendererOptions getHtmlOptions() {return myMainNodeRenderer.getHtmlOptions();}

        @NotNull @Override public Document getDocument() {return myMainNodeRenderer.getDocument();}

        @NotNull @Override public RenderingPhase getRenderingPhase() {return myMainNodeRenderer.getRenderingPhase();}

        @NotNull @Override public String encodeUrl(@NotNull CharSequence url) {return myMainNodeRenderer.encodeUrl(url);}

        @Override
        public @NotNull MutableAttributes extendRenderingNodeAttributes(@NotNull AttributablePart part, Attributes attributes) {
            return myMainNodeRenderer.extendRenderingNodeAttributes(part, attributes);
        }

        @Override
        public @NotNull MutableAttributes extendRenderingNodeAttributes(@NotNull Node node, @NotNull AttributablePart part, Attributes attributes) {
            return myMainNodeRenderer.extendRenderingNodeAttributes(node, part, attributes);
        }

        @Override
        public void render(@NotNull Node node) {
            myMainNodeRenderer.renderNode(node, this);
        }

        @Override
        public void delegateRender() {
            myMainNodeRenderer.renderByPreviousHandler(this);
        }

        @NotNull
        @Override
        public Node getCurrentNode() {
            return myMainNodeRenderer.getCurrentNode();
        }

        @NotNull
        @Override
        public ResolvedLink resolveLink(@NotNull LinkType linkType, @NotNull CharSequence url, Boolean urlEncode) {
            return myMainNodeRenderer.resolveLink(linkType, url, urlEncode);
        }

        @NotNull
        @Override
        public ResolvedLink resolveLink(@NotNull LinkType linkType, @NotNull CharSequence url, Attributes attributes, Boolean urlEncode) {
            return myMainNodeRenderer.resolveLink(linkType, url, attributes, urlEncode);
        }

        @NotNull
        @Override
        public NodeRendererContext getSubContext(boolean inheritIndent) {
            HtmlWriter htmlWriter = new HtmlWriter(this.htmlWriter, inheritIndent);
            htmlWriter.setContext(this);
            //noinspection ReturnOfInnerClass
            return new SubNodeRenderer(myMainNodeRenderer, htmlWriter, false);
        }

        @NotNull
        @Override
        public NodeRendererContext getDelegatedSubContext(boolean inheritIndent) {
            HtmlWriter htmlWriter = new HtmlWriter(this.htmlWriter, inheritIndent);
            htmlWriter.setContext(this);
            //noinspection ReturnOfInnerClass
            return new SubNodeRenderer(myMainNodeRenderer, htmlWriter, true);
        }

        @Override
        public void renderChildren(@NotNull Node parent) {
            myMainNodeRenderer.renderChildrenNode(parent, this);
        }

        @NotNull
        @Override
        public HtmlWriter getHtmlWriter() { return htmlWriter; }

        // Link-suppression state is per-sub-context, so these go to super, not the main renderer.
        protected int getDoNotRenderLinksNesting() {return super.getDoNotRenderLinksNesting();}

        @Override
        public boolean isDoNotRenderLinks() {return super.isDoNotRenderLinks();}

        @Override
        public void doNotRenderLinks(boolean doNotRenderLinks) {super.doNotRenderLinks(doNotRenderLinks);}

        @Override
        public void doNotRenderLinks() {super.doNotRenderLinks();}

        @Override
        public void doRenderLinks() {super.doRenderLinks();}
    }
}
}
//====================================================================================== // Copyright 5AM Solutions Inc, Yale University // // Distributed under the OSI-approved BSD 3-Clause License. // See http://ncip.github.com/caarray/LICENSE.txt for details. //====================================================================================== package gov.nih.nci.caarray.upgrade; import gov.nih.nci.caarray.dao.DaoModule; import gov.nih.nci.caarray.domain.LSID; import gov.nih.nci.caarray.domain.MultiPartBlob; import gov.nih.nci.caarray.domain.array.ArrayDesign; import gov.nih.nci.caarray.domain.file.CaArrayFile; import gov.nih.nci.caarray.domain.file.FileType; import gov.nih.nci.caarray.platforms.PlatformModule; import gov.nih.nci.caarray.platforms.SessionTransactionManager; import gov.nih.nci.caarray.platforms.SessionTransactionManagerNoOpImpl; import gov.nih.nci.caarray.platforms.spi.DataFileHandler; import gov.nih.nci.caarray.platforms.spi.PlatformFileReadException; import gov.nih.nci.caarray.util.CaArrayHibernateHelper; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.sql.Connection; import java.sql.SQLException; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import liquibase.database.Database; import liquibase.exception.CustomChangeException; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.UnhandledException; import com.google.inject.AbstractModule; import com.google.inject.Guice; import com.google.inject.Injector; import com.google.inject.Key; import com.google.inject.TypeLiteral; import com.google.inject.util.Types; /** * Migrator to find hybridizations with missing arrays, and where possible, create them based on array designs specified * in data files or the experiment. * * The migrator will use the array design for the experiment, if there is only one. 
Otherwise, it will extract the array * design from one of the data files associated with the hybridization, if present. If none of the data files specify * the array design, and the experiment has more than one array design specified, then no action will be taken. * * @author Dan Kokotov */ @SuppressWarnings({ "PMD.TooManyMethods", "PMD.CyclomaticComplexity" }) public class FixHybridizationsWithMissingArraysMigrator extends AbstractCustomChange { private Database database; private Set<DataFileHandler> handlers; private final Map<Long, File> openFileMap = new HashMap<Long, File>(); private final FixHybridizationsWithMissingArraysDao dao; private Injector injector; /** * Creates a FixHybridizationsWithMissingArraysMigrator that will make updates to the live database. */ public FixHybridizationsWithMissingArraysMigrator() { this(new FixHybridizationsWithMissingArraysDao()); } /** * Creates a FixHybridizationsWithMissingArraysMigrator that uses a custom data access object for * database operations. Intended for use in testing. * * @param dao handles the database update operations */ public FixHybridizationsWithMissingArraysMigrator(FixHybridizationsWithMissingArraysDao dao) { this.dao = dao; } /** * {@inheritDoc} */ @Override protected void doExecute(Database db) throws CustomChangeException { try { final Connection underlyingConnection = db.getConnection().getUnderlyingConnection(); execute(underlyingConnection); } catch (Exception e) { throw new CustomChangeException(e); } } /** * Execute the change given a connection. 
* @param connection the connection */ public void execute(Connection connection) { setup(connection); try { List<Long> hybIdsWithoutArray = dao.getHybIdsWithNoArrayOrNoArrayDesign(); for (Long hid : hybIdsWithoutArray) { ensureArrayDesignSetForHyb(hid); } } catch (Exception e) { throw new UnhandledException("Could not fix hybridizations", e); } } private void ensureArrayDesignSetForHyb(Long hid) throws SQLException, IOException, PlatformFileReadException { Long adid = getArrayDesignId(hid); if (adid != null) { setArrayDesignForHyb(hid, adid); } } private Long getArrayDesignId(Long hid) throws SQLException, IOException, PlatformFileReadException { Long adid = dao.getUniqueArrayDesignIdFromExperiment(hid); if (adid == null) { adid = getArrayDesignIdFromFiles(hid); } return adid; } private Long getArrayDesignIdFromFiles(Long hid) throws SQLException, IOException, PlatformFileReadException { List<Long> dataFileIds = dao.getImportedDataFileIdsFromHybId(hid); for (Long fileId : dataFileIds) { Long adid = getArrayDesignFromFile(fileId); if (adid != null) { return adid; } } return null; } @SuppressWarnings("deprecation") private Long getArrayDesignFromFile(Long fileId) throws SQLException, IOException { FileType ft = dao.getFileType(fileId); if (!ft.isArrayData()) { return null; } CaArrayFile caArrayFile = new CaArrayFile(); caArrayFile.setId(fileId); DataFileHandler handler = null; try { handler = getHandler(caArrayFile); return findArrayDesignFromFile(handler).getId(); } catch (PlatformFileReadException e) { throw new IllegalArgumentException("Error reading file " + caArrayFile.getName(), e); } finally { if (handler != null) { handler.closeFiles(); } } } private ArrayDesign findArrayDesignFromFile(DataFileHandler handler) throws PlatformFileReadException { List<LSID> designLsids = handler.getReferencedArrayDesignCandidateIds(); return dao.getFirstArrayDesignFromLsidList(designLsids); } private void setArrayDesignForHyb(Long hid, Long adid) throws SQLException { Long aid 
= dao.getArrayFromHybridizationId(hid); if (aid != null) { dao.setArrayDesignForArray(aid, adid); } else { dao.setArrayAndDesign(hid, adid); } } /** * Find the appropriate data handler for the given data file, and initialize it. * * @param caArrayFile the data file to be processed * @return the DataFileHandler instance capable of processing that file. That handler will have been initialized * with this file. */ private DataFileHandler getHandler(CaArrayFile caArrayFile) { for (DataFileHandler handler : this.handlers) { try { if (handler.openFile(caArrayFile)) { return handler; } } catch (PlatformFileReadException e) { handler.closeFiles(); throw new IllegalArgumentException("Error reading file " + caArrayFile.getName(), e); } } throw new IllegalArgumentException("Unsupported type " + caArrayFile.getFileType()); } private void setup(Connection connection) { this.injector = createInjector(); dao.setConnection(connection); createHibernateHelper(connection); this.openFileMap.clear(); this.handlers = getHandlers(); } private Injector createInjector() { return Guice.createInjector(new DaoModule(), new PlatformModule(), getLocalModule()); } private AbstractModule getLocalModule() { final AbstractModule localModule = new AbstractModule() { @Override protected void configure() { // TODO: may need to bind a local implementation of storage in place of this: bind(FileManager.class).toInstance(createFileManager()); bind(SessionTransactionManager.class).toInstance(new SessionTransactionManagerNoOpImpl()); bind(CaArrayHibernateHelper.class).toInstance(new SingleConnectionHibernateHelper()); } }; return localModule; } // TODO: remove this when appropriate local implementation of storage is created to replace this. // Until then, keeping it here temporarily as a reference. 
- A Sy 2011-03-30 // private FileManager createFileManager() { // return new FileManager() { // public File openFile(CaArrayFile caArrayFile) { // try { // File file = getFile(caArrayFile.getId()); // openFileMap.put(caArrayFile.getId(), file); // return file; // } catch (SQLException e) { // throw new IllegalStateException("Could not open the file " + caArrayFile); // } catch (IOException e) { // throw new IllegalStateException("Could not open the file " + caArrayFile); // } // } // // public void closeFile(CaArrayFile caArrayFile) { // File file = openFileMap.get(caArrayFile.getId()); // FileUtils.deleteQuietly(file); // } // }; // } private File getFile(Long fileId) throws SQLException, IOException { MultiPartBlob mpb = dao.getFileBlob(fileId); File f = File.createTempFile("datafile", null); InputStream is = mpb.readUncompressedContents(); FileOutputStream fos = FileUtils.openOutputStream(f); IOUtils.copy(is, fos); IOUtils.closeQuietly(is); IOUtils.closeQuietly(fos); return f; } private SingleConnectionHibernateHelper createHibernateHelper(Connection connection) { SingleConnectionHibernateHelper hibernateHelper = (SingleConnectionHibernateHelper) injector .getInstance(CaArrayHibernateHelper.class); hibernateHelper.initialize(connection); return hibernateHelper; } @SuppressWarnings("unchecked") private Set<DataFileHandler> getHandlers() { return (Set<DataFileHandler>) injector.getInstance(Key.get(TypeLiteral.get(Types .setOf(DataFileHandler.class)))); } }
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.connector.system.jdbc; import com.facebook.presto.Session; import com.facebook.presto.connector.system.GlobalSystemTransactionHandle; import com.facebook.presto.metadata.Metadata; import com.facebook.presto.metadata.QualifiedObjectName; import com.facebook.presto.metadata.QualifiedTablePrefix; import com.facebook.presto.spi.ColumnMetadata; import com.facebook.presto.spi.ConnectorSession; import com.facebook.presto.spi.ConnectorTableMetadata; import com.facebook.presto.spi.InMemoryRecordSet; import com.facebook.presto.spi.InMemoryRecordSet.Builder; import com.facebook.presto.spi.RecordCursor; import com.facebook.presto.spi.SchemaTableName; import com.facebook.presto.spi.connector.ConnectorTransactionHandle; import com.facebook.presto.spi.predicate.TupleDomain; import com.facebook.presto.spi.type.CharType; import com.facebook.presto.spi.type.DecimalType; import com.facebook.presto.spi.type.Type; import com.facebook.presto.spi.type.VarcharType; import com.facebook.presto.type.ArrayType; import javax.inject.Inject; import java.sql.DatabaseMetaData; import java.sql.Types; import java.util.List; import java.util.Map.Entry; import java.util.Optional; import static com.facebook.presto.connector.system.jdbc.FilterUtil.filter; import static com.facebook.presto.connector.system.jdbc.FilterUtil.stringFilter; import static com.facebook.presto.connector.system.jdbc.FilterUtil.toSession; import static 
com.facebook.presto.metadata.MetadataUtil.TableMetadataBuilder.tableMetadataBuilder; import static com.facebook.presto.spi.type.BigintType.BIGINT; import static com.facebook.presto.spi.type.BooleanType.BOOLEAN; import static com.facebook.presto.spi.type.Chars.isCharType; import static com.facebook.presto.spi.type.DateType.DATE; import static com.facebook.presto.spi.type.DoubleType.DOUBLE; import static com.facebook.presto.spi.type.IntegerType.INTEGER; import static com.facebook.presto.spi.type.RealType.REAL; import static com.facebook.presto.spi.type.SmallintType.SMALLINT; import static com.facebook.presto.spi.type.TimeType.TIME; import static com.facebook.presto.spi.type.TimeWithTimeZoneType.TIME_WITH_TIME_ZONE; import static com.facebook.presto.spi.type.TimestampType.TIMESTAMP; import static com.facebook.presto.spi.type.TimestampWithTimeZoneType.TIMESTAMP_WITH_TIME_ZONE; import static com.facebook.presto.spi.type.TinyintType.TINYINT; import static com.facebook.presto.spi.type.VarbinaryType.VARBINARY; import static com.facebook.presto.spi.type.VarcharType.createUnboundedVarcharType; import static com.facebook.presto.spi.type.Varchars.isVarcharType; import static com.facebook.presto.util.Types.checkType; import static java.util.Objects.requireNonNull; public class ColumnJdbcTable extends JdbcTable { public static final SchemaTableName NAME = new SchemaTableName("jdbc", "columns"); public static final ConnectorTableMetadata METADATA = tableMetadataBuilder(NAME) .column("table_cat", createUnboundedVarcharType()) .column("table_schem", createUnboundedVarcharType()) .column("table_name", createUnboundedVarcharType()) .column("column_name", createUnboundedVarcharType()) .column("data_type", BIGINT) .column("type_name", createUnboundedVarcharType()) .column("column_size", BIGINT) .column("buffer_length", BIGINT) .column("decimal_digits", BIGINT) .column("num_prec_radix", BIGINT) .column("nullable", BIGINT) .column("remarks", createUnboundedVarcharType()) 
.column("column_def", createUnboundedVarcharType()) .column("sql_data_type", BIGINT) .column("sql_datetime_sub", BIGINT) .column("char_octet_length", BIGINT) .column("ordinal_position", BIGINT) .column("is_nullable", createUnboundedVarcharType()) .column("scope_catalog", createUnboundedVarcharType()) .column("scope_schema", createUnboundedVarcharType()) .column("scope_table", createUnboundedVarcharType()) .column("source_data_type", BIGINT) .column("is_autoincrement", createUnboundedVarcharType()) .column("is_generatedcolumn", createUnboundedVarcharType()) .build(); private final Metadata metadata; @Inject public ColumnJdbcTable(Metadata metadata) { this.metadata = requireNonNull(metadata); } @Override public ConnectorTableMetadata getTableMetadata() { return METADATA; } @Override public RecordCursor cursor(ConnectorTransactionHandle transactionHandle, ConnectorSession connectorSession, TupleDomain<Integer> constraint) { GlobalSystemTransactionHandle transaction = checkType(transactionHandle, GlobalSystemTransactionHandle.class, "transaction"); Session session = toSession(transaction.getTransactionId(), connectorSession); Optional<String> catalogFilter = stringFilter(constraint, 0); Optional<String> schemaFilter = stringFilter(constraint, 1); Optional<String> tableFilter = stringFilter(constraint, 2); Builder table = InMemoryRecordSet.builder(METADATA); for (String catalog : filter(metadata.getCatalogNames(session).keySet(), catalogFilter)) { QualifiedTablePrefix prefix = FilterUtil.tablePrefix(catalog, schemaFilter, tableFilter); for (Entry<QualifiedObjectName, List<ColumnMetadata>> entry : metadata.listTableColumns(session, prefix).entrySet()) { addColumnRows(table, entry.getKey(), entry.getValue()); } } return table.build().cursor(); } private static void addColumnRows(Builder builder, QualifiedObjectName tableName, List<ColumnMetadata> columns) { int ordinalPosition = 1; for (ColumnMetadata column : columns) { if (column.isHidden()) { continue; } 
builder.addRow( tableName.getCatalogName(), tableName.getSchemaName(), tableName.getObjectName(), column.getName(), jdbcDataType(column.getType()), column.getType().getDisplayName(), columnSize(column.getType()), 0, decimalDigits(column.getType()), numPrecRadix(column.getType()), DatabaseMetaData.columnNullableUnknown, column.getComment(), null, null, null, charOctetLength(column.getType()), ordinalPosition, "", null, null, null, null, null, null); ordinalPosition++; } } private static int jdbcDataType(Type type) { if (type.equals(BOOLEAN)) { return Types.BOOLEAN; } if (type.equals(BIGINT)) { return Types.BIGINT; } if (type.equals(INTEGER)) { return Types.INTEGER; } if (type.equals(SMALLINT)) { return Types.SMALLINT; } if (type.equals(TINYINT)) { return Types.TINYINT; } if (type.equals(REAL)) { return Types.REAL; } if (type.equals(DOUBLE)) { return Types.DOUBLE; } if (type instanceof DecimalType) { return Types.DECIMAL; } if (isVarcharType(type)) { return Types.LONGNVARCHAR; } if (isCharType(type)) { return Types.CHAR; } if (type.equals(VARBINARY)) { return Types.LONGVARBINARY; } if (type.equals(TIME)) { return Types.TIME; } if (type.equals(TIME_WITH_TIME_ZONE)) { return Types.TIME_WITH_TIMEZONE; } if (type.equals(TIMESTAMP)) { return Types.TIMESTAMP; } if (type.equals(TIMESTAMP_WITH_TIME_ZONE)) { return Types.TIMESTAMP_WITH_TIMEZONE; } if (type.equals(DATE)) { return Types.DATE; } if (type instanceof ArrayType) { return Types.ARRAY; } return Types.JAVA_OBJECT; } private static Integer columnSize(Type type) { if (type.equals(BIGINT)) { return 19; // 2**63-1 } if (type.equals(INTEGER)) { return 10; // 2**31-1 } if (type.equals(SMALLINT)) { return 5; // 2**15-1 } if (type.equals(TINYINT)) { return 3; // 2**7-1 } if (type instanceof DecimalType) { return ((DecimalType) type).getPrecision(); } if (type.equals(REAL)) { return 24; // IEEE 754 } if (type.equals(DOUBLE)) { return 53; // IEEE 754 } if (isVarcharType(type)) { return ((VarcharType) type).getLength(); } if 
(isCharType(type)) { return ((CharType) type).getLength(); } if (type.equals(VARBINARY)) { return Integer.MAX_VALUE; } if (type.equals(TIME)) { return 8; // 00:00:00 } if (type.equals(TIME_WITH_TIME_ZONE)) { return 8 + 6; // 00:00:00+00:00 } if (type.equals(DATE)) { return 14; // +5881580-07-11 (2**31-1 days) } if (type.equals(TIMESTAMP)) { return 15 + 8; } if (type.equals(TIMESTAMP_WITH_TIME_ZONE)) { return 15 + 8 + 6; } return null; } // DECIMAL_DIGITS is the number of fractional digits private static Integer decimalDigits(Type type) { if (type instanceof DecimalType) { return ((DecimalType) type).getScale(); } return null; } private static Integer charOctetLength(Type type) { if (isVarcharType(type)) { return ((VarcharType) type).getLength(); } if (isCharType(type)) { return ((CharType) type).getLength(); } if (type.equals(VARBINARY)) { return Integer.MAX_VALUE; } return null; } private static Integer numPrecRadix(Type type) { if (type.equals(BIGINT) || type.equals(INTEGER) || type.equals(SMALLINT) || type.equals(TINYINT) || (type instanceof DecimalType)) { return 10; } if (type.equals(REAL) || type.equals(DOUBLE)) { return 2; } return null; } }
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common.lucene.index;

import org.apache.lucene.analysis.core.KeywordAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.*;
import org.apache.lucene.queries.TermsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.test.ESTestCase;
import org.junit.After;
import org.junit.Before;

import java.util.*;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;

/**
 * Randomized tests for {@code FreqTermsEnum}: builds a random index (with deletions and an
 * optional id filter), records expected doc/total term frequencies in reference maps, then
 * checks the enum against those references under three views (all docs, live docs, filtered).
 */
public class FreqTermsEnumTests extends ESTestCase {

    // randomly generated vocabulary shared by all docs
    private String[] terms;
    private IndexWriter iw;
    private IndexReader reader;
    // expected freqs per term: every doc / only non-deleted docs / only filtered docs
    private Map<String, FreqHolder> referenceAll;
    private Map<String, FreqHolder> referenceNotDeleted;
    private Map<String, FreqHolder> referenceFilter;
    // id-based query selecting the random subset counted in referenceFilter
    private Query filter;

    // expected frequencies for a single term
    static class FreqHolder {
        int docFreq;
        long totalTermFreq;
    }

    @Before
    @Override
    public void setUp() throws Exception {
        super.setUp();
        referenceAll = new HashMap<>();
        referenceNotDeleted = new HashMap<>();
        referenceFilter = new HashMap<>();

        Directory dir = newDirectory();
        IndexWriterConfig conf = newIndexWriterConfig(new KeywordAnalyzer()); // use keyword analyzer we rely on the stored field holding the exact term.
        if (frequently()) {
            // we don't want to do any merges, so we won't expunge deletes
            conf.setMergePolicy(NoMergePolicy.INSTANCE);
        }
        iw = new IndexWriter(dir, conf);
        terms = new String[scaledRandomIntBetween(10, 300)];
        for (int i = 0; i < terms.length; i++) {
            terms[i] = randomAsciiOfLength(5);
        }

        int numberOfDocs = scaledRandomIntBetween(30, 300);
        Document[] docs = new Document[numberOfDocs];
        for (int i = 0; i < numberOfDocs; i++) {
            Document doc = new Document();
            doc.add(new StringField("id", Integer.toString(i), Field.Store.YES));
            docs[i] = doc;
            for (String term : terms) {
                // each term appears in ~half the docs, with frequency 1-3
                if (randomBoolean()) {
                    continue;
                }
                int freq = randomIntBetween(1, 3);
                for (int j = 0; j < freq; j++) {
                    doc.add(new TextField("field", term, Field.Store.YES));
                }
            }
        }

        // add all docs

        for (int i = 0; i < docs.length; i++) {
            Document doc = docs[i];
            iw.addDocument(doc);
            if (rarely()) {
                iw.commit();
            }
        }

        // randomly delete ~1/6 of the docs by id term
        Set<String> deletedIds = new HashSet<>();
        for (int i = 0; i < docs.length; i++) {
            Document doc = docs[i];
            if (randomInt(5) == 2) {
                Term idTerm = new Term("id", doc.getField("id").stringValue());
                deletedIds.add(idTerm.text());
                iw.deleteDocuments(idTerm);
            }
        }

        for (String term : terms) {
            referenceAll.put(term, new FreqHolder());
            referenceFilter.put(term, new FreqHolder());
            referenceNotDeleted.put(term, new FreqHolder());
        }

        // now go over each doc, build the relevant references and filter
        reader = DirectoryReader.open(iw, true);
        List<Term> filterTerms = new ArrayList<>();
        for (int docId = 0; docId < reader.maxDoc(); docId++) {
            Document doc = reader.document(docId);
            addFreqs(doc, referenceAll);
            if (!deletedIds.contains(doc.getField("id").stringValue())) {
                addFreqs(doc, referenceNotDeleted);
                if (randomBoolean()) {
                    filterTerms.add(new Term("id", doc.getField("id").stringValue()));
                    addFreqs(doc, referenceFilter);
                }
            }
        }
        filter = new TermsQuery(filterTerms);
    }

    // Accumulates this doc's stored "field" values into the reference map: docFreq is
    // incremented once per term per doc, totalTermFreq once per occurrence.
    private void addFreqs(Document doc, Map<String, FreqHolder> reference) {
        Set<String> addedDocFreq = new HashSet<>();
        for (IndexableField field : doc.getFields("field")) {
            String term = field.stringValue();
            FreqHolder freqHolder = reference.get(term);
            if (!addedDocFreq.contains(term)) {
                freqHolder.docFreq++;
                addedDocFreq.add(term);
            }
            freqHolder.totalTermFreq++;
        }
    }

    @After
    @Override
    public void tearDown() throws Exception {
        IOUtils.close(reader, iw, iw.getDirectory());
        super.tearDown();
    }

    public void testAllFreqs() throws Exception {
        assertAgainstReference(true, true, null, referenceAll);
        assertAgainstReference(true, false, null, referenceAll);
        assertAgainstReference(false, true, null, referenceAll);
    }

    public void testNonDeletedFreqs() throws Exception {
        assertAgainstReference(true, true, Queries.newMatchAllQuery(), referenceNotDeleted);
        assertAgainstReference(true, false, Queries.newMatchAllQuery(), referenceNotDeleted);
        assertAgainstReference(false, true, Queries.newMatchAllQuery(), referenceNotDeleted);
    }

    public void testFilterFreqs() throws Exception {
        assertAgainstReference(true, true, filter, referenceFilter);
        assertAgainstReference(true, false, filter, referenceFilter);
        assertAgainstReference(false, true, filter, referenceFilter);
    }

    private void assertAgainstReference(boolean docFreq, boolean totalTermFreq, Query filter, Map<String, FreqHolder> reference) throws Exception {
        FreqTermsEnum freqTermsEnum = new FreqTermsEnum(reader, "field", docFreq, totalTermFreq, filter, BigArrays.NON_RECYCLING_INSTANCE);
        assertAgainstReference(freqTermsEnum, reference, docFreq, totalTermFreq);
    }

    // Seeks every known term (in random order, over several cycles) and compares the enum's
    // frequencies to the reference; a failed seek must mean an expected docFreq of 0.
    private void assertAgainstReference(FreqTermsEnum termsEnum, Map<String, FreqHolder> reference, boolean docFreq, boolean totalTermFreq) throws Exception {
        int cycles = randomIntBetween(1, 5);
        for (int i = 0; i < cycles; i++) {
            List<String> terms = new ArrayList<>(Arrays.asList(this.terms));

            Collections.shuffle(terms, random());
            for (String term : terms) {
                if (!termsEnum.seekExact(new BytesRef(term))) {
                    assertThat("term : " + term, reference.get(term).docFreq, is(0));
                    continue;
                }
                if (docFreq) {
                    assertThat("cycle " + i + ", term " + term + ", docFreq", termsEnum.docFreq(), equalTo(reference.get(term).docFreq));
                }
                if (totalTermFreq) {
                    assertThat("cycle " + i + ", term " + term + ", totalTermFreq", termsEnum.totalTermFreq(), equalTo(reference.get(term).totalTermFreq));
                }
            }
        }
    }
}
// Generated from SimpleHeader.g4 by ANTLR 4.5
// NOTE(review): this file is machine-generated by ANTLR — do not edit by hand.
// To change parsing behavior, edit SimpleHeader.g4 and regenerate.
package dm.generator.antlr;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.misc.*;
import org.antlr.v4.runtime.tree.*;
import java.util.List;
import java.util.Iterator;
import java.util.ArrayList;

@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
public class SimpleHeaderParser extends Parser {
	static { RuntimeMetaData.checkVersion("4.5", RuntimeMetaData.VERSION); }

	protected static final DFA[] _decisionToDFA;
	protected static final PredictionContextCache _sharedContextCache =
		new PredictionContextCache();
	// Token type constants (indices into VOCABULARY); T__n are anonymous
	// keyword/punctuation tokens such as 'enum', '=', '#include', etc.
	public static final int
		T__0=1, T__1=2, T__2=3, T__3=4, T__4=5, T__5=6, T__6=7, T__7=8, LBRACE=9, 
		RBRACE=10, LPAREN=11, RPAREN=12, LBRACKET=13, RBRACKET=14, SEMICOLON=15, 
		COMMA=16, STAR=17, CONST=18, IDENTIFIER=19, NUMBER=20, WS=21;
	// Grammar rule indices, in declaration order of SimpleHeader.g4.
	public static final int
		RULE_expression = 0, RULE_type = 1, RULE_enum_specifier = 2, RULE_enum_entry = 3, 
		RULE_enum_value = 4, RULE_include = 5, RULE_typedef = 6, RULE_function_dec = 7, 
		RULE_param_list = 8, RULE_param = 9;
	public static final String[] ruleNames = {
		"expression", "type", "enum_specifier", "enum_entry", "enum_value", "include", 
		"typedef", "function_dec", "param_list", "param"
	};

	private static final String[] _LITERAL_NAMES = {
		null, "'enum'", "'='", "'#include'", "'#import'", "'<'", "'\"'", "'>'", 
		"'typedef'", "'{'", "'}'", "'('", "')'", "'['", "']'", "';'", "','", "'*'", 
		"'const'"
	};
	private static final String[] _SYMBOLIC_NAMES = {
		null, null, null, null, null, null, null, null, null, "LBRACE", "RBRACE", 
		"LPAREN", "RPAREN", "LBRACKET", "RBRACKET", "SEMICOLON", "COMMA", "STAR", 
		"CONST", "IDENTIFIER", "NUMBER", "WS"
	};
	public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES);

	/**
	 * @deprecated Use {@link #VOCABULARY} instead.
	 */
	@Deprecated
	public static final String[] tokenNames;
	static {
		// Legacy token-name table built from VOCABULARY for backward compatibility.
		tokenNames = new String[_SYMBOLIC_NAMES.length];
		for (int i = 0; i < tokenNames.length; i++) {
			tokenNames[i] = VOCABULARY.getLiteralName(i);
			if (tokenNames[i] == null) {
				tokenNames[i] = VOCABULARY.getSymbolicName(i);
			}

			if (tokenNames[i] == null) {
				tokenNames[i] = "<INVALID>";
			}
		}
	}

	@Override
	@Deprecated
	public String[] getTokenNames() {
		return tokenNames;
	}

	@Override

	public Vocabulary getVocabulary() {
		return VOCABULARY;
	}

	@Override
	public String getGrammarFileName() { return "SimpleHeader.g4"; }

	@Override
	public String[] getRuleNames() { return ruleNames; }

	@Override
	public String getSerializedATN() { return _serializedATN; }

	@Override
	public ATN getATN() { return _ATN; }

	public SimpleHeaderParser(TokenStream input) {
		super(input);
		_interp = new ParserATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache);
	}

	// Parse-tree context for the 'expression' rule: either
	// (include | enum_specifier | typedef | function_dec) expression, or EOF.
	public static class ExpressionContext extends ParserRuleContext {
		public ExpressionContext expression() {
			return getRuleContext(ExpressionContext.class,0);
		}
		public IncludeContext include() {
			return getRuleContext(IncludeContext.class,0);
		}
		public Enum_specifierContext enum_specifier() {
			return getRuleContext(Enum_specifierContext.class,0);
		}
		public TypedefContext typedef() {
			return getRuleContext(TypedefContext.class,0);
		}
		public Function_decContext function_dec() {
			return getRuleContext(Function_decContext.class,0);
		}
		public TerminalNode EOF() { return getToken(SimpleHeaderParser.EOF, 0); }
		public ExpressionContext(ParserRuleContext parent, int invokingState) {
			super(parent, invokingState);
		}
		@Override public int getRuleIndex() { return RULE_expression; }
		@Override
		public void enterRule(ParseTreeListener listener) {
			if ( listener instanceof SimpleHeaderListener ) ((SimpleHeaderListener)listener).enterExpression(this);
		}
		@Override
		public void exitRule(ParseTreeListener listener) {
			if ( listener instanceof SimpleHeaderListener ) ((SimpleHeaderListener)listener).exitExpression(this);
		}
	}

	// expression : (include | enum_specifier | typedef | function_dec) expression | EOF ;
	// Dispatches on the lookahead token ('#include'/'#import', 'enum', 'typedef',
	// IDENTIFIER) and recurses until EOF is matched.
	public final ExpressionContext expression() throws RecognitionException {
		ExpressionContext _localctx = new ExpressionContext(_ctx, getState());
		enterRule(_localctx, 0, RULE_expression);
		try {
			setState(29);
			switch (_input.LA(1)) {
			case T__0:
			case T__2:
			case T__3:
			case T__7:
			case IDENTIFIER:
				enterOuterAlt(_localctx, 1);
				{
				setState(24);
				switch (_input.LA(1)) {
				case T__2:
				case T__3:
					{
					setState(20);
					include();
					}
					break;
				case T__0:
					{
					setState(21);
					enum_specifier();
					}
					break;
				case T__7:
					{
					setState(22);
					typedef();
					}
					break;
				case IDENTIFIER:
					{
					setState(23);
					function_dec();
					}
					break;
				default:
					throw new NoViableAltException(this);
				}
				setState(26);
				expression();
				}
				break;
			case EOF:
				enterOuterAlt(_localctx, 2);
				{
				setState(28);
				match(EOF);
				}
				break;
			default:
				throw new NoViableAltException(this);
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}

	// Parse-tree context for the 'type' rule: IDENTIFIER with optional
	// CONST (before or after) and an optional trailing STAR.
	public static class TypeContext extends ParserRuleContext {
		public TerminalNode IDENTIFIER() { return getToken(SimpleHeaderParser.IDENTIFIER, 0); }
		public TerminalNode STAR() { return getToken(SimpleHeaderParser.STAR, 0); }
		public TerminalNode CONST() { return getToken(SimpleHeaderParser.CONST, 0); }
		public TypeContext(ParserRuleContext parent, int invokingState) {
			super(parent, invokingState);
		}
		@Override public int getRuleIndex() { return RULE_type; }
		@Override
		public void enterRule(ParseTreeListener listener) {
			if ( listener instanceof SimpleHeaderListener ) ((SimpleHeaderListener)listener).enterType(this);
		}
		@Override
		public void exitRule(ParseTreeListener listener) {
			if ( listener instanceof SimpleHeaderListener ) ((SimpleHeaderListener)listener).exitType(this);
		}
	}

	// type : (CONST? IDENTIFIER | IDENTIFIER CONST?) STAR? ;
	// Uses adaptive prediction to decide whether 'const' precedes or follows
	// the identifier.
	public final TypeContext type() throws RecognitionException {
		TypeContext _localctx = new TypeContext(_ctx, getState());
		enterRule(_localctx, 2, RULE_type);
		int _la;
		try {
			enterOuterAlt(_localctx, 1);
			{
			setState(39);
			switch ( getInterpreter().adaptivePredict(_input,4,_ctx) ) {
			case 1:
				{
				setState(32);
				_la = _input.LA(1);
				if (_la==CONST) {
					{
					setState(31);
					match(CONST);
					}
				}

				setState(34);
				match(IDENTIFIER);
				}
				break;
			case 2:
				{
				setState(35);
				match(IDENTIFIER);
				setState(37);
				_la = _input.LA(1);
				if (_la==CONST) {
					{
					setState(36);
					match(CONST);
					}
				}

				}
				break;
			}
			setState(42);
			_la = _input.LA(1);
			if (_la==STAR) {
				{
				setState(41);
				match(STAR);
				}
			}

			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}

	// Parse-tree context for 'enum_specifier':
	// 'enum' IDENTIFIER? '{' enum_entry+ '}' ';'
	public static class Enum_specifierContext extends ParserRuleContext {
		public TerminalNode LBRACE() { return getToken(SimpleHeaderParser.LBRACE, 0); }
		public TerminalNode RBRACE() { return getToken(SimpleHeaderParser.RBRACE, 0); }
		public TerminalNode SEMICOLON() { return getToken(SimpleHeaderParser.SEMICOLON, 0); }
		public TerminalNode IDENTIFIER() { return getToken(SimpleHeaderParser.IDENTIFIER, 0); }
		public List<Enum_entryContext> enum_entry() {
			return getRuleContexts(Enum_entryContext.class);
		}
		public Enum_entryContext enum_entry(int i) {
			return getRuleContext(Enum_entryContext.class,i);
		}
		public Enum_specifierContext(ParserRuleContext parent, int invokingState) {
			super(parent, invokingState);
		}
		@Override public int getRuleIndex() { return RULE_enum_specifier; }
		@Override
		public void enterRule(ParseTreeListener listener) {
			if ( listener instanceof SimpleHeaderListener ) ((SimpleHeaderListener)listener).enterEnum_specifier(this);
		}
		@Override
		public void exitRule(ParseTreeListener listener) {
			if ( listener instanceof SimpleHeaderListener ) ((SimpleHeaderListener)listener).exitEnum_specifier(this);
		}
	}

	// enum_specifier : 'enum' IDENTIFIER? '{' enum_entry+ '}' ';' ;
	// At least one enum_entry is required (do/while loop keyed on IDENTIFIER).
	public final Enum_specifierContext enum_specifier() throws RecognitionException {
		Enum_specifierContext _localctx = new Enum_specifierContext(_ctx, getState());
		enterRule(_localctx, 4, RULE_enum_specifier);
		int _la;
		try {
			enterOuterAlt(_localctx, 1);
			{
			setState(44);
			match(T__0);
			setState(46);
			_la = _input.LA(1);
			if (_la==IDENTIFIER) {
				{
				setState(45);
				match(IDENTIFIER);
				}
			}

			setState(48);
			match(LBRACE);
			setState(50);
			_errHandler.sync(this);
			_la = _input.LA(1);
			do {
				{
				{
				setState(49);
				enum_entry();
				}
				}
				setState(52);
				_errHandler.sync(this);
				_la = _input.LA(1);
			} while ( _la==IDENTIFIER );
			setState(54);
			match(RBRACE);
			setState(55);
			match(SEMICOLON);
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}

	// Parse-tree context for 'enum_entry': IDENTIFIER enum_value? COMMA?
	public static class Enum_entryContext extends ParserRuleContext {
		public TerminalNode IDENTIFIER() { return getToken(SimpleHeaderParser.IDENTIFIER, 0); }
		public Enum_valueContext enum_value() {
			return getRuleContext(Enum_valueContext.class,0);
		}
		public TerminalNode COMMA() { return getToken(SimpleHeaderParser.COMMA, 0); }
		public Enum_entryContext(ParserRuleContext parent, int invokingState) {
			super(parent, invokingState);
		}
		@Override public int getRuleIndex() { return RULE_enum_entry; }
		@Override
		public void enterRule(ParseTreeListener listener) {
			if ( listener instanceof SimpleHeaderListener ) ((SimpleHeaderListener)listener).enterEnum_entry(this);
		}
		@Override
		public void exitRule(ParseTreeListener listener) {
			if ( listener instanceof SimpleHeaderListener ) ((SimpleHeaderListener)listener).exitEnum_entry(this);
		}
	}

	// enum_entry : IDENTIFIER enum_value? ','? ;
	// enum_value is predicted by a leading '=' (T__1).
	public final Enum_entryContext enum_entry() throws RecognitionException {
		Enum_entryContext _localctx = new Enum_entryContext(_ctx, getState());
		enterRule(_localctx, 6, RULE_enum_entry);
		int _la;
		try {
			enterOuterAlt(_localctx, 1);
			{
			setState(57);
			match(IDENTIFIER);
			setState(59);
			_la = _input.LA(1);
			if (_la==T__1) {
				{
				setState(58);
				enum_value();
				}
			}

			setState(62);
			_la = _input.LA(1);
			if (_la==COMMA) {
				{
				setState(61);
				match(COMMA);
				}
			}

			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}

	// Parse-tree context for 'enum_value': '=' NUMBER
	public static class Enum_valueContext extends ParserRuleContext {
		public TerminalNode NUMBER() { return getToken(SimpleHeaderParser.NUMBER, 0); }
		public Enum_valueContext(ParserRuleContext parent, int invokingState) {
			super(parent, invokingState);
		}
		@Override public int getRuleIndex() { return RULE_enum_value; }
		@Override
		public void enterRule(ParseTreeListener listener) {
			if ( listener instanceof SimpleHeaderListener ) ((SimpleHeaderListener)listener).enterEnum_value(this);
		}
		@Override
		public void exitRule(ParseTreeListener listener) {
			if ( listener instanceof SimpleHeaderListener ) ((SimpleHeaderListener)listener).exitEnum_value(this);
		}
	}

	// enum_value : '=' NUMBER ;
	public final Enum_valueContext enum_value() throws RecognitionException {
		Enum_valueContext _localctx = new Enum_valueContext(_ctx, getState());
		enterRule(_localctx, 8, RULE_enum_value);
		try {
			enterOuterAlt(_localctx, 1);
			{
			setState(64);
			match(T__1);
			setState(65);
			match(NUMBER);
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}

	// Parse-tree context for 'include':
	// ('#include' | '#import') ('<' | '"') IDENTIFIER ('"' | '>')
	public static class IncludeContext extends ParserRuleContext {
		public TerminalNode IDENTIFIER() { return getToken(SimpleHeaderParser.IDENTIFIER, 0); }
		public IncludeContext(ParserRuleContext parent, int invokingState) {
			super(parent, invokingState);
		}
		@Override public int getRuleIndex() { return RULE_include; }
		@Override
		public void enterRule(ParseTreeListener listener) {
			if ( listener instanceof SimpleHeaderListener ) ((SimpleHeaderListener)listener).enterInclude(this);
		}
		@Override
		public void exitRule(ParseTreeListener listener) {
			if ( listener instanceof SimpleHeaderListener ) ((SimpleHeaderListener)listener).exitInclude(this);
		}
	}

	// include : ('#include'|'#import') ('<'|'"') IDENTIFIER ('"'|'>') ;
	// Each token-set alternative recovers inline on mismatch.
	public final IncludeContext include() throws RecognitionException {
		IncludeContext _localctx = new IncludeContext(_ctx, getState());
		enterRule(_localctx, 10, RULE_include);
		int _la;
		try {
			enterOuterAlt(_localctx, 1);
			{
			setState(67);
			_la = _input.LA(1);
			if ( !(_la==T__2 || _la==T__3) ) {
			_errHandler.recoverInline(this);
			} else {
				consume();
			}
			setState(68);
			_la = _input.LA(1);
			if ( !(_la==T__4 || _la==T__5) ) {
			_errHandler.recoverInline(this);
			} else {
				consume();
			}
			setState(69);
			match(IDENTIFIER);
			setState(70);
			_la = _input.LA(1);
			if ( !(_la==T__5 || _la==T__6) ) {
			_errHandler.recoverInline(this);
			} else {
				consume();
			}
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}

	// Parse-tree context for 'typedef': 'typedef' IDENTIFIER IDENTIFIER ';'
	public static class TypedefContext extends ParserRuleContext {
		public List<TerminalNode> IDENTIFIER() { return getTokens(SimpleHeaderParser.IDENTIFIER); }
		public TerminalNode IDENTIFIER(int i) {
			return getToken(SimpleHeaderParser.IDENTIFIER, i);
		}
		public TerminalNode SEMICOLON() { return getToken(SimpleHeaderParser.SEMICOLON, 0); }
		public TypedefContext(ParserRuleContext parent, int invokingState) {
			super(parent, invokingState);
		}
		@Override public int getRuleIndex() { return RULE_typedef; }
		@Override
		public void enterRule(ParseTreeListener listener) {
			if ( listener instanceof SimpleHeaderListener ) ((SimpleHeaderListener)listener).enterTypedef(this);
		}
		@Override
		public void exitRule(ParseTreeListener listener) {
			if ( listener instanceof SimpleHeaderListener ) ((SimpleHeaderListener)listener).exitTypedef(this);
		}
	}

	// typedef : 'typedef' IDENTIFIER IDENTIFIER ';' ;
	public final TypedefContext typedef() throws RecognitionException {
		TypedefContext _localctx = new TypedefContext(_ctx, getState());
		enterRule(_localctx, 12, RULE_typedef);
		try {
			enterOuterAlt(_localctx, 1);
			{
			setState(72);
			match(T__7);
			setState(73);
			match(IDENTIFIER);
			setState(74);
			match(IDENTIFIER);
			setState(75);
			match(SEMICOLON);
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}

	// Parse-tree context for 'function_dec':
	// IDENTIFIER IDENTIFIER '(' param_list ')' ';'
	public static class Function_decContext extends ParserRuleContext {
		public List<TerminalNode> IDENTIFIER() { return getTokens(SimpleHeaderParser.IDENTIFIER); }
		public TerminalNode IDENTIFIER(int i) {
			return getToken(SimpleHeaderParser.IDENTIFIER, i);
		}
		public TerminalNode LPAREN() { return getToken(SimpleHeaderParser.LPAREN, 0); }
		public Param_listContext param_list() {
			return getRuleContext(Param_listContext.class,0);
		}
		public TerminalNode RPAREN() { return getToken(SimpleHeaderParser.RPAREN, 0); }
		public TerminalNode SEMICOLON() { return getToken(SimpleHeaderParser.SEMICOLON, 0); }
		public Function_decContext(ParserRuleContext parent, int invokingState) {
			super(parent, invokingState);
		}
		@Override public int getRuleIndex() { return RULE_function_dec; }
		@Override
		public void enterRule(ParseTreeListener listener) {
			if ( listener instanceof SimpleHeaderListener ) ((SimpleHeaderListener)listener).enterFunction_dec(this);
		}
		@Override
		public void exitRule(ParseTreeListener listener) {
			if ( listener instanceof SimpleHeaderListener ) ((SimpleHeaderListener)listener).exitFunction_dec(this);
		}
	}

	// function_dec : IDENTIFIER IDENTIFIER '(' param_list ')' ';' ;
	// (return type, function name, then the parameter list)
	public final Function_decContext function_dec() throws RecognitionException {
		Function_decContext _localctx = new Function_decContext(_ctx, getState());
		enterRule(_localctx, 14, RULE_function_dec);
		try {
			enterOuterAlt(_localctx, 1);
			{
			setState(77);
			match(IDENTIFIER);
			setState(78);
			match(IDENTIFIER);
			setState(79);
			match(LPAREN);
			setState(80);
			param_list();
			setState(81);
			match(RPAREN);
			setState(82);
			match(SEMICOLON);
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}

	// Parse-tree context for 'param_list': (param (',' param)*)?
	public static class Param_listContext extends ParserRuleContext {
		public List<ParamContext> param() {
			return getRuleContexts(ParamContext.class);
		}
		public ParamContext param(int i) {
			return getRuleContext(ParamContext.class,i);
		}
		public List<TerminalNode> COMMA() { return getTokens(SimpleHeaderParser.COMMA); }
		public TerminalNode COMMA(int i) {
			return getToken(SimpleHeaderParser.COMMA, i);
		}
		public Param_listContext(ParserRuleContext parent, int invokingState) {
			super(parent, invokingState);
		}
		@Override public int getRuleIndex() { return RULE_param_list; }
		@Override
		public void enterRule(ParseTreeListener listener) {
			if ( listener instanceof SimpleHeaderListener ) ((SimpleHeaderListener)listener).enterParam_list(this);
		}
		@Override
		public void exitRule(ParseTreeListener listener) {
			if ( listener instanceof SimpleHeaderListener ) ((SimpleHeaderListener)listener).exitParam_list(this);
		}
	}

	// param_list : (param (',' param)*)? ;
	// An empty list is allowed; a param starts with CONST or IDENTIFIER.
	public final Param_listContext param_list() throws RecognitionException {
		Param_listContext _localctx = new Param_listContext(_ctx, getState());
		enterRule(_localctx, 16, RULE_param_list);
		int _la;
		try {
			enterOuterAlt(_localctx, 1);
			{
			setState(92);
			_la = _input.LA(1);
			if (_la==CONST || _la==IDENTIFIER) {
				{
				setState(84);
				param();
				setState(89);
				_errHandler.sync(this);
				_la = _input.LA(1);
				while (_la==COMMA) {
					{
					{
					setState(85);
					match(COMMA);
					setState(86);
					param();
					}
					}
					setState(91);
					_errHandler.sync(this);
					_la = _input.LA(1);
				}
				}
			}

			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}

	// Parse-tree context for 'param': type IDENTIFIER
	public static class ParamContext extends ParserRuleContext {
		public TypeContext type() {
			return getRuleContext(TypeContext.class,0);
		}
		public TerminalNode IDENTIFIER() { return getToken(SimpleHeaderParser.IDENTIFIER, 0); }
		public ParamContext(ParserRuleContext parent, int invokingState) {
			super(parent, invokingState);
		}
		@Override public int getRuleIndex() { return RULE_param; }
		@Override
		public void enterRule(ParseTreeListener listener) {
			if ( listener instanceof SimpleHeaderListener ) ((SimpleHeaderListener)listener).enterParam(this);
		}
		@Override
		public void exitRule(ParseTreeListener listener) {
			if ( listener instanceof SimpleHeaderListener ) ((SimpleHeaderListener)listener).exitParam(this);
		}
	}

	// param : type IDENTIFIER ;
	public final ParamContext param() throws RecognitionException {
		ParamContext _localctx = new ParamContext(_ctx, getState());
		enterRule(_localctx, 18, RULE_param);
		try {
			enterOuterAlt(_localctx, 1);
			{
			setState(94);
			type();
			setState(95);
			match(IDENTIFIER);
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}

	// Serialized ATN (augmented transition network) for this grammar,
	// deserialized once below into _ATN. Opaque — regenerate, never hand-edit.
	public static final String _serializedATN =
		"\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3\27d\4\2\t\2\4\3\t"+
		"\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\3"+
		"\2\3\2\3\2\3\2\5\2\33\n\2\3\2\3\2\3\2\5\2 \n\2\3\3\5\3#\n\3\3\3\3\3\3"+
		"\3\5\3(\n\3\5\3*\n\3\3\3\5\3-\n\3\3\4\3\4\5\4\61\n\4\3\4\3\4\6\4\65\n"+
		"\4\r\4\16\4\66\3\4\3\4\3\4\3\5\3\5\5\5>\n\5\3\5\5\5A\n\5\3\6\3\6\3\6\3"+
		"\7\3\7\3\7\3\7\3\7\3\b\3\b\3\b\3\b\3\b\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\n"+
		"\3\n\3\n\7\nZ\n\n\f\n\16\n]\13\n\5\n_\n\n\3\13\3\13\3\13\3\13\2\2\f\2"+
		"\4\6\b\n\f\16\20\22\24\2\5\3\2\5\6\3\2\7\b\3\2\b\tg\2\37\3\2\2\2\4)\3"+
		"\2\2\2\6.\3\2\2\2\b;\3\2\2\2\nB\3\2\2\2\fE\3\2\2\2\16J\3\2\2\2\20O\3\2"+
		"\2\2\22^\3\2\2\2\24`\3\2\2\2\26\33\5\f\7\2\27\33\5\6\4\2\30\33\5\16\b"+
		"\2\31\33\5\20\t\2\32\26\3\2\2\2\32\27\3\2\2\2\32\30\3\2\2\2\32\31\3\2"+
		"\2\2\33\34\3\2\2\2\34\35\5\2\2\2\35 \3\2\2\2\36 \7\2\2\3\37\32\3\2\2\2"+
		"\37\36\3\2\2\2 \3\3\2\2\2!#\7\24\2\2\"!\3\2\2\2\"#\3\2\2\2#$\3\2\2\2$"+
		"*\7\25\2\2%\'\7\25\2\2&(\7\24\2\2\'&\3\2\2\2\'(\3\2\2\2(*\3\2\2\2)\"\3"+
		"\2\2\2)%\3\2\2\2*,\3\2\2\2+-\7\23\2\2,+\3\2\2\2,-\3\2\2\2-\5\3\2\2\2."+
		"\60\7\3\2\2/\61\7\25\2\2\60/\3\2\2\2\60\61\3\2\2\2\61\62\3\2\2\2\62\64"+
		"\7\13\2\2\63\65\5\b\5\2\64\63\3\2\2\2\65\66\3\2\2\2\66\64\3\2\2\2\66\67"+
		"\3\2\2\2\678\3\2\2\289\7\f\2\29:\7\21\2\2:\7\3\2\2\2;=\7\25\2\2<>\5\n"+
		"\6\2=<\3\2\2\2=>\3\2\2\2>@\3\2\2\2?A\7\22\2\2@?\3\2\2\2@A\3\2\2\2A\t\3"+
		"\2\2\2BC\7\4\2\2CD\7\26\2\2D\13\3\2\2\2EF\t\2\2\2FG\t\3\2\2GH\7\25\2\2"+
		"HI\t\4\2\2I\r\3\2\2\2JK\7\n\2\2KL\7\25\2\2LM\7\25\2\2MN\7\21\2\2N\17\3"+
		"\2\2\2OP\7\25\2\2PQ\7\25\2\2QR\7\r\2\2RS\5\22\n\2ST\7\16\2\2TU\7\21\2"+
		"\2U\21\3\2\2\2V[\5\24\13\2WX\7\22\2\2XZ\5\24\13\2YW\3\2\2\2Z]\3\2\2\2"+
		"[Y\3\2\2\2[\\\3\2\2\2\\_\3\2\2\2][\3\2\2\2^V\3\2\2\2^_\3\2\2\2_\23\3\2"+
		"\2\2`a\5\4\3\2ab\7\25\2\2b\25\3\2\2\2\16\32\37\"\'),\60\66=@[^";
	public static final ATN _ATN =
		new ATNDeserializer().deserialize(_serializedATN.toCharArray());
	static {
		// One DFA per ATN decision point, shared across parser instances.
		_decisionToDFA = new DFA[_ATN.getNumberOfDecisions()];
		for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) {
			_decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i);
		}
	}
}
/*
 * ModeShape (http://www.modeshape.org)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.modeshape.jcr;

import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.IsNull.notNullValue;
import static org.junit.Assert.assertThat;
import java.io.IOException;
import java.io.InputStream;
import java.util.concurrent.TimeUnit;
import javax.jcr.ImportUUIDBehavior;
import javax.jcr.Node;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.jcr.Workspace;
import junit.framework.AssertionFailedError;
import org.infinispan.schematic.document.Changes;
import org.infinispan.schematic.document.Document;
import org.infinispan.schematic.document.EditableArray;
import org.infinispan.schematic.document.EditableDocument;
import org.infinispan.schematic.document.Editor;
import org.infinispan.schematic.document.Json;
import org.junit.After;
import org.junit.Before;
import org.modeshape.jcr.api.JcrTools;

/**
 * A base class for tests that require a new JcrSession and JcrRepository for each test method.
 */
public abstract class SingleUseAbstractTest extends AbstractJcrRepositoryTest {

    /** The name used for every repository created by this test base class. */
    protected static final String REPO_NAME = "testRepo";

    /**
     * Flag that will signal to {@link #beforeEach()} whether to automatically start the repository using the
     * {@link #createRepositoryConfiguration(String, Environment) default configuration}.
     * <p>
     * There are two ways to run tests with this class:
     * <ol>
     * <li>All tests run against a fresh repository created from the same configuration. In this case, the
     * {@link #startRepositoryAutomatically} variable should be set to true, and the
     * {@link #createRepositoryConfiguration(String, Environment)} should be overridden if a non-default configuration is to be
     * used for all the tests.</li>
     * <li>Each test requires a fresh repository with a different configuration. In this case, the
     * {@link #startRepositoryAutomatically} variable should be set to <code>false</code>, and each test should then call one of
     * the {@link #startRepositoryWithConfiguration(RepositoryConfiguration)} methods before using the repository.</li>
     * </ol>
     */
    private boolean startRepositoryAutomatically = true;

    protected Environment environment = new TestingEnvironment();
    protected RepositoryConfiguration config;
    protected JcrRepository repository;
    protected JcrSession session;
    protected JcrTools tools;

    /**
     * Create, start, and log into a fresh repository using the configuration returned by
     * {@link #createRepositoryConfiguration(String, Environment)}.
     *
     * @throws Exception if the repository could not be created or started
     */
    protected void startRepository() throws Exception {
        config = createRepositoryConfiguration(REPO_NAME, environment);
        repository = new JcrRepository(config);
        repository.start();
        session = repository.login();
    }

    /**
     * Log out of the current session (if still live), kill the repository, and release all state. The nested
     * try/finally blocks guarantee that the environment is shut down even when logout or kill fails.
     *
     * @throws Exception if there is a problem shutting down
     */
    protected void stopRepository() throws Exception {
        try {
            try {
                if (session != null && session.isLive()) session.logout();
            } finally {
                TestingUtil.killRepositories(repository);
            }
        } finally {
            repository = null;
            config = null;
            environment.shutdown();
        }
    }

    @Override
    @Before
    public void beforeEach() throws Exception {
        super.beforeEach();
        // Only start automatically when the subclass has not opted out; otherwise the
        // test is expected to call one of the startRepositoryWithConfiguration(...) methods.
        if (startRepositoryAutomatically()) {
            startRepository();
        }
        tools = new JcrTools();
    }

    @After
    public void afterEach() throws Exception {
        stopRepository();
    }

    @Override
    protected JcrSession session() {
        return session;
    }

    /**
     * Open an additional session against the running repository.
     *
     * @return a new session; never null
     * @throws RepositoryException if the login fails
     */
    protected Session newSession() throws RepositoryException {
        return repository.login();
    }

    /**
     * @return the session created when the repository was started; same as {@link #session()}
     */
    protected Session jcrSession() {
        return session;
    }

    @Override
    protected JcrRepository repository() {
        return repository;
    }

    /**
     * Subclasses can override this method to define the RepositoryConfiguration that will be used for the given repository name
     * and cache container. By default, this method simply returns an empty configuration:
     *
     * <pre>
     * return new RepositoryConfiguration(repositoryName, cacheContainer);
     * </pre>
     *
     * @param repositoryName the name of the repository to create; never null
     * @param environment the environment that the resulting configuration should use; may be null
     * @return the repository configuration
     * @throws Exception if there is a problem creating the configuration
     */
    protected RepositoryConfiguration createRepositoryConfiguration( String repositoryName,
                                                                     Environment environment ) throws Exception {
        return new RepositoryConfiguration(repositoryName, environment);
    }

    /**
     * Subclasses can call this method at the beginning of each test to shutdown any currently-running repository and to start up
     * a new repository with the given JSON configuration content.
     *
     * @param configContent the JSON string containing the configuration for the repository (note that single quotes can be used
     *        in place of double quotes, making it easier to specify JSON content as a Java string)
     * @throws Exception if there was a problem starting the repository
     * @see #startRepositoryWithConfiguration(Document)
     * @see #startRepositoryWithConfiguration(InputStream)
     * @see #startRepositoryWithConfiguration(RepositoryConfiguration)
     * @see #startRepositoryAutomatically
     */
    protected void startRepositoryWithConfiguration( String configContent ) throws Exception {
        Document doc = Json.read(configContent);
        startRepositoryWithConfiguration(doc);
    }

    /**
     * Subclasses can call this method at the beginning of each test to shutdown any currently-running repository and to start up
     * a new repository with the given JSON configuration document.
     *
     * @param doc the JSON document containing the configuration for the repository
     * @throws Exception if there was a problem starting the repository
     * @see #startRepositoryWithConfiguration(String)
     * @see #startRepositoryWithConfiguration(InputStream)
     * @see #startRepositoryWithConfiguration(RepositoryConfiguration)
     * @see #startRepositoryAutomatically
     */
    protected void startRepositoryWithConfiguration( Document doc ) throws Exception {
        // Use a distinct local name so the 'config' field (assigned by the delegate) is not shadowed.
        RepositoryConfiguration newConfig = new RepositoryConfiguration(doc, REPO_NAME, environment);
        startRepositoryWithConfiguration(newConfig);
    }

    /**
     * Subclasses can call this method at the beginning of each test to shutdown any currently-running repository and to start up
     * a new repository with the given JSON configuration content.
     *
     * @param configInputStream the input stream containing the JSON content defining the configuration for the repository
     * @throws Exception if there was a problem starting the repository
     * @see #startRepositoryWithConfiguration(String)
     * @see #startRepositoryWithConfiguration(Document)
     * @see #startRepositoryWithConfiguration(RepositoryConfiguration)
     * @see #startRepositoryAutomatically
     */
    protected void startRepositoryWithConfiguration( InputStream configInputStream ) throws Exception {
        RepositoryConfiguration newConfig = RepositoryConfiguration.read(configInputStream, REPO_NAME).with(environment);
        startRepositoryWithConfiguration(newConfig);
    }

    /**
     * Subclasses can call this method at the beginning of each test to shutdown any currently-running repository and to start up
     * a new repository with the given repository configuration.
     *
     * @param configuration the repository configuration object; may not be null
     * @throws Exception if there was a problem starting the repository
     * @see #startRepositoryWithConfiguration(String)
     * @see #startRepositoryWithConfiguration(Document)
     * @see #startRepositoryWithConfiguration(InputStream)
     * @see #startRepositoryAutomatically
     */
    protected void startRepositoryWithConfiguration( RepositoryConfiguration configuration ) throws Exception {
        config = configuration;
        if (repository != null) {
            try {
                // Give the previous repository a bounded amount of time to shut down cleanly.
                repository.shutdown().get(10, TimeUnit.SECONDS);
            } finally {
                repository = null;
            }
        }
        repository = new JcrRepository(config);
        repository.start();
        session = repository.login();
    }

    /**
     * Make sure that a workspace with the supplied name exists.
     *
     * @param workspaceName the name of the workspace; may not be null
     */
    protected void predefineWorkspace( String workspaceName ) {
        assertThat(workspaceName, is(notNullValue()));
        // Edit the configuration ...
        Editor editor = config.edit();
        EditableDocument workspaces = editor.getOrCreateDocument("workspaces");
        EditableArray predefined = workspaces.getOrCreateArray("predefined");
        predefined.addStringIfAbsent(workspaceName);

        // And apply the changes ...
        Changes changes = editor.getChanges();
        if (changes.isEmpty()) return;
        try {
            repository.apply(changes);
        } catch (Exception e) {
            // Preserve the original exception as the cause so the full stack trace
            // appears in the test report (AssertionFailedError has no cause constructor).
            AssertionFailedError error = new AssertionFailedError("Unexpected error while predefining the \"" + workspaceName
                                                                  + "\" workspace:" + e.getMessage());
            error.initCause(e);
            throw error;
        }
    }

    /**
     * Utility method to get the resource on the classpath given by the supplied name
     *
     * @param name the name (or path) of the classpath resource
     * @return the input stream to the content; may be null if the resource does not exist
     */
    protected InputStream resourceStream( String name ) {
        return getClass().getClassLoader().getResourceAsStream(name);
    }

    /**
     * Register the node types in the CND file at the given location on the classpath.
     *
     * @param resourceName the name of the CND file on the classpath
     * @throws RepositoryException if there is a problem registering the node types
     * @throws IOException if the CND file could not be read
     */
    protected void registerNodeTypes( String resourceName ) throws RepositoryException, IOException {
        InputStream stream = resourceStream(resourceName);
        assertThat(stream, is(notNullValue()));
        Workspace workspace = session().getWorkspace();
        org.modeshape.jcr.api.nodetype.NodeTypeManager ntMgr = (org.modeshape.jcr.api.nodetype.NodeTypeManager)workspace.getNodeTypeManager();
        ntMgr.registerNodeTypes(stream, true);
    }

    /**
     * Import under the supplied parent node the repository content in the XML file at the given location on the classpath.
     *
     * @param parent the node under which the content should be imported; may not be null
     * @param resourceName the name of the XML file on the classpath
     * @param uuidBehavior the UUID behavior; see {@link ImportUUIDBehavior} for values
     * @throws RepositoryException if there is a problem importing the content
     * @throws IOException if the XML file could not be read
     */
    protected void importContent( Node parent,
                                  String resourceName,
                                  int uuidBehavior ) throws RepositoryException, IOException {
        InputStream stream = resourceStream(resourceName);
        assertThat(stream, is(notNullValue()));
        parent.getSession().getWorkspace().importXML(parent.getPath(), stream, uuidBehavior);
    }

    /**
     * Import under the supplied parent node the repository content in the XML file at the given location on the classpath.
     *
     * @param parentPath the path to the node under which the content should be imported; may not be null
     * @param resourceName the name of the XML file on the classpath
     * @param uuidBehavior the UUID behavior; see {@link ImportUUIDBehavior} for values
     * @throws RepositoryException if there is a problem importing the content
     * @throws IOException if the XML file could not be read
     */
    protected void importContent( String parentPath,
                                  String resourceName,
                                  int uuidBehavior ) throws RepositoryException, IOException {
        InputStream stream = resourceStream(resourceName);
        assertThat(stream, is(notNullValue()));
        session().getWorkspace().importXML(parentPath, stream, uuidBehavior);
    }

    /**
     * @return true if {@link #beforeEach()} should start the repository automatically; see {@link #startRepositoryAutomatically}
     */
    protected boolean startRepositoryAutomatically() {
        return startRepositoryAutomatically;
    }

    /**
     * Like {@link #resourceStream(String)}, but asserts that the resource exists.
     *
     * @param path the path of the classpath resource
     * @return the input stream to the content; never null
     */
    protected InputStream resource( String path ) {
        // Delegate to resourceStream(...) rather than duplicating the classloader lookup.
        InputStream stream = resourceStream(path);
        assertThat(stream, is(notNullValue()));
        return stream;
    }
}
package com.eaw1805.map;

import com.eaw1805.data.constants.NationConstants;
import com.eaw1805.data.constants.ProductionSiteConstants;
import com.eaw1805.data.constants.TerrainConstants;
import com.eaw1805.data.dto.common.CoordinateDTO;
import com.eaw1805.data.dto.common.SectorDTO;
import com.eaw1805.data.dto.converters.SectorConverter;
import com.eaw1805.data.model.Game;
import com.eaw1805.data.model.Nation;
import com.eaw1805.data.model.map.Sector;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import java.awt.*;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

/**
 * Loads the tiles of a map: converts the persisted {@link Sector} entities into DTOs, computes the
 * map bounds, applies fog-of-war visibility for the viewing nation, and selects the base tile plus
 * any overlay tiles (production sites, forts, epidemics, rebellions, borders) for every coordinate.
 */
public class TilesLoader implements TerrainConstants {

    /**
     * a log4j logger to print messages.
     */
    private static final Logger LOGGER = LogManager.getLogger(TilesLoader.class);

    /**
     * The maximum X coordinate.
     */
    private final transient int maxX;

    /**
     * The maximum Y coordinate.
     */
    private final transient int maxY;

    /**
     * The minimum X coordinate.
     */
    private final transient int minX;

    /**
     * The minimum Y coordinate.
     */
    private final transient int minY;

    /**
     * The color of each x,y coordinate. Selection is based on color of sector's owner; null for unowned sectors.
     */
    private final transient Color[][] mapNation;

    /**
     * The base image assigned to each coordinate.
     */
    private final transient String[][] sectorTiles;

    /**
     * The list of additional tiles that are required as image overlays.
     */
    private final transient List<CoordinateDTO> sectorOverlayTiles;

    /**
     * Indicates that alternative national borders for foreign nations should be drawn (fog-of-war borders).
     */
    private final transient boolean showFOWBorders;

    /**
     * Indicates that fog-of-war rules are de-activated (everything is rendered as visible).
     */
    private final transient boolean deactivateFOW;

    /**
     * Default constructor.
     *
     * @param game the game (for calculating winter).
     * @param sectorList list of sectors to draw.
     * @param viewer the nation viewing the map.
     * @param useFOWborders display alternative national borders for foreign nations on the map.
     * @param disableFOW de-activate FOW rules.
     */
    public TilesLoader(final Game game, final List<Sector> sectorList, final Nation viewer,
                       final boolean useFOWborders, final boolean disableFOW) {
        showFOWBorders = useFOWborders;
        deactivateFOW = disableFOW;
        final List<SectorDTO> sectors = new ArrayList<SectorDTO>();
        final Set<SectorDTO> sectorsVisible = new HashSet<SectorDTO>();
        int tempXmax = 0;
        int tempYmax = 0;
        int tempXmin = Integer.MAX_VALUE;
        int tempYmin = Integer.MAX_VALUE;

        // FOW strings are matched by substring: each sector's FOW field presumably stores
        // "*<nationId>*" tokens. The neutral viewer uses the bare "*" which matches any token.
        final String viewerToken;
        if (viewer.getId() == NationConstants.NATION_NEUTRAL) {
            viewerToken = "*";
        } else {
            viewerToken = "*" + viewer.getId() + "*";
        }

        // Convert every sector to a DTO, track the bounding box, and decide visibility:
        // ocean sectors, own sectors, sectors listed in the viewer's FOW, or everything when FOW is off.
        for (final Sector value : sectorList) {
            final SectorDTO convSector = SectorConverter.convert(value);
            sectors.add(convSector);
            tempXmax = Math.max(tempXmax, value.getPosition().getX());
            tempYmax = Math.max(tempYmax, value.getPosition().getY());
            tempXmin = Math.min(tempXmin, value.getPosition().getX());
            tempYmin = Math.min(tempYmin, value.getPosition().getY());
            if (value.getTerrain().getId() == TERRAIN_O
                    || value.getNation().getId() == viewer.getId()
                    || (value.getFow() != null && value.getFow().contains(viewerToken))
                    || deactivateFOW) {
                convSector.setVisible(true);
                sectorsVisible.add(convSector);
            }
        }
        LOGGER.debug("Loaded " + sectors.size() + " sectors.");
        // Coordinates are logged 1-based for human readability (internal storage is 0-based).
        LOGGER.debug("Map size is " + (tempXmin + 1) + "/" + (tempYmin + 1) + "..."
                + (tempXmax + 1) + "/" + (tempYmax + 1));
        maxX = tempXmax;
        maxY = tempYmax;
        minX = tempXmin;
        minY = tempYmin;

        final SectorDTO[][] mapSector = new SectorDTO[maxX + 1][maxY + 1];
        mapNation = new Color[maxX + 1][maxY + 1];
        sectorTiles = new String[maxX + 1][maxY + 1];
        // NOTE(review): sectorOverlays is written below but never read; looks like dead code — confirm.
        final CoordinateDTO[][][] sectorOverlays = new CoordinateDTO[maxX + 1][maxY + 1][TilesSelector.LAYER_BORDERS];

        // Iterate through all sectors, indexing them by coordinate and recording the owner tint color.
        for (SectorDTO sector : sectors) {
            final int coordX = sector.getX();
            final int coordY = sector.getY();
            mapSector[coordX][coordY] = sector;
            if (sector.getNationId() > 0) {
                // NOTE(review): toUpperCase() uses the default locale; hex digits are ASCII so this is
                // only a problem under locales such as Turkish — consider Locale.ROOT.
                mapNation[coordX][coordY] = Color.decode("#" + sector.getNationDTO().getColor().toUpperCase());
            } else {
                mapNation[coordX][coordY] = null;
            }
        }

        // Setup tiles selector
        final TilesSelector tilesSelector = new TilesSelector(mapSector, game, maxX, maxY, minX, minY);

        // Identify base image and accumulate overlay tiles for every coordinate inside the bounding box.
        // NOTE(review): assumes every coordinate within [minX..maxX]x[minY..maxY] has a sector
        // (thisSector would be null otherwise) — confirm the map is dense.
        for (int coordX = minX; coordX <= maxX; coordX++) {
            for (int coordY = minY; coordY <= maxY; coordY++) {
                final SectorDTO thisSector = mapSector[coordX][coordY];
                sectorOverlays[coordX][coordY][0] = new CoordinateDTO();
                if (thisSector.getTerrainId() == TERRAIN_O) {
                    sectorTiles[coordX][coordY] = tilesSelector.getOceanCoastTile(coordX, coordY);
                    // Check for off-map trade cities
                    if (thisSector.getTradeCity()) {
                        tilesSelector.addTCityTile(coordX, coordY);
                    }
                } else {
                    sectorTiles[coordX][coordY] = tilesSelector.getBaseTile(coordX, coordY);
                }

                // Production sites, trade cities and forts are only drawn on sectors the viewer can see.
                if (deactivateFOW || sectorsVisible.contains(thisSector)) {
                    if ((thisSector.getProductionSiteId() > 0) || (thisSector.getPopulation() > 6)) {
                        if (thisSector.getTradeCity()) {
                            tilesSelector.addTCityTile(coordX, coordY);
                        } else {
                            tilesSelector.addPSiteTile(coordX, coordY);
                        }
                        // Add fortifications
                        if (thisSector.getProductionSiteId() > ProductionSiteConstants.PS_BARRACKS) {
                            tilesSelector.addFortTile(coordX, coordY);
                        }
                    }
                }

                // Natural resources and status markers are added regardless of visibility.
                if (thisSector.getNatResId() > 0) {
                    tilesSelector.addNatResTile(coordX, coordY, thisSector.getNatResId());
                }
                if (thisSector.getEpidemic()) {
                    tilesSelector.addEpidemicTile(coordX, coordY);
                }
                if (thisSector.getRebelled()) {
                    tilesSelector.addRebellionTile(coordX, coordY);
                }
                if (thisSector.getConquered()) {
                    tilesSelector.addConquerTile(coordX, coordY);
                }
                // Owned sectors get a national border; the boolean controls the FOW border variant.
                if (thisSector.getNationId() != NationConstants.NATION_NEUTRAL) {
                    tilesSelector.addBorderName(deactivateFOW
                            || (showFOWBorders && sectorsVisible.contains(thisSector)), coordX, coordY);
                }
            }
        }

        // Get result of tile selection process
        sectorOverlayTiles = tilesSelector.getSectorTiles();
    }

    /**
     * Get the maximum X coordinate of the map.
     *
     * @return the maximum X coordinate of the map.
     */
    public int getMaxX() {
        return maxX;
    }

    /**
     * Get the maximum Y coordinate of the map.
     *
     * @return the maximum Y coordinate of the map.
     */
    public int getMaxY() {
        return maxY;
    }

    /**
     * Get the minimum X coordinate of the map.
     *
     * @return the minimum X coordinate of the map.
     */
    public int getMinX() {
        return minX;
    }

    /**
     * Get the minimum Y coordinate of the map.
     *
     * @return the minimum Y coordinate of the map.
     */
    public int getMinY() {
        return minY;
    }

    /**
     * Get the tile tint color.
     *
     * @param coordX the X coordinate.
     * @param coordY the Y coordinate.
     * @return the RGB color for tinting the coordinate, or null if the sector is unowned.
     */
    public Color getTintColor(final int coordX, final int coordY) {
        return mapNation[coordX][coordY];
    }

    /**
     * Get the base tiles.
     *
     * @param coordX the X coordinate.
     * @param coordY the Y coordinate.
     * @return the filename of the base tile.
     */
    public String getSectorTile(final int coordX, final int coordY) {
        return sectorTiles[coordX][coordY];
    }

    /**
     * Get overlay tiles.
     *
     * @return list of Coordinates with overlay tiles.
     */
    public List<CoordinateDTO> getSectorOverlayTiles() {
        return sectorOverlayTiles;
    }

}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.pulsar.client.api;

import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;

import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;

import org.apache.pulsar.client.impl.PartitionedProducerImpl;
import org.apache.pulsar.common.naming.TopicName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;

import com.google.common.collect.Sets;

import io.netty.util.concurrent.DefaultThreadFactory;

/**
 * End-to-end tests for producing to and consuming from partitioned topics: routing modes
 * (round-robin, single-partition, key-based), pause/resume, async receive, and error handling.
 */
public class PartitionedProducerConsumerTest extends ProducerConsumerBase {
    private static final Logger log = LoggerFactory.getLogger(PartitionedProducerConsumerTest.class);

    // Single-threaded executor used to chain async receives in receiveAsync() without
    // recursing on the client's callback thread.
    private ExecutorService executor;

    /** Starts the embedded broker and a worker thread for async-receive chaining. */
    @BeforeClass
    @Override
    protected void setup() throws Exception {
        super.internalSetup();
        super.producerBaseSetup();
        executor = Executors.newFixedThreadPool(1, new DefaultThreadFactory("PartitionedProducerConsumerTest"));
    }

    /** Tears down the broker, then stops the helper executor. */
    @AfterClass
    @Override
    protected void cleanup() throws Exception {
        super.internalCleanup();
        executor.shutdown();
    }

    /**
     * Produces 10 messages with round-robin routing and verifies each is received exactly once
     * (no ordering guarantee across partitions, hence the duplicate-only check).
     */
    @Test(timeOut = 30000)
    public void testRoundRobinProducer() throws Exception {
        log.info("-- Starting {} test --", methodName);

        int numPartitions = 4;
        TopicName topicName = TopicName.get("persistent://my-property/my-ns/my-partitionedtopic1-" + System.currentTimeMillis());

        admin.topics().createPartitionedTopic(topicName.toString(), numPartitions);

        Producer<byte[]> producer = pulsarClient.newProducer().topic(topicName.toString())
                .enableBatching(false)
                .messageRoutingMode(MessageRoutingMode.RoundRobinPartition).create();

        Consumer<byte[]> consumer = pulsarClient.newConsumer().topic(topicName.toString())
                .subscriptionName("my-partitioned-subscriber").subscribe();

        for (int i = 0; i < 10; i++) {
            String message = "my-message-" + i;
            producer.send(message.getBytes());
        }

        Message<byte[]> msg = null;
        Set<String> messageSet = Sets.newHashSet();

        for (int i = 0; i < 10; i++) {
            msg = consumer.receive(5, TimeUnit.SECONDS);
            Assert.assertNotNull(msg, "Message should not be null");
            consumer.acknowledge(msg);
            String receivedMessage = new String(msg.getData());
            log.debug("Received message: [{}]", receivedMessage);
            Assert.assertTrue(messageSet.add(receivedMessage), "Message " + receivedMessage + " already received");
        }

        producer.close();
        consumer.unsubscribe();
        consumer.close();
        admin.topics().deletePartitionedTopic(topicName.toString());

        log.info("-- Exiting {} test --", methodName);
    }

    /**
     * Verifies that a partitioned topic whose name contains URL-special characters can still be
     * looked up and connected to by a producer.
     */
    @Test(timeOut = 30000)
    public void testPartitionedTopicNameWithSpecialCharacter() throws Exception {
        log.info("-- Starting {} test --", methodName);

        int numPartitions = 4;
        final String specialCharacter = "! * ' ( ) ; : @ & = + $ , \\ ? % # [ ]";
        TopicName topicName = TopicName
                .get("persistent://my-property/my-ns/my-partitionedtopic1-" + System.currentTimeMillis() + specialCharacter);
        admin.topics().createPartitionedTopic(topicName.toString(), numPartitions);

        // Try to create producer which does lookup and create connection with broker
        Producer<byte[]> producer = pulsarClient.newProducer().topic(topicName.toString())
                .messageRoutingMode(MessageRoutingMode.RoundRobinPartition).create();
        producer.close();
        admin.topics().deletePartitionedTopic(topicName.toString());
        log.info("-- Exiting {} test --", methodName);
    }

    /**
     * Produces 10 messages with single-partition routing; since all land on one partition,
     * strict ordering can be asserted via testMessageOrderAndDuplicates.
     */
    @Test(timeOut = 30000)
    public void testSinglePartitionProducer() throws Exception {
        log.info("-- Starting {} test --", methodName);

        int numPartitions = 4;
        TopicName topicName = TopicName
                .get("persistent://my-property/my-ns/my-partitionedtopic2-" + System.currentTimeMillis());

        admin.topics().createPartitionedTopic(topicName.toString(), numPartitions);

        Producer<byte[]> producer = pulsarClient.newProducer().topic(topicName.toString())
                .messageRoutingMode(MessageRoutingMode.SinglePartition).create();

        Consumer<byte[]> consumer = pulsarClient.newConsumer().topic(topicName.toString())
                .subscriptionName("my-partitioned-subscriber").subscribe();

        for (int i = 0; i < 10; i++) {
            String message = "my-message-" + i;
            producer.send(message.getBytes());
        }

        Message<byte[]> msg = null;
        Set<String> messageSet = Sets.newHashSet();

        for (int i = 0; i < 10; i++) {
            msg = consumer.receive(5, TimeUnit.SECONDS);
            Assert.assertNotNull(msg, "Message should not be null");
            consumer.acknowledge(msg);
            String receivedMessage = new String(msg.getData());
            log.debug("Received message: [{}]", receivedMessage);
            String expectedMessage = "my-message-" + i;
            testMessageOrderAndDuplicates(messageSet, receivedMessage, expectedMessage);
        }

        producer.close();
        consumer.unsubscribe();
        consumer.close();
        admin.topics().deletePartitionedTopic(topicName.toString());

        log.info("-- Exiting {} test --", methodName);
    }

    /**
     * Produces two key-grouped batches (default key-hash routing) and verifies per-key ordering
     * via testKeyBasedOrder.
     */
    @Test(timeOut = 30000)
    public void testKeyBasedProducer() throws Exception {
        log.info("-- Starting {} test --", methodName);

        int numPartitions = 4;
        TopicName topicName = TopicName
                .get("persistent://my-property/my-ns/my-partitionedtopic3-" + System.currentTimeMillis());
        String dummyKey1 = "dummykey1";
        String dummyKey2 = "dummykey2";

        admin.topics().createPartitionedTopic(topicName.toString(), numPartitions);

        Producer<byte[]> producer = pulsarClient.newProducer().topic(topicName.toString()).create();
        Consumer<byte[]> consumer = pulsarClient.newConsumer().topic(topicName.toString())
                .subscriptionName("my-partitioned-subscriber").subscribe();

        // Messages 0-4 share key1, 5-9 share key2; same-key messages hash to the same partition.
        for (int i = 0; i < 5; i++) {
            String message = "my-message-" + i;
            producer.newMessage().key(dummyKey1).value(message.getBytes()).send();
        }
        for (int i = 5; i < 10; i++) {
            String message = "my-message-" + i;
            producer.newMessage().key(dummyKey2).value(message.getBytes()).send();
        }

        Set<String> messageSet = Sets.newHashSet();

        for (int i = 0; i < 10; i++) {
            Message<byte[]> msg = consumer.receive(5, TimeUnit.SECONDS);
            Assert.assertNotNull(msg, "Message should not be null");
            consumer.acknowledge(msg);
            String receivedMessage = new String(msg.getData());
            log.debug("Received message: [{}]", receivedMessage);
            testKeyBasedOrder(messageSet, receivedMessage);
        }

        producer.close();
        consumer.unsubscribe();
        consumer.close();
        admin.topics().deletePartitionedTopic(topicName.toString());

        log.info("-- Exiting {} test --", methodName);
    }

    /**
     * Asserts ordering within a key group: message N must arrive after message N-1
     * (indices 0 and 5 start their respective groups), and no message arrives twice.
     */
    private void testKeyBasedOrder(Set<String> messageSet, String message) {
        int index = Integer.parseInt(message.substring(message.lastIndexOf('-') + 1));
        if (index != 0 && index != 5) {
            Assert.assertTrue(messageSet.contains("my-message-" + (index - 1)),
                    "Message my-message-" + (index - 1) + " should come before my-message-" + index);
        }
        Assert.assertTrue(messageSet.add(message), "Received duplicate message " + message);
    }

    /**
     * Verifies that a paused message-listener consumer delivers at most the pre-fetched
     * receiverQueueSize per partition, and that resume() releases the remaining messages.
     */
    @Test(timeOut = 100000)
    public void testPauseAndResume() throws Exception {
        log.info("-- Starting {} test --", methodName);

        int numPartitions = 2;
        String topicName = TopicName
                .get("persistent://my-property/my-ns/my-partitionedtopic-pr-" + System.currentTimeMillis()).toString();
        admin.topics().createPartitionedTopic(topicName, numPartitions);

        int receiverQueueSize = 20; // number of permits broker has per partition when consumer initially subscribes
        int numMessages = receiverQueueSize * numPartitions;

        // The latch is swapped for the second batch, hence the AtomicReference.
        AtomicReference<CountDownLatch> latch = new AtomicReference<>(new CountDownLatch(numMessages));
        AtomicInteger received = new AtomicInteger();

        Consumer<byte[]> consumer = pulsarClient.newConsumer().receiverQueueSize(receiverQueueSize)
                .topic(topicName)
                .subscriptionName("my-partitioned-subscriber").messageListener((c1, msg) -> {
                    Assert.assertNotNull(msg, "Message cannot be null");
                    String receivedMessage = new String(msg.getData());
                    log.debug("Received message [{}] in the listener", receivedMessage);
                    c1.acknowledgeAsync(msg);
                    received.incrementAndGet();
                    latch.get().countDown();
                }).subscribe();

        Producer<byte[]> producer = pulsarClient.newProducer().topic(topicName).create();

        consumer.pause();

        // Send twice the pre-fetch capacity so some messages must be held back while paused.
        for (int i = 0; i < numMessages * 2; i++) producer.send(("my-message-" + i).getBytes());

        log.info("Waiting for message listener to ack " + numMessages + " messages");
        // NOTE(review): numMessages (40) doubles as the timeout in seconds here — intentional but
        // easy to misread; the listener is expected to still receive the pre-fetched batch while paused.
        assertEquals(latch.get().await(numMessages, TimeUnit.SECONDS), true, "Timed out waiting for message listener acks");

        log.info("Giving message listener an opportunity to receive messages while paused");
        Thread.sleep(2000);     // hopefully this is long enough
        assertEquals(received.intValue(), numMessages, "Consumer received messages while paused");

        latch.set(new CountDownLatch(numMessages));

        consumer.resume();

        log.info("Waiting for message listener to ack all messages");
        assertEquals(latch.get().await(numMessages, TimeUnit.SECONDS), true, "Timed out waiting for message listener acks");

        consumer.close();
        producer.close();

        log.info("-- Exiting {} test --", methodName);
    }

    /**
     * Verifies the client rejects invalid call sequences: acknowledging a foreign message,
     * and using a consumer/producer after close().
     */
    @Test(timeOut = 30000)
    public void testInvalidSequence() throws Exception {
        log.info("-- Starting {} test --", methodName);

        int numPartitions = 4;
        TopicName topicName = TopicName
                .get("persistent://my-property/my-ns/my-partitionedtopic4-" + System.currentTimeMillis());

        admin.topics().createPartitionedTopic(topicName.toString(), numPartitions);

        Consumer<byte[]> consumer = pulsarClient.newConsumer().topic(topicName.toString())
                .subscriptionName("my-subscriber-name").subscribe();

        try {
            // NOTE(review): MessageBuilder is a deprecated API in newer client versions.
            Message<byte[]> msg = MessageBuilder.create().setContent("InvalidMessage".getBytes()).build();
            consumer.acknowledge(msg);
        } catch (PulsarClientException.InvalidMessageException e) {
            // ok
        }

        consumer.close();

        try {
            consumer.receive();
            Assert.fail("Should fail");
        } catch (PulsarClientException.AlreadyClosedException e) {
            // ok
        }

        try {
            consumer.unsubscribe();
            Assert.fail("Should fail");
        } catch (PulsarClientException.AlreadyClosedException e) {
            // ok
        }

        Producer<byte[]> producer = pulsarClient.newProducer().topic(topicName.toString())
                .enableBatching(false)
                .messageRoutingMode(MessageRoutingMode.SinglePartition)
                .create();
        producer.close();

        try {
            producer.send("message".getBytes());
            Assert.fail("Should fail");
        } catch (PulsarClientException.AlreadyClosedException e) {
            // ok
        }

        admin.topics().deletePartitionedTopic(topicName.toString());
    }

    /**
     * Verifies null-argument validation in the builders, then exercises a normal
     * produce/consume/cumulative-ack round trip.
     */
    @Test(timeOut = 30000)
    public void testSillyUser() throws Exception {
        int numPartitions = 4;
        TopicName topicName = TopicName
                .get("persistent://my-property/my-ns/my-partitionedtopic5-" + System.currentTimeMillis());

        admin.topics().createPartitionedTopic(topicName.toString(), numPartitions);

        Producer<byte[]> producer = null;
        Consumer<byte[]> consumer = null;

        try {
            pulsarClient.newProducer().messageRouter(null);
            Assert.fail("should fail");
        } catch (NullPointerException e) {
            // ok
        }

        try {
            pulsarClient.newProducer().messageRoutingMode(null);
            Assert.fail("should fail");
        } catch (NullPointerException e) {
            // ok
        }

        try {
            // NOTE(review): if create()/subscribe() throws here, the finally block NPEs on
            // producer/consumer.close() and masks the original failure.
            producer = pulsarClient.newProducer().topic(topicName.toString()).enableBatching(false)
                    .messageRoutingMode(MessageRoutingMode.SinglePartition).create();
            consumer = pulsarClient.newConsumer().topic(topicName.toString()).subscriptionName("my-sub").subscribe();
            producer.send("message1".getBytes());
            producer.send("message2".getBytes());
            /* Message<byte[]> msg1 = */ consumer.receive();
            Message<byte[]> msg2 = consumer.receive();
            consumer.acknowledgeCumulative(msg2);
        } finally {
            producer.close();
            consumer.unsubscribe();
            consumer.close();
        }

        admin.topics().deletePartitionedTopic(topicName.toString());
    }

    /**
     * Verifies that after deleting a partitioned topic, a newly created producer no longer
     * resolves it as partitioned (i.e. is not a PartitionedProducerImpl).
     */
    @Test(timeOut = 30000)
    public void testDeletePartitionedTopic() throws Exception {
        int numPartitions = 4;
        TopicName topicName = TopicName
                .get("persistent://my-property/my-ns/my-partitionedtopic6-" + System.currentTimeMillis());

        admin.topics().createPartitionedTopic(topicName.toString(), numPartitions);

        Producer<byte[]> producer = pulsarClient.newProducer().topic(topicName.toString()).create();
        Consumer<byte[]> consumer = pulsarClient.newConsumer().topic(topicName.toString()).subscriptionName("my-sub")
                .subscribe();
        consumer.unsubscribe();
        consumer.close();
        producer.close();

        admin.topics().deletePartitionedTopic(topicName.toString());

        Producer<byte[]> producer1 = pulsarClient.newProducer().topic(topicName.toString()).create();
        if (producer1 instanceof PartitionedProducerImpl) {
            Assert.fail("should fail since partitioned topic was deleted");
        }
    }

    /**
     * Produces 100 messages round-robin and consumes them via chained receiveAsync() on a shared
     * subscription, then checks the produced and consumed sets are identical.
     */
    @Test(timeOut = 30000)
    public void testAsyncPartitionedProducerConsumer() throws Exception {
        log.info("-- Starting {} test --", methodName);

        final int totalMsg = 100;
        final Set<String> produceMsgs = Sets.newHashSet();
        final Set<String> consumeMsgs = Sets.newHashSet();

        int numPartitions = 4;
        TopicName topicName = TopicName
                .get("persistent://my-property/my-ns/my-partitionedtopic1-" + System.currentTimeMillis());

        admin.topics().createPartitionedTopic(topicName.toString(), numPartitions);

        Producer<byte[]> producer = pulsarClient.newProducer().topic(topicName.toString())
                .enableBatching(false)
                .messageRoutingMode(MessageRoutingMode.RoundRobinPartition).create();

        Consumer<byte[]> consumer = pulsarClient.newConsumer().topic(topicName.toString())
                .subscriptionName("my-partitioned-subscriber").subscriptionType(SubscriptionType.Shared).subscribe();

        // produce messages
        for (int i = 0; i < totalMsg; i++) {
            String message = "my-message-" + i;
            produceMsgs.add(message);
            producer.send(message.getBytes());
        }

        log.info(" start receiving messages :");

        // receive messages
        CountDownLatch latch = new CountDownLatch(totalMsg);
        receiveAsync(consumer, totalMsg, 0, latch, consumeMsgs, executor);

        latch.await();

        // verify message produced correctly
        assertEquals(produceMsgs.size(), totalMsg);
        // verify produced and consumed messages must be exactly same
        produceMsgs.removeAll(consumeMsgs);
        assertTrue(produceMsgs.isEmpty());

        producer.close();
        consumer.unsubscribe();
        consumer.close();
        admin.topics().deletePartitionedTopic(topicName.toString());

        log.info("-- Exiting {} test --", methodName);
    }

    /**
     * Same as testAsyncPartitionedProducerConsumer but with receiverQueueSize(1), exercising the
     * minimal pre-fetch path.
     */
    @Test(timeOut = 30000)
    public void testAsyncPartitionedProducerConsumerQueueSizeOne() throws Exception {
        log.info("-- Starting {} test --", methodName);

        final int totalMsg = 100;
        final Set<String> produceMsgs = Sets.newHashSet();
        final Set<String> consumeMsgs = Sets.newHashSet();

        int numPartitions = 4;
        TopicName topicName = TopicName
                .get("persistent://my-property/my-ns/my-partitionedtopic1-" + System.currentTimeMillis());

        admin.topics().createPartitionedTopic(topicName.toString(), numPartitions);

        Producer<byte[]> producer = pulsarClient.newProducer().topic(topicName.toString())
                .enableBatching(false)
                .messageRoutingMode(MessageRoutingMode.RoundRobinPartition).create();

        Consumer<byte[]> consumer = pulsarClient.newConsumer().topic(topicName.toString())
                .subscriptionName("my-partitioned-subscriber").receiverQueueSize(1).subscribe();

        // produce messages
        for (int i = 0; i < totalMsg; i++) {
            String message = "my-message-" + i;
            produceMsgs.add(message);
            producer.send(message.getBytes());
        }

        log.info(" start receiving messages :");

        // receive messages
        CountDownLatch latch = new CountDownLatch(totalMsg);
        receiveAsync(consumer, totalMsg, 0, latch, consumeMsgs, executor);

        latch.await();

        // verify message produced correctly
        assertEquals(produceMsgs.size(), totalMsg);
        // verify produced and consumed messages must be exactly same
        produceMsgs.removeAll(consumeMsgs);
        assertTrue(produceMsgs.isEmpty());

        producer.close();
        consumer.unsubscribe();
        consumer.close();
        admin.topics().deletePartitionedTopic(topicName.toString());

        log.info("-- Exiting {} test --", methodName);
    }

    /**
     * It verifies that consumer consumes from all the partitions fairly.
     *
     * Fills the shared receive queue from partition-0, then produces on partition-1 and checks
     * that messages from the initially-paused partition still get through.
     *
     * @throws Exception
     */
    @Test(timeOut = 30000)
    public void testFairDistributionForPartitionConsumers() throws Exception {
        log.info("-- Starting {} test --", methodName);

        final int numPartitions = 2;
        final String topicName = "persistent://my-property/my-ns/my-topic-" + System.currentTimeMillis();
        final String producer1Msg = "producer1";
        final String producer2Msg = "producer2";
        final int queueSize = 10;

        admin.topics().createPartitionedTopic(topicName, numPartitions);

        // Each producer targets one concrete partition directly via the "-partition-N" suffix.
        Producer<byte[]> producer1 = pulsarClient.newProducer().topic(topicName + "-partition-0")
                .messageRoutingMode(MessageRoutingMode.RoundRobinPartition).create();
        Producer<byte[]> producer2 = pulsarClient.newProducer().topic(topicName + "-partition-1")
                .messageRoutingMode(MessageRoutingMode.RoundRobinPartition).create();
        Consumer<byte[]> consumer = pulsarClient.newConsumer().topic(topicName)
                .subscriptionName("my-partitioned-subscriber").receiverQueueSize(queueSize).subscribe();

        int partition2Msgs = 0;

        // produce messages on Partition-1: which will makes partitioned-consumer's queue full
        for (int i = 0; i < queueSize - 1; i++) {
            producer1.send((producer1Msg + "-" + i).getBytes());
        }

        Thread.sleep(1000);

        // now queue is full : so, partition-2 consumer will be pushed to paused-consumer list
        for (int i = 0; i < 5; i++) {
            producer2.send((producer2Msg + "-" + i).getBytes());
        }

        // now, Queue should take both partition's messages
        // also: we will keep producing messages to partition-1
        int produceMsgInPartition1AfterNumberOfConsumeMessages = 2;
        for (int i = 0; i < 3 * queueSize; i++) {
            Message<byte[]> msg = consumer.receive();
            partition2Msgs += (new String(msg.getData())).startsWith(producer2Msg) ? 1 : 0;
            if (i >= produceMsgInPartition1AfterNumberOfConsumeMessages) {
                producer1.send(producer1Msg.getBytes());
                Thread.sleep(100);
            }
        }

        // 4 of the 5 partition-1 messages must have been interleaved despite the full queue.
        assertTrue(partition2Msgs >= 4);
        producer1.close();
        producer2.close();
        consumer.unsubscribe();
        consumer.close();
        admin.topics().deletePartitionedTopic(topicName);

        log.info("-- Exiting {} test --", methodName);
    }

    /**
     * Recursively chains receiveAsync() calls until totalMessage messages are consumed, adding each
     * payload to consumeMsg, acking it, and counting down the latch. The next receive is scheduled
     * on the supplied executor to avoid deep recursion on the client callback thread.
     *
     * @param consumer the consumer to receive from
     * @param totalMessage total number of messages expected
     * @param currentMessage zero-based index of the message handled by this invocation
     * @param latch counted down once per consumed message
     * @param consumeMsg accumulator for consumed payloads (compared against produced ones)
     * @param executor executor on which the next receive step is scheduled
     * @throws PulsarClientException if scheduling the receive fails
     */
    private void receiveAsync(Consumer<byte[]> consumer, int totalMessage, int currentMessage, CountDownLatch latch,
            final Set<String> consumeMsg, ExecutorService executor) throws PulsarClientException {
        if (currentMessage < totalMessage) {
            CompletableFuture<Message<byte[]>> future = consumer.receiveAsync();
            future.handle((msg, exception) -> {
                if (exception == null) {
                    // add message to consumer-queue to verify with produced messages
                    consumeMsg.add(new String(msg.getData()));
                    try {
                        consumer.acknowledge(msg);
                    } catch (PulsarClientException e1) {
                        fail("message acknowledge failed", e1);
                    }
                    // consume next message
                    executor.execute(() -> {
                        try {
                            receiveAsync(consumer, totalMessage, currentMessage + 1, latch, consumeMsg, executor);
                        } catch (PulsarClientException e) {
                            fail("message receive failed", e);
                        }
                    });
                    latch.countDown();
                }
                return null;
            });
        }
    }
}
/* * Copyright (C) 2015 Willi Ye * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.grarak.kerneladiutor.fragments.kernel; import android.content.Context; import android.os.Bundle; import android.os.Handler; import android.support.v7.widget.AppCompatCheckBox; import android.support.v7.widget.AppCompatTextView; import android.view.View; import android.widget.CheckBox; import android.widget.LinearLayout; import android.widget.ProgressBar; import com.grarak.kerneladiutor.R; import com.grarak.kerneladiutor.elements.DAdapter; import com.grarak.kerneladiutor.elements.DDivider; import com.grarak.kerneladiutor.elements.cards.CardViewItem; import com.grarak.kerneladiutor.elements.cards.PopupCardView; import com.grarak.kerneladiutor.elements.cards.SeekBarCardView; import com.grarak.kerneladiutor.elements.cards.SwitchCardView; import com.grarak.kerneladiutor.elements.cards.UsageCardView; import com.grarak.kerneladiutor.fragments.PathReaderFragment; import com.grarak.kerneladiutor.fragments.RecyclerViewFragment; import com.grarak.kerneladiutor.fragments.ViewPagerFragment; import com.grarak.kerneladiutor.utils.Constants; import com.grarak.kerneladiutor.utils.Utils; import com.grarak.kerneladiutor.utils.kernel.CPU; import com.grarak.kerneladiutor.utils.root.Control; import com.kerneladiutor.library.root.RootFile; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Locale; /** * Created by willi on 07.04.15. 
*/
public class CPUFragment extends ViewPagerFragment implements Constants {

    // Handle to the live instance, set in init(); the nested CPUPart uses it to
    // switch pages and to tell GovernorPart which core/cluster to show.
    private static CPUFragment cpuFragment;

    private CPUPart cpuPart;
    private GovernorPart governorPart;

    // Core index and cluster tag ("big"/"little"/"") whose governor tunables the
    // second page should display; written by CPUPart before switching pages.
    private int core;
    private String cluster = "";

    @Override
    public void preInit(Bundle savedInstanceState) {
        super.preInit(savedInstanceState);
        showTabs(false);
    }

    @Override
    public void init(Bundle savedInstanceState) {
        super.init(savedInstanceState);
        cpuFragment = this;
        allowSwipe(false);
        // Reuse the page fragments across re-inits; create each only once.
        addFragment(new ViewPagerItem(cpuPart == null ? cpuPart = new CPUPart() : cpuPart, null));
        addFragment(new ViewPagerItem(governorPart == null ? governorPart = new GovernorPart()
                : governorPart, null));
    }

    @Override
    public void onSwipe(int page) {
        super.onSwipe(page);
        // Swiping is only enabled while on the governor-tunables page (index 1).
        allowSwipe(page == 1);
    }

    @Override
    public boolean onBackPressed() {
        // Back on the tunables page returns to the CPU page instead of leaving.
        if (getCurrentPage() == 1) {
            setCurrentItem(0);
            return true;
        }
        return false;
    }

    /**
     * First pager page: builds one card per CPU feature the device's kernel
     * exposes (frequencies, governors, MSM-Limiter, boost drivers, ...).
     */
    public static class CPUPart extends RecyclerViewFragment
            implements View.OnClickListener,
            PopupCardView.DPopupCard.OnDPopupCardListener,
            CardViewItem.DCardView.OnDCardListener,
            SeekBarCardView.DSeekBarCard.OnDSeekBarCardListener,
            SwitchCardView.DSwitchCard.OnDSwitchCardListener {

        // Scratch collections shared by the *Init() builders below; freqInit(),
        // governorInit() and cpuBoostInit() clear and refill "views".
        List<DAdapter.DView> views = new ArrayList<>();
        List<String> freqs = new ArrayList<>();

        private UsageCardView.DUsageCard mUsageCard;
        private CardViewItem.DCardView mTempCard;

        // Per-core monitor widgets for the big (or only) cluster.
        private AppCompatCheckBox[] mCoreCheckBox;
        private ProgressBar[] mCoreProgressBar;
        private AppCompatTextView[] mCoreUsageText;
        private AppCompatTextView[] mCoreFreqText;

        private PopupCardView.DPopupCard mMaxFreqCard, mMinFreqCard, mMaxScreenOffFreqCard,
                mMSM_LimiterResumeMaxFreqNoPerCoreCard, mMSM_LimiterSuspendMinFreqNoPerCoreCard,
                mMSM_LimiterSuspendMaxFreqNoPerCoreCard;
        // Per-core MSM-Limiter cards; only allocated when per-core control is active.
        private PopupCardView.DPopupCard mMSM_LimiterResumeMaxFreqCard[],
                mMSM_LimiterSuspendMinFreqCard[], mMSM_LimiterSuspendMaxFreqCard[];
        private PopupCardView.DPopupCard mGovernorCard;
        private PopupCardView.DPopupCard mMSM_LimiterGovernorNoPerCoreCard,
                mMSM_LimiterGovernorPerCoreCard[];
        private CardViewItem.DCardView mGovernorTunableNoPerCoreCard;
        private CardViewItem.DCardView mGovernorTunableCoreCard[];

        // Per-core monitor widgets for the LITTLE cluster (big.LITTLE devices only).
        private AppCompatCheckBox[] mCoreCheckBoxLITTLE;
        private ProgressBar[] mCoreProgressBarLITTLE;
        private AppCompatTextView[] mCoreUsageTextLITTLE;
        private AppCompatTextView[] mCoreFreqTextLITTLE;
        private PopupCardView.DPopupCard mMaxFreqLITTLECard, mMinFreqLITTLECard,
                mMaxScreenOffFreqLITTLECard;
        private PopupCardView.DPopupCard mGovernorLITTLECard;
        private CardViewItem.DCardView mGovernorTunableLITTLECard;

        // "Other" section cards; each is created only if the kernel supports it.
        private PopupCardView.DPopupCard mMcPowerSavingCard;
        private SwitchCardView.DSwitchCard mPowerSavingWqCard;
        private PopupCardView.DPopupCard mCFSSchedulerCard;
        private SwitchCardView.DSwitchCard mCpuQuietEnableCard;
        private PopupCardView.DPopupCard mCpuQuietGovernorCard;
        private SwitchCardView.DSwitchCard mCpuBoostEnableCard;
        private SwitchCardView.DSwitchCard mCpuBoostDebugMaskCard;
        private SeekBarCardView.DSeekBarCard mCpuBoostMsCard;
        private PopupCardView.DPopupCard mCpuBoostSyncThresholdCard;
        private SeekBarCardView.DSeekBarCard mCpuBoostInputMsCard;
        private PopupCardView.DPopupCard[] mCpuBoostInputFreqCard;
        private SwitchCardView.DSwitchCard mCpuBoostWakeupCard, mCpuInputBoostEnableCard;
        private SwitchCardView.DSwitchCard mCpuBoostHotplugCard;
        private SwitchCardView.DSwitchCard mMSM_Limiter_EnableCard, mPerCoreControlCard;
        private SwitchCardView.DSwitchCard mCpuTouchBoostCard;
        private SeekBarCardView.DSeekBarCard mAlu_T_BoostMsCard;
        private SeekBarCardView.DSeekBarCard mAlu_T_BoostMiiCard;
        private SeekBarCardView.DSeekBarCard mAlu_T_BoostCpusCard;
        private PopupCardView.DPopupCard mAlu_T_BoostFreqCard;

        @Override
        public String getClassName() {
            return CPUFragment.class.getSimpleName();
        }

        // Builds every card whose backing kernel interface is present on this device.
        @Override
        public void init(Bundle savedInstanceState) {
            super.init(savedInstanceState);
            usageInit();
            if (CPU.hasTemp()) tempInit();
            if (CPU.getFreqs() != null) {
                if (CPU.isBigLITTLE()) {
                    DDivider bigDivider = new DDivider();
                    // ARM big.LITTLE naming: "big" is conventionally lowercase.
                    bigDivider.setText(getString(R.string.big).toLowerCase(Locale.getDefault()));
                    addView(bigDivider);
                }
                coreInit();
                freqInit();
            }
            if (CPU.getAvailableGovernors() != null) governorInit();
            DDivider othersDivider = null;
            if (CPU.isBigLITTLE()) {
                DDivider LITTLEDivider = new DDivider();
                // ...and "LITTLE" is conventionally uppercase.
                LITTLEDivider.setText(getString(R.string.little).toUpperCase(Locale.getDefault()));
                addView(LITTLEDivider);
                if (CPU.getFreqs(CPU.getLITTLEcore()) != null) {
                    coreLITTLEInit();
                    freqLITTLEInit();
                }
                if (CPU.getAvailableGovernors(CPU.getLITTLEcore()) != null) governorLITTLEInit();
                othersDivider = new DDivider();
                othersDivider.setText(getString(R.string.other));
                addView(othersDivider);
            }
            int count = getCount();
            if (CPU.hasMcPowerSaving()) mcPowerSavingInit();
            if (CPU.hasPowerSavingWq()) powerSavingWqInit();
            if (CPU.hasCFSScheduler()) cfsSchedulerInit();
            if (CPU.hasAlu_T_Boost()) Alu_T_BoostInit();
            if (CPU.hasCpuQuiet()) cpuQuietInit();
            if (CPU.hasCpuBoost()) cpuBoostInit();
            if (CPU.hasCpuTouchBoost()) cpuTouchBoostInit();
            // Drop the "other" divider again when nothing was added after it
            // (or when the next view is itself just another divider).
            if (othersDivider != null
                    && (count == getCount() || getView(count) instanceof DDivider))
                removeView(othersDivider);
        }

        // CPU load meter card; values are pushed in by the refresh cycle.
        private void usageInit() {
            mUsageCard = new UsageCardView.DUsageCard();
            mUsageCard.setText(getString(R.string.cpu_usage));
            addView(mUsageCard);
        }

        // CPU temperature card; description re-set in onRefresh().
        private void tempInit() {
            mTempCard = new CardViewItem.DCardView();
            mTempCard.setTitle(getString(R.string.cpu_temp));
            mTempCard.setDescription(CPU.getTemp());
            addView(mTempCard);
        }

        // One checkbox + usage bar + freq label per big-cluster core, all inside
        // a single "current frequency" card.
        private void coreInit() {
            LinearLayout layout = new LinearLayout(getActivity());
            layout.setOrientation(LinearLayout.VERTICAL);
            mCoreCheckBox = new AppCompatCheckBox[CPU.getBigCoreRange().size()];
            mCoreProgressBar = new ProgressBar[mCoreCheckBox.length];
            mCoreUsageText = new AppCompatTextView[mCoreCheckBox.length];
            mCoreFreqText = new AppCompatTextView[mCoreCheckBox.length];
            for (int i = 0; i < mCoreCheckBox.length; i++) {
                // NOTE(review): "inflater" and "container" appear to be fields
                // inherited from RecyclerViewFragment -- confirm they are set here.
                View view = inflater.inflate(R.layout.coreview, container, false);
                mCoreCheckBox[i] = (AppCompatCheckBox)
                        view.findViewById(R.id.core_checkbox);
                mCoreCheckBox[i].setText(String.format(getString(R.string.core), i + 1));
                mCoreCheckBox[i].setOnClickListener(this);
                mCoreProgressBar[i] = (ProgressBar) view.findViewById(R.id.progressbar);
                mCoreProgressBar[i].setMax(CPU.getFreqs().size());
                mCoreUsageText[i] = (AppCompatTextView) view.findViewById(R.id.usage);
                mCoreFreqText[i] = (AppCompatTextView) view.findViewById(R.id.freq);
                layout.addView(view);
            }
            CardViewItem.DCardView coreCard = new CardViewItem.DCardView();
            coreCard.setTitle(getString(R.string.current_freq));
            coreCard.setView(layout);
            addView(coreCard);
        }

        // Frequency cards: either the plain min/max pair or the MSM-Limiter
        // variants (global or per-core), depending on what is active.
        private void freqInit() {
            views.clear();
            freqs.clear();
            if (CPU.hasMSM_Limiter() && CPU.isMSM_LimiterActive()) {
                DDivider mMSM_LimiterHeaderCard = new DDivider();
                if (CPU.hasMSM_Limiter_Version())
                    mMSM_LimiterHeaderCard.setText("MSM_Limiter: Version "
                            + Double.toString(CPU.getMSM_Limiter_Version()));
                else mMSM_LimiterHeaderCard.setText("MSM_Limiter");
                addView(mMSM_LimiterHeaderCard);
            }
            if (CPU.hasMSM_Limiter()) {
                mMSM_Limiter_EnableCard = new SwitchCardView.DSwitchCard();
                mMSM_Limiter_EnableCard.setTitle(getString(R.string.cpu_msm_limiter));
                mMSM_Limiter_EnableCard.setDescription(getString(R.string.cpu_msm_limiter_summary));
                mMSM_Limiter_EnableCard.setChecked(CPU.isMSM_LimiterActive());
                mMSM_Limiter_EnableCard.setOnDSwitchCardListener(this);
                views.add(mMSM_Limiter_EnableCard);
            }
            if (CPU.hasPerCoreControl() && CPU.isMSM_LimiterActive()) {
                mPerCoreControlCard = new SwitchCardView.DSwitchCard();
                mPerCoreControlCard.setDescription(getString(R.string.cpu_per_core_control));
                mPerCoreControlCard.setChecked(CPU.isPerCoreControlActive(getActivity()));
                mPerCoreControlCard.setOnDSwitchCardListener(this);
                views.add(mPerCoreControlCard);
            }
            // Plain min/max cards only when MSM-Limiter is out of the picture.
            if (!CPU.isPerCoreControlActive(getActivity()) && !CPU.isMSM_LimiterActive()) {
                for (int freq : CPU.getFreqs())
                    freqs.add(freq / 1000 + getString(R.string.mhz));
                mMaxFreqCard = new PopupCardView.DPopupCard(freqs);
                mMaxFreqCard.setTitle(getString(R.string.cpu_max_freq));
                mMaxFreqCard.setDescription(getString(R.string.cpu_max_freq_summary));
                mMaxFreqCard.setItem(CPU.getMaxFreq(true) / 1000 + getString(R.string.mhz));
                mMaxFreqCard.setOnDPopupCardListener(this);
                mMinFreqCard = new PopupCardView.DPopupCard(freqs);
                mMinFreqCard.setTitle(getString(R.string.cpu_min_freq));
                mMinFreqCard.setDescription(getString(R.string.cpu_min_freq_summary));
                mMinFreqCard.setItem(CPU.getMinFreq(true) / 1000 + getString(R.string.mhz));
                mMinFreqCard.setOnDPopupCardListener(this);
                views.add(mMaxFreqCard);
                views.add(mMinFreqCard);
            }
            if (CPU.isMSM_LimiterActive()) {
                if (!CPU.isPerCoreControlActive(getActivity())) {
                    if (CPU.hasMSM_LimiterResumeMaxFreq()) {
                        // Local list deliberately shadows the "freqs" field.
                        List<String> freqs = new ArrayList<>();
                        for (int freq : CPU.getFreqs())
                            freqs.add(freq / 1000 + getString(R.string.mhz));
                        mMSM_LimiterResumeMaxFreqNoPerCoreCard = new PopupCardView.DPopupCard(freqs);
                        mMSM_LimiterResumeMaxFreqNoPerCoreCard.setTitle(getString(R.string.cpu_msm_limiter_resume_max));
                        mMSM_LimiterResumeMaxFreqNoPerCoreCard.setDescription(getString(R.string.cpu_msm_limiter_resume_max_summary));
                        // Core index -1 = the global (all-core) limit.
                        mMSM_LimiterResumeMaxFreqNoPerCoreCard.setItem(CPU.getMSM_LimiterResumeMaxFreq(-1) / 1000 + getString(R.string.mhz));
                        mMSM_LimiterResumeMaxFreqNoPerCoreCard.setOnDPopupCardListener(this);
                        views.add(mMSM_LimiterResumeMaxFreqNoPerCoreCard);
                    }
                    if (CPU.hasMSM_LimiterSuspendMinFreq()) {
                        List<String> freqs = new ArrayList<>();
                        for (int freq : CPU.getFreqs())
                            freqs.add(freq / 1000 + getString(R.string.mhz));
                        mMSM_LimiterSuspendMinFreqNoPerCoreCard = new PopupCardView.DPopupCard(freqs);
                        mMSM_LimiterSuspendMinFreqNoPerCoreCard.setTitle(getString(R.string.cpu_msm_limiter_suspend_min));
                        mMSM_LimiterSuspendMinFreqNoPerCoreCard.setDescription(getString(R.string.cpu_msm_limiter_suspend_min_summary));
                        mMSM_LimiterSuspendMinFreqNoPerCoreCard.setItem(CPU.getMSM_LimiterSuspendMinFreq(-1) / 1000 + getString(R.string.mhz));
                        mMSM_LimiterSuspendMinFreqNoPerCoreCard.setOnDPopupCardListener(this);
                        views.add(mMSM_LimiterSuspendMinFreqNoPerCoreCard);
                    }
                } else if (CPU.isPerCoreControlActive(getActivity())) {
                    // Unlike the branch above, this one fills the shared "freqs" field.
                    for (int freq : CPU.getFreqs())
                        freqs.add(freq / 1000 + getString(R.string.mhz));
                    DDivider mMaxFreqPerCoreCard = new DDivider();
                    mMaxFreqPerCoreCard.setText("Max Frequency per Core");
                    mMaxFreqPerCoreCard.setDescription(getString(R.string.cpu_msm_limiter_resume_max_summary_per_core));
                    views.add(mMaxFreqPerCoreCard);
                    mMSM_LimiterResumeMaxFreqCard = new PopupCardView.DPopupCard[CPU.getCoreCount()];
                    for (int i = 0; i < CPU.getCoreCount(); i++) {
                        mMSM_LimiterResumeMaxFreqCard[i] = new PopupCardView.DPopupCard(freqs);
                        mMSM_LimiterResumeMaxFreqCard[i].setTitle(String.format(getString(R.string.cpu_msm_limiter_resume_max_per_core), i));
                        mMSM_LimiterResumeMaxFreqCard[i].setDescription("");
                        mMSM_LimiterResumeMaxFreqCard[i].setItem(CPU.getMSM_LimiterResumeMaxFreq(i) / 1000 + getString(R.string.mhz));
                        mMSM_LimiterResumeMaxFreqCard[i].setOnDPopupCardListener(this);
                        views.add(mMSM_LimiterResumeMaxFreqCard[i]);
                    }
                    DDivider mMinFreqPerCoreCard = new DDivider();
                    mMinFreqPerCoreCard.setText("Suspend Min Frequency per Core");
                    mMinFreqPerCoreCard.setDescription(getString(R.string.cpu_msm_limiter_suspend_min_summary_per_core));
                    views.add(mMinFreqPerCoreCard);
                    mMSM_LimiterSuspendMinFreqCard = new PopupCardView.DPopupCard[CPU.getCoreCount()];
                    for (int i = 0; i < CPU.getCoreCount(); i++) {
                        mMSM_LimiterSuspendMinFreqCard[i] = new PopupCardView.DPopupCard(freqs);
                        mMSM_LimiterSuspendMinFreqCard[i].setTitle(String.format(getString(R.string.cpu_msm_limiter_suspend_min_per_core), i));
                        mMSM_LimiterSuspendMinFreqCard[i].setDescription("");
                        mMSM_LimiterSuspendMinFreqCard[i].setItem(CPU.getMSM_LimiterSuspendMinFreq(i) / 1000 + getString(R.string.mhz));
                        mMSM_LimiterSuspendMinFreqCard[i].setOnDPopupCardListener(this);
                        views.add(mMSM_LimiterSuspendMinFreqCard[i]);
                    }
                    if (CPU.hasMSM_LimiterSuspendMaxFreq() &&
                            CPU.getMSM_Limiter_Version() > 5.1) {
                        DDivider mMSM_LimiterSuspendMaxFreqDivCard = new DDivider();
                        mMSM_LimiterSuspendMaxFreqDivCard.setText("Suspend Max Frequency per Core");
                        mMSM_LimiterSuspendMaxFreqDivCard.setDescription(getString(R.string.cpu_msm_limiter_suspend_max_summary_per_core));
                        views.add(mMSM_LimiterSuspendMaxFreqDivCard);
                        mMSM_LimiterSuspendMaxFreqCard = new PopupCardView.DPopupCard[CPU.getCoreCount()];
                        for (int i = 0; i < CPU.getCoreCount(); i++) {
                            mMSM_LimiterSuspendMaxFreqCard[i] = new PopupCardView.DPopupCard(freqs);
                            // NOTE(review): this uses the *suspend_min* per-core title
                            // resource for the suspend-MAX cards -- looks like a
                            // copy/paste slip; confirm which resource was intended.
                            mMSM_LimiterSuspendMaxFreqCard[i].setTitle(String.format(getString(R.string.cpu_msm_limiter_suspend_min_per_core), i));
                            mMSM_LimiterSuspendMaxFreqCard[i].setDescription("");
                            mMSM_LimiterSuspendMaxFreqCard[i].setItem(CPU.getMSM_LimiterSuspendMaxFreq(i) / 1000 + getString(R.string.mhz));
                            mMSM_LimiterSuspendMaxFreqCard[i].setOnDPopupCardListener(this);
                            views.add(mMSM_LimiterSuspendMaxFreqCard[i]);
                        }
                    }
                }
                // NOTE(review): '&&' binds tighter than '||', so this condition reads
                // (hasSuspendMax && version < 5.2) || !perCoreControl. Verify that
                // hasSuspendMax && (version < 5.2 || !perCoreControl) was not intended.
                if (CPU.hasMSM_LimiterSuspendMaxFreq() && (CPU.getMSM_Limiter_Version() < 5.2)
                        || !CPU.isPerCoreControlActive(getActivity())) {
                    if (CPU.isPerCoreControlActive(getActivity())) {
                        DDivider mMSM_LimiterSuspendMaxFreqDivCard = new DDivider();
                        mMSM_LimiterSuspendMaxFreqDivCard.setText("Suspend Max Frequency");
                        mMSM_LimiterSuspendMaxFreqDivCard.setDescription(getString(R.string.cpu_msm_limiter_suspend_max_summary));
                        views.add(mMSM_LimiterSuspendMaxFreqDivCard);
                    }
                    List<String> freqs = new ArrayList<>();
                    for (int freq : CPU.getFreqs())
                        freqs.add(freq / 1000 + getString(R.string.mhz));
                    mMSM_LimiterSuspendMaxFreqNoPerCoreCard = new PopupCardView.DPopupCard(freqs);
                    mMSM_LimiterSuspendMaxFreqNoPerCoreCard.setTitle(getString(R.string.cpu_msm_limiter_suspend_max));
                    mMSM_LimiterSuspendMaxFreqNoPerCoreCard.setDescription(getString(R.string.cpu_msm_limiter_suspend_max_summary));
                    mMSM_LimiterSuspendMaxFreqNoPerCoreCard.setItem(CPU.getMSM_LimiterSuspendMaxFreq(-1) / 1000 + getString(R.string.mhz));
                    mMSM_LimiterSuspendMaxFreqNoPerCoreCard.setOnDPopupCardListener(this);
                    views.add(mMSM_LimiterSuspendMaxFreqNoPerCoreCard);
                }
            }
            if (CPU.hasMaxScreenOffFreq()) {
                // NOTE(review): built from the shared "freqs" field, which is left
                // empty on some MSM-Limiter paths above -- the popup may then have
                // no entries; verify on MSM-Limiter devices.
                mMaxScreenOffFreqCard = new PopupCardView.DPopupCard(freqs);
                mMaxScreenOffFreqCard.setTitle(getString(R.string.cpu_max_screen_off_freq));
                mMaxScreenOffFreqCard.setDescription(getString(R.string.cpu_max_screen_off_freq_summary));
                mMaxScreenOffFreqCard.setItem(CPU.getMaxScreenOffFreq(true) / 1000 + getString(R.string.mhz));
                mMaxScreenOffFreqCard.setOnDPopupCardListener(this);
                views.add(mMaxScreenOffFreqCard);
            }
            addAllViews(views);
        }

        // Governor selection cards: plain, MSM-Limiter global, or per-core,
        // plus the entries that open the governor-tunables page.
        private void governorInit() {
            views.clear();
            if (!CPU.isPerCoreControlActive(getActivity()) && !CPU.isMSM_LimiterActive()) {
                mGovernorCard = new PopupCardView.DPopupCard(CPU.getAvailableGovernors());
                mGovernorCard.setTitle(getString(R.string.cpu_governor));
                mGovernorCard.setDescription(getString(R.string.cpu_governor_summary));
                mGovernorCard.setItem(CPU.getCurGovernor(true));
                mGovernorCard.setOnDPopupCardListener(this);
                views.add(mGovernorCard);
            }
            if (CPU.isMSM_LimiterActive()) {
                if (!CPU.isPerCoreControlActive(getActivity())) {
                    mMSM_LimiterGovernorNoPerCoreCard = new PopupCardView.DPopupCard(CPU.getAvailableGovernors());
                    mMSM_LimiterGovernorNoPerCoreCard.setTitle(getString(R.string.cpu_governor));
                    mMSM_LimiterGovernorNoPerCoreCard.setDescription(getString(R.string.cpu_governor_summary));
                    // -1 selects the global MSM-Limiter governor.
                    mMSM_LimiterGovernorNoPerCoreCard.setItem(CPU.getMSMLimiterGovernor(-1));
                    mMSM_LimiterGovernorNoPerCoreCard.setOnDPopupCardListener(this);
                    views.add(mMSM_LimiterGovernorNoPerCoreCard);
                }
                if (CPU.isPerCoreControlActive(getActivity())) {
                    DDivider mMSM_LimiterGovernorPerCoreDivCard = new DDivider();
                    mMSM_LimiterGovernorPerCoreDivCard.setText("Select Governor per Core");
                    mMSM_LimiterGovernorPerCoreDivCard.setDescription(getString(R.string.cpu_governor_summary));
                    views.add(mMSM_LimiterGovernorPerCoreDivCard);
                    mMSM_LimiterGovernorPerCoreCard = new PopupCardView.DPopupCard[CPU.getCoreCount()];
                    for (int i
                            = 0; i < CPU.getCoreCount(); i++) {
                        mMSM_LimiterGovernorPerCoreCard[i] = new PopupCardView.DPopupCard(CPU.getAvailableGovernors());
                        mMSM_LimiterGovernorPerCoreCard[i].setTitle(String.format(getString(R.string.cpu_msm_limiter_governor_per_core), i));
                        mMSM_LimiterGovernorPerCoreCard[i].setDescription("");
                        mMSM_LimiterGovernorPerCoreCard[i].setItem(CPU.getMSMLimiterGovernor(i));
                        mMSM_LimiterGovernorPerCoreCard[i].setOnDPopupCardListener(this);
                        views.add(mMSM_LimiterGovernorPerCoreCard[i]);
                    }
                }
            }
            if (!CPU.isPerCoreControlActive(getActivity())) {
                mGovernorTunableNoPerCoreCard = new CardViewItem.DCardView();
                mGovernorTunableNoPerCoreCard.setTitle(getString(R.string.cpu_governor_tunables));
                mGovernorTunableNoPerCoreCard.setDescription(getString(R.string.cpu_governor_tunables_summary));
                mGovernorTunableNoPerCoreCard.setOnDCardListener(this);
                views.add(mGovernorTunableNoPerCoreCard);
            }
            if (CPU.isPerCoreControlActive(getActivity())) {
                DDivider mGovernorTunablePerCoreDivider = new DDivider();
                mGovernorTunablePerCoreDivider.setText(getString(R.string.cpu_governor_tunables_per_core_header));
                mGovernorTunablePerCoreDivider.setDescription(getString(R.string.cpu_governor_tunables_per_core_summary));
                views.add(mGovernorTunablePerCoreDivider);
                mGovernorTunableCoreCard = new CardViewItem.DCardView[CPU.getCoreCount()];
                for (int i = 0; i < CPU.getCoreCount(); i++) {
                    mGovernorTunableCoreCard[i] = new CardViewItem.DCardView();
                    mGovernorTunableCoreCard[i].setTitle(String.format(getString(R.string.cpu_governor_tunables_per_core_tunable), i)
                            + " " + CPU.getMSMLimiterGovernor(i));
                    mGovernorTunableCoreCard[i].setOnDCardListener(this);
                    views.add(mGovernorTunableCoreCard[i]);
                }
            }
            addAllViews(views);
        }

        // Per-core monitor card for the LITTLE cluster; mirrors coreInit().
        private void coreLITTLEInit() {
            LinearLayout layout = new LinearLayout(getActivity());
            layout.setOrientation(LinearLayout.VERTICAL);
            mCoreCheckBoxLITTLE = new AppCompatCheckBox[CPU.getLITTLECoreRange().size()];
            mCoreProgressBarLITTLE = new ProgressBar[mCoreCheckBoxLITTLE.length];
            mCoreUsageTextLITTLE = new AppCompatTextView[mCoreCheckBoxLITTLE.length];
            mCoreFreqTextLITTLE = new AppCompatTextView[mCoreCheckBoxLITTLE.length];
            for (int i = 0; i < mCoreCheckBoxLITTLE.length; i++) {
                View view = inflater.inflate(R.layout.coreview, container, false);
                mCoreCheckBoxLITTLE[i] = (AppCompatCheckBox) view.findViewById(R.id.core_checkbox);
                mCoreCheckBoxLITTLE[i].setText(getString(R.string.core, i + 1));
                mCoreCheckBoxLITTLE[i].setOnClickListener(this);
                mCoreProgressBarLITTLE[i] = (ProgressBar) view.findViewById(R.id.progressbar);
                mCoreProgressBarLITTLE[i].setMax(CPU.getFreqs(CPU.getLITTLEcore()).size());
                mCoreUsageTextLITTLE[i] = (AppCompatTextView) view.findViewById(R.id.usage);
                mCoreFreqTextLITTLE[i] = (AppCompatTextView) view.findViewById(R.id.freq);
                layout.addView(view);
            }
            CardViewItem.DCardView coreCard = new CardViewItem.DCardView();
            coreCard.setTitle(getString(R.string.current_freq));
            coreCard.setView(layout);
            addView(coreCard);
        }

        // Min/max/screen-off frequency cards for the LITTLE cluster.
        private void freqLITTLEInit() {
            List<String> freqs = new ArrayList<>();
            for (int freq : CPU.getFreqs(CPU.getLITTLEcore()))
                freqs.add(freq / 1000 + getString(R.string.mhz));
            mMaxFreqLITTLECard = new PopupCardView.DPopupCard(freqs);
            mMaxFreqLITTLECard.setDescription(getString(R.string.cpu_max_freq));
            mMaxFreqLITTLECard.setItem(CPU.getMaxFreq(CPU.getLITTLEcore(), true) / 1000 + getString(R.string.mhz));
            mMaxFreqLITTLECard.setOnDPopupCardListener(this);
            mMinFreqLITTLECard = new PopupCardView.DPopupCard(freqs);
            mMinFreqLITTLECard.setDescription(getString(R.string.cpu_min_freq));
            mMinFreqLITTLECard.setItem(CPU.getMinFreq(CPU.getLITTLEcore(), true) / 1000 + getString(R.string.mhz));
            mMinFreqLITTLECard.setOnDPopupCardListener(this);
            addView(mMaxFreqLITTLECard);
            addView(mMinFreqLITTLECard);
            if (CPU.hasMaxScreenOffFreq()) {
                mMaxScreenOffFreqLITTLECard = new PopupCardView.DPopupCard(freqs);
                mMaxScreenOffFreqLITTLECard.setDescription(getString(R.string.cpu_max_screen_off_freq));
                mMaxScreenOffFreqLITTLECard.setItem(CPU.getMaxScreenOffFreq(CPU.getLITTLEcore(), true) / 1000 + getString(R.string.mhz));
                mMaxScreenOffFreqLITTLECard.setOnDPopupCardListener(this);
                addView(mMaxScreenOffFreqLITTLECard);
            }
        }

        // Governor selector + tunables entry for the LITTLE cluster.
        private void governorLITTLEInit() {
            mGovernorLITTLECard = new PopupCardView.DPopupCard(CPU.getAvailableGovernors(CPU.getLITTLEcore()));
            mGovernorLITTLECard.setDescription(getString(R.string.cpu_governor));
            mGovernorLITTLECard.setItem(CPU.getCurGovernor(CPU.getLITTLEcore(), true));
            mGovernorLITTLECard.setOnDPopupCardListener(this);
            mGovernorTunableLITTLECard = new CardViewItem.DCardView();
            mGovernorTunableLITTLECard.setDescription(getString(R.string.cpu_governor_tunables));
            mGovernorTunableLITTLECard.setOnDCardListener(this);
            addView(mGovernorLITTLECard);
            addView(mGovernorTunableLITTLECard);
        }

        // Multi-core power-saving mode selector.
        private void mcPowerSavingInit() {
            mMcPowerSavingCard = new PopupCardView.DPopupCard(new ArrayList<>(Arrays.asList(
                    CPU.getMcPowerSavingItems(getActivity()))));
            mMcPowerSavingCard.setTitle(getString(R.string.mc_power_saving));
            mMcPowerSavingCard.setDescription(getString(R.string.mc_power_saving_summary));
            mMcPowerSavingCard.setItem(CPU.getCurMcPowerSaving());
            mMcPowerSavingCard.setOnDPopupCardListener(this);
            addView(mMcPowerSavingCard);
        }

        // Power-saving workqueue on/off switch.
        private void powerSavingWqInit() {
            mPowerSavingWqCard = new SwitchCardView.DSwitchCard();
            mPowerSavingWqCard.setDescription(getString(R.string.power_saving_wq));
            mPowerSavingWqCard.setChecked(CPU.isPowerSavingWqActive());
            mPowerSavingWqCard.setOnDSwitchCardListener(this);
            addView(mPowerSavingWqCard);
        }

        // CFS scheduler policy selector.
        private void cfsSchedulerInit() {
            mCFSSchedulerCard = new PopupCardView.DPopupCard(CPU.getAvailableCFSSchedulers());
            mCFSSchedulerCard.setTitle(getString(R.string.cfs_scheduler_policy));
            mCFSSchedulerCard.setDescription(getString(R.string.cfs_scheduler_policy_summary));
            mCFSSchedulerCard.setItem(CPU.getCurrentCFSScheduler());
            mCFSSchedulerCard.setOnDPopupCardListener(this);
            addView(mCFSSchedulerCard);
        }

        // CPUQuiet on/off switch plus its governor selector.
        private void
        cpuQuietInit() {
            if (CPU.hasCpuQuietEnable()) {
                mCpuQuietEnableCard = new SwitchCardView.DSwitchCard();
                mCpuQuietEnableCard.setTitle(getString(R.string.cpu_quiet));
                mCpuQuietEnableCard.setDescription(getString(R.string.cpu_quiet_summary));
                mCpuQuietEnableCard.setChecked(CPU.isCpuQuietActive());
                mCpuQuietEnableCard.setOnDSwitchCardListener(this);
                addView(mCpuQuietEnableCard);
            }
            if (CPU.hasCpuQuietGovernors()) {
                mCpuQuietGovernorCard = new PopupCardView.DPopupCard(CPU.getCpuQuietAvailableGovernors());
                mCpuQuietGovernorCard.setDescription(getString(R.string.cpu_quiet_governor));
                mCpuQuietGovernorCard.setItem(CPU.getCpuQuietCurGovernor());
                mCpuQuietGovernorCard.setOnDPopupCardListener(this);
                addView(mCpuQuietGovernorCard);
            }
        }

        // Alucard touch-boost driver cards (frequency, durations, core count).
        private void Alu_T_BoostInit() {
            if (CPU.hasAlu_T_Boostfreq() && CPU.getFreqs() != null) {
                // Index 0 is the "disabled" entry; real frequencies start at 1.
                List<String> list = new ArrayList<>();
                list.add(getString(R.string.disabled));
                for (int freq : CPU.getFreqs())
                    list.add((freq / 1000) + getString(R.string.mhz));
                mAlu_T_BoostFreqCard = new PopupCardView.DPopupCard(list);
                mAlu_T_BoostFreqCard.setTitle(getString(R.string.alu_t_boostfreq));
                mAlu_T_BoostFreqCard.setDescription(getString(R.string.alu_t_boostfreq_summary));
                mAlu_T_BoostFreqCard.setItem(CPU.getAlutBoostFreq());
                mAlu_T_BoostFreqCard.setOnDPopupCardListener(this);
                addView(mAlu_T_BoostFreqCard);
            }
            if (CPU.hasAlu_T_Boostms()) {
                // Seek bar covers 0..5000 ms in 10 ms steps.
                List<String> list = new ArrayList<>();
                for (int i = 0; i < 5001; i += 10)
                    list.add(i + getString(R.string.ms));
                mAlu_T_BoostMsCard = new SeekBarCardView.DSeekBarCard(list);
                mAlu_T_BoostMsCard.setTitle(getString(R.string.alu_t_boostms));
                mAlu_T_BoostMsCard.setDescription(getString(R.string.alu_t_boostms_summary));
                mAlu_T_BoostMsCard.setProgress(CPU.getAlutBoostMs() / 10);
                mAlu_T_BoostMsCard.setOnDSeekBarCardListener(this);
                addView(mAlu_T_BoostMsCard);
            }
            if (CPU.hasAlu_T_Boostmii()) {
                // 0..1500 ms in 10 ms steps.
                List<String> list = new ArrayList<>();
                for (int i = 0; i < 1501; i += 10)
                    list.add(i + getString(R.string.ms));
                mAlu_T_BoostMiiCard = new
                        SeekBarCardView.DSeekBarCard(list);
                mAlu_T_BoostMiiCard.setTitle(getString(R.string.alu_t_boostmii));
                mAlu_T_BoostMiiCard.setDescription(getString(R.string.alu_t_boostmii_summary));
                mAlu_T_BoostMiiCard.setProgress(CPU.getAlutBoostMii() / 10);
                mAlu_T_BoostMiiCard.setOnDSeekBarCardListener(this);
                addView(mAlu_T_BoostMiiCard);
            }
            if (CPU.hasAlu_T_Boostcpus()) {
                // 0..4 cores to boost.
                List<String> list = new ArrayList<>();
                for (int i = 0; i < 5; i += 1)
                    list.add(i + getString(R.string.cores));
                mAlu_T_BoostCpusCard = new SeekBarCardView.DSeekBarCard(list);
                mAlu_T_BoostCpusCard.setTitle(getString(R.string.alu_t_boostcpus));
                mAlu_T_BoostCpusCard.setDescription(getString(R.string.alu_t_boostcpus_summary));
                mAlu_T_BoostCpusCard.setProgress(CPU.getAlutBoostCpus() / 1);
                mAlu_T_BoostCpusCard.setOnDSeekBarCardListener(this);
                addView(mAlu_T_BoostCpusCard);
            }
        }

        // cpu-boost driver section: collects cards into "views" and only adds
        // the section (with its divider) if at least one card exists.
        private void cpuBoostInit() {
            views.clear();
            if (CPU.hasCpuBoostEnable()) {
                mCpuBoostEnableCard = new SwitchCardView.DSwitchCard();
                mCpuBoostEnableCard.setDescription(getString(R.string.cpu_boost));
                mCpuBoostEnableCard.setChecked(CPU.isCpuBoostActive());
                mCpuBoostEnableCard.setOnDSwitchCardListener(this);
                views.add(mCpuBoostEnableCard);
            }
            if (CPU.hasCpuBoostDebugMask()) {
                mCpuBoostDebugMaskCard = new SwitchCardView.DSwitchCard();
                mCpuBoostDebugMaskCard.setTitle(getString(R.string.debug_mask));
                mCpuBoostDebugMaskCard.setDescription(getString(R.string.debug_mask_summary));
                mCpuBoostDebugMaskCard.setChecked(CPU.isCpuBoostDebugMaskActive());
                mCpuBoostDebugMaskCard.setOnDSwitchCardListener(this);
                views.add(mCpuBoostDebugMaskCard);
            }
            if (CPU.hasCpuBoostMs()) {
                List<String> list = new ArrayList<>();
                for (int i = 0; i < 5001; i += 10)
                    list.add(i + getString(R.string.ms));
                mCpuBoostMsCard = new SeekBarCardView.DSeekBarCard(list);
                mCpuBoostMsCard.setTitle(getString(R.string.interval));
                mCpuBoostMsCard.setDescription(getString(R.string.interval_summary));
                mCpuBoostMsCard.setProgress(CPU.getCpuBootMs() / 10);
                mCpuBoostMsCard.setOnDSeekBarCardListener(this);
                views.add(mCpuBoostMsCard);
            }
            if (CPU.hasCpuBoostSyncThreshold() && CPU.getFreqs() != null) {
                // Index 0 is "disabled"; handled in onItemSelected() with position - 1.
                List<String> list = new ArrayList<>();
                list.add(getString(R.string.disabled));
                for (int freq : CPU.getFreqs())
                    list.add((freq / 1000) + getString(R.string.mhz));
                mCpuBoostSyncThresholdCard = new PopupCardView.DPopupCard(list);
                mCpuBoostSyncThresholdCard.setTitle(getString(R.string.sync_threshold));
                mCpuBoostSyncThresholdCard.setDescription(getString(R.string.sync_threshold_summary));
                mCpuBoostSyncThresholdCard.setItem(CPU.getCpuBootSyncThreshold());
                mCpuBoostSyncThresholdCard.setOnDPopupCardListener(this);
                views.add(mCpuBoostSyncThresholdCard);
            }
            if (CPU.hasCpuInputBoostEnable()) {
                mCpuInputBoostEnableCard = new SwitchCardView.DSwitchCard();
                mCpuInputBoostEnableCard.setTitle(getString(R.string.input_boost));
                mCpuInputBoostEnableCard.setDescription(getString(R.string.input_boost_summary));
                mCpuInputBoostEnableCard.setChecked(CPU.isInputBoostActive());
                mCpuInputBoostEnableCard.setOnDSwitchCardListener(this);
                views.add(mCpuInputBoostEnableCard);
            }
            // Input-boost details are shown when input boost is on, or when the
            // kernel has no separate enable switch at all.
            if (CPU.isInputBoostActive() || !CPU.hasCpuInputBoostEnable()) {
                if (CPU.hasCpuBoostInputMs()) {
                    List<String> list = new ArrayList<>();
                    for (int i = 0; i < 5001; i += 10)
                        list.add(i + getString(R.string.ms));
                    mCpuBoostInputMsCard = new SeekBarCardView.DSeekBarCard(list);
                    mCpuBoostInputMsCard.setTitle(getString(R.string.input_interval));
                    mCpuBoostInputMsCard.setDescription(getString(R.string.input_interval_summary));
                    mCpuBoostInputMsCard.setProgress(CPU.getCpuBootInputMs() / 10);
                    mCpuBoostInputMsCard.setOnDSeekBarCardListener(this);
                    views.add(mCpuBoostInputMsCard);
                }
                if (CPU.hasCpuBoostInputFreq() && CPU.getFreqs() != null) {
                    List<String> list = new ArrayList<>();
                    list.add(getString(R.string.disabled));
                    for (int freq : CPU.getFreqs())
                        list.add((freq / 1000) + getString(R.string.mhz));
                    // One card per entry the driver reports (may be per-core).
                    List<Integer> freqs = CPU.getCpuBootInputFreq();
                    mCpuBoostInputFreqCard = new PopupCardView.DPopupCard[freqs.size()];
                    for (int i = 0; i < freqs.size();
                            i++) {
                        mCpuBoostInputFreqCard[i] = new PopupCardView.DPopupCard(list);
                        if (i == 0) {
                            if (freqs.size() > 1)
                                mCpuBoostInputFreqCard[i].setTitle(getString(R.string.input_boost_freq_core, i + 1));
                            else mCpuBoostInputFreqCard[i].setTitle(getString(R.string.input_boost_freq));
                            mCpuBoostInputFreqCard[i].setDescription(getString(R.string.input_boost_freq_summary));
                        } else {
                            mCpuBoostInputFreqCard[i].setDescription(getString(R.string.input_boost_freq_core, i + 1));
                        }
                        mCpuBoostInputFreqCard[i].setItem(freqs.get(i));
                        mCpuBoostInputFreqCard[i].setOnDPopupCardListener(this);
                        views.add(mCpuBoostInputFreqCard[i]);
                    }
                }
            }
            if (CPU.hasCpuBoostWakeup()) {
                mCpuBoostWakeupCard = new SwitchCardView.DSwitchCard();
                mCpuBoostWakeupCard.setTitle(getString(R.string.wakeup_boost));
                mCpuBoostWakeupCard.setDescription(getString(R.string.wakeup_boost_summary));
                mCpuBoostWakeupCard.setChecked(CPU.isCpuBoostWakeupActive());
                mCpuBoostWakeupCard.setOnDSwitchCardListener(this);
                views.add(mCpuBoostWakeupCard);
            }
            if (CPU.hasCpuBoostHotplug()) {
                mCpuBoostHotplugCard = new SwitchCardView.DSwitchCard();
                mCpuBoostHotplugCard.setTitle(getString(R.string.hotplug_boost));
                mCpuBoostHotplugCard.setDescription(getString(R.string.hotplug_boost_summary));
                mCpuBoostHotplugCard.setChecked(CPU.isCpuBoostHotplugActive());
                mCpuBoostHotplugCard.setOnDSwitchCardListener(this);
                views.add(mCpuBoostHotplugCard);
            }
            // Only show the section header when at least one card was built.
            if (views.size() > 0) {
                DDivider mCpuBoostDividerCard = new DDivider();
                mCpuBoostDividerCard.setText(getString(R.string.cpu_boost));
                addView(mCpuBoostDividerCard);
                addAllViews(views);
            }
        }

        // Generic touch-boost on/off switch.
        private void cpuTouchBoostInit() {
            mCpuTouchBoostCard = new SwitchCardView.DSwitchCard();
            mCpuTouchBoostCard.setTitle(getString(R.string.touch_boost));
            mCpuTouchBoostCard.setDescription(getString(R.string.touch_boost_summary));
            mCpuTouchBoostCard.setChecked(CPU.isCpuTouchBoostEnabled());
            mCpuTouchBoostCard.setOnDSwitchCardListener(this);
            addView(mCpuTouchBoostCard);
        }

        // Core checkbox clicks: online/offline the matching core; core 0 cannot
        // be taken offline, so its checkbox is forced back on.
        @Override
        public void onClick(View v) {
            for (int i = 0; i <
                    mCoreCheckBox.length; i++)
                if (v == mCoreCheckBox[i]) {
                    List<Integer> range = CPU.getBigCoreRange();
                    // Core 0 must stay online; undo the uncheck.
                    if (range.get(i) == 0) {
                        mCoreCheckBox[i].setChecked(true);
                        return;
                    }
                    CPU.activateCore(range.get(i), ((CheckBox) v).isChecked(), getActivity());
                    return;
                }
            if (mCoreCheckBoxLITTLE != null)
                for (int i = 0; i < mCoreCheckBoxLITTLE.length; i++)
                    if (v == mCoreCheckBoxLITTLE[i]) {
                        List<Integer> range = CPU.getLITTLECoreRange();
                        if (range.get(i) == 0) {
                            mCoreCheckBoxLITTLE[i].setChecked(true);
                            return;
                        }
                        CPU.activateCore(range.get(i), ((CheckBox) v).isChecked(), getActivity());
                        return;
                    }
        }

        // Popup-card selections: maps the chosen list position back to a kernel
        // value and applies it. Per-core MSM-Limiter cards are matched first.
        @Override
        public void onItemSelected(PopupCardView.DPopupCard dPopupCard, int position) {
            for (int i = 0; i < CPU.getCoreCount(); i++) {
                if (mMSM_LimiterResumeMaxFreqCard != null
                        && dPopupCard == mMSM_LimiterResumeMaxFreqCard[i]) {
                    CPU.setMSM_LimiterResumeMaxFreq(CPU.getFreqs().get(position), getActivity(), i);
                }
                if (mMSM_LimiterSuspendMaxFreqCard != null
                        && dPopupCard == mMSM_LimiterSuspendMaxFreqCard[i]) {
                    CPU.setMSM_LimiterSuspendMaxFreq(CPU.getFreqs().get(position), i, getActivity());
                }
                if (mMSM_LimiterSuspendMinFreqCard != null
                        && dPopupCard == mMSM_LimiterSuspendMinFreqCard[i]) {
                    CPU.setMSM_LimiterSuspendMinFreq(CPU.getFreqs().get(position), i, getActivity());
                }
                if (mMSM_LimiterGovernorPerCoreCard != null
                        && dPopupCard == mMSM_LimiterGovernorPerCoreCard[i]) {
                    CPU.setMSMLimiterGovernor(CPU.getAvailableGovernors().get(position), getActivity(), i);
                }
            }
            if (dPopupCard == mMaxFreqCard)
                CPU.setMaxFreq(CPU.getFreqs().get(position), getActivity());
            else if (dPopupCard == mMinFreqCard)
                CPU.setMinFreq(CPU.getFreqs().get(position), getActivity());
            else if (dPopupCard == mMSM_LimiterResumeMaxFreqNoPerCoreCard) {
                // -1 = apply globally rather than to a single core.
                CPU.setMSM_LimiterResumeMaxFreq(CPU.getFreqs().get(position), getActivity(), -1);
            } else if (dPopupCard == mMSM_LimiterSuspendMaxFreqNoPerCoreCard)
                CPU.setMSM_LimiterSuspendMaxFreq(CPU.getFreqs().get(position), -1, getActivity());
            else if (dPopupCard ==
                    mMSM_LimiterSuspendMinFreqNoPerCoreCard)
                CPU.setMSM_LimiterSuspendMinFreq(CPU.getFreqs().get(position), -1, getActivity());
            else if (dPopupCard == mMaxScreenOffFreqCard)
                CPU.setMaxScreenOffFreq(CPU.getFreqs().get(position), getActivity());
            else if (dPopupCard == mGovernorCard)
                CPU.setGovernor(CPU.getAvailableGovernors().get(position), getActivity());
            else if (dPopupCard == mMSM_LimiterGovernorNoPerCoreCard)
                CPU.setMSMLimiterGovernor(CPU.getAvailableGovernors().get(position), getActivity(), -1);
            else if (dPopupCard == mMaxFreqLITTLECard)
                CPU.setMaxFreq(Control.CommandType.CPU_LITTLE,
                        CPU.getFreqs(CPU.getLITTLEcore()).get(position), getActivity());
            else if (dPopupCard == mMinFreqLITTLECard)
                CPU.setMinFreq(Control.CommandType.CPU_LITTLE,
                        CPU.getFreqs(CPU.getLITTLEcore()).get(position), getActivity());
            else if (dPopupCard == mMaxScreenOffFreqLITTLECard)
                CPU.setMaxScreenOffFreq(Control.CommandType.CPU_LITTLE,
                        CPU.getFreqs(CPU.getLITTLEcore()).get(position), getActivity());
            else if (dPopupCard == mGovernorLITTLECard)
                CPU.setGovernor(Control.CommandType.CPU_LITTLE,
                        CPU.getAvailableGovernors(CPU.getLITTLEcore()).get(position), getActivity());
            else if (dPopupCard == mAlu_T_BoostFreqCard)
                CPU.setAlutBoostFreq(CPU.getFreqs().get(position), getActivity());
            else if (dPopupCard == mMcPowerSavingCard)
                CPU.setMcPowerSaving(position, getActivity());
            else if (dPopupCard == mCFSSchedulerCard)
                CPU.setCFSScheduler(CPU.getAvailableCFSSchedulers().get(position), getActivity());
            else if (dPopupCard == mCpuQuietGovernorCard)
                CPU.setCpuQuietGovernor(CPU.getCpuQuietAvailableGovernors().get(position), getActivity());
            else if (dPopupCard == mCpuBoostSyncThresholdCard)
                // Position 0 is the "disabled" entry, hence the -1 offset.
                CPU.setCpuBoostSyncThreshold(position == 0 ? 0
                        : CPU.getFreqs().get(position - 1), getActivity());
            else {
                if (mCpuBoostInputFreqCard != null)
                    for (int i = 0; i < mCpuBoostInputFreqCard.length; i++)
                        if (dPopupCard == mCpuBoostInputFreqCard[i]) {
                            CPU.setCpuBoostInputFreq(position == 0 ?
                                    0 : CPU.getFreqs().get(position - 1), i, getActivity());
                            return;
                        }
            }
        }

        // Tunables-card taps: record the target core/cluster on the outer
        // fragment, reload the governor page and switch to it.
        @Override
        public void onClick(CardViewItem.DCardView dCardView) {
            for (int i = 0; i < CPU.getCoreCount(); i++) {
                if (mGovernorTunableCoreCard != null
                        && dCardView == mGovernorTunableCoreCard[i]) {
                    cpuFragment.core = i;
                    cpuFragment.governorPart.reload();
                    cpuFragment.setCurrentItem(1);
                }
            }
            if (dCardView == mGovernorTunableNoPerCoreCard) {
                cpuFragment.cluster = "big";
                cpuFragment.core = CPU.getBigCore();
                cpuFragment.governorPart.reload();
                cpuFragment.setCurrentItem(1);
            } else if (dCardView == mGovernorTunableLITTLECard) {
                cpuFragment.cluster = "little";
                cpuFragment.core = CPU.getLITTLEcore();
                cpuFragment.governorPart.reload();
                cpuFragment.setCurrentItem(1);
            }
        }

        // Live seek-bar movement is ignored; values are applied on release.
        @Override
        public void onChanged(SeekBarCardView.DSeekBarCard dSeekBarCard, int position) {
        }

        // Seek-bar release: position is a 10 ms step index for the *Ms cards.
        @Override
        public void onStop(SeekBarCardView.DSeekBarCard dSeekBarCard, int position) {
            if (dSeekBarCard == mCpuBoostMsCard)
                CPU.setCpuBoostMs(position * 10, getActivity());
            else if (dSeekBarCard == mCpuBoostInputMsCard)
                CPU.setCpuBoostInputMs(position * 10, getActivity());
            else if (dSeekBarCard == mAlu_T_BoostMsCard)
                CPU.setAlutBoostMs(position * 10, getActivity());
            else if (dSeekBarCard == mAlu_T_BoostMiiCard)
                CPU.setAlutBoostMii(position * 10, getActivity());
            else if (dSeekBarCard == mAlu_T_BoostCpusCard)
                CPU.setAlutBoostCpus(position, getActivity());
        }

        // Switch-card toggles; the MSM-Limiter switches force a fragment
        // rebuild because they change which cards exist.
        @Override
        public void onChecked(SwitchCardView.DSwitchCard dSwitchCard, boolean checked) {
            if (dSwitchCard == mCpuQuietEnableCard)
                CPU.activateCpuQuiet(checked, getActivity());
            else if (dSwitchCard == mCpuBoostEnableCard)
                CPU.activateCpuBoost(checked, getActivity());
            else if (dSwitchCard == mCpuBoostDebugMaskCard)
                CPU.activateCpuBoostDebugMask(checked, getActivity());
            else if (dSwitchCard == mPowerSavingWqCard)
                CPU.activatePowerSavingWq(checked, getActivity());
            else if (dSwitchCard == mCpuBoostWakeupCard)
                CPU.activateCpuBoostWakeup(checked, getActivity());
            else if (dSwitchCard ==
mCpuInputBoostEnableCard) { CPU.activateCpuInputBoost(checked, getActivity()); ForceRefresh(); } else if (dSwitchCard == mCpuBoostHotplugCard) CPU.activateCpuBoostHotplug(checked, getActivity()); else if (dSwitchCard == mCpuTouchBoostCard) CPU.activateCpuTouchBoost(checked, getActivity()); else if (dSwitchCard == mMSM_Limiter_EnableCard) { CPU.activateMSM_Limiter(checked, getActivity()); ForceRefresh(); } else if (dSwitchCard == mPerCoreControlCard) { CPU.activatePerCoreControl(checked, getActivity()); ForceRefresh(); } } private void ForceRefresh() { try { Thread.sleep(250); } catch (InterruptedException ex) { } CPUFragment.cpuFragment.cpuPart.view.invalidate(); getActivity().getSupportFragmentManager().beginTransaction().detach(this).attach(this).commit(); } @Override public boolean onRefresh() { if (mMSM_LimiterGovernorPerCoreCard != null) { for (int i = 0; i < CPU.getCoreCount(); i++) { String msm_limiter_governor = CPU.getMSMLimiterGovernor(i); if (mMSM_LimiterGovernorPerCoreCard[i] != null && !msm_limiter_governor.isEmpty()) { mMSM_LimiterGovernorPerCoreCard[i].setItem(msm_limiter_governor); } } } if (mTempCard != null) mTempCard.setDescription(CPU.getTemp()); if (mCoreCheckBox != null && mCoreProgressBar != null && mCoreFreqText != null) { List<Integer> range = CPU.getBigCoreRange(); for (int i = 0; i < mCoreCheckBox.length; i++) { int cur = CPU.getCurFreq(range.get(i)); if (mCoreCheckBox[i] != null) mCoreCheckBox[i].setChecked(cur != 0); if (mCoreProgressBar[i] != null) mCoreProgressBar[i].setProgress(CPU.getFreqs().indexOf(cur) + 1); if (mCoreFreqText[i] != null) mCoreFreqText[i].setText(cur == 0 ? 
getString(R.string.offline) : cur / 1000 + getString(R.string.mhz)); } } if (mMaxFreqCard != null) { int maxFreq = CPU.getMaxFreq(false); if (maxFreq != 0) mMaxFreqCard.setItem(maxFreq / 1000 + getString(R.string.mhz)); } if (mMinFreqCard != null) { int minFreq = CPU.getMinFreq(false); if (minFreq != 0) mMinFreqCard.setItem(minFreq / 1000 + getString(R.string.mhz)); } if (mGovernorCard != null) { String governor = CPU.getCurGovernor(false); if (!governor.isEmpty()) mGovernorCard.setItem(governor); } if (mMSM_LimiterGovernorNoPerCoreCard != null) { String msm_limiter_governor = CPU.getMSMLimiterGovernor(-1); if (!msm_limiter_governor.isEmpty()) mMSM_LimiterGovernorNoPerCoreCard.setItem(msm_limiter_governor); } if (mCoreCheckBoxLITTLE != null && mCoreProgressBarLITTLE != null && mCoreFreqTextLITTLE != null) { List<Integer> range = CPU.getLITTLECoreRange(); for (int i = 0; i < mCoreCheckBoxLITTLE.length; i++) { int cur = CPU.getCurFreq(range.get(i)); if (mCoreCheckBoxLITTLE[i] != null) mCoreCheckBoxLITTLE[i].setChecked(cur != 0); if (mCoreProgressBarLITTLE[i] != null) mCoreProgressBarLITTLE[i].setProgress(CPU.getFreqs(CPU.getLITTLEcore()).indexOf(cur) + 1); if (mCoreFreqTextLITTLE[i] != null) mCoreFreqTextLITTLE[i].setText(cur == 0 ? 
getString(R.string.offline) : cur / 1000 + getString(R.string.mhz)); } } if (mMaxFreqLITTLECard != null) { int maxFreq = CPU.getMaxFreq(CPU.getLITTLEcore(), false); if (maxFreq != 0) mMaxFreqLITTLECard.setItem((maxFreq / 1000) + getString(R.string.mhz)); } if (mMinFreqLITTLECard != null) { int minFreq = CPU.getMinFreq(CPU.getLITTLEcore(), false); if (minFreq != 0) mMinFreqLITTLECard.setItem(minFreq / 1000 + getString(R.string.mhz)); } if (mGovernorLITTLECard != null) { String governor = CPU.getCurGovernor(CPU.getLITTLEcore(), false); if (!governor.isEmpty()) mGovernorLITTLECard.setItem(governor); } return true; } private final Runnable cpuUsage = new Runnable() { @Override public void run() { new Thread(new Runnable() { @Override public void run() { final float[] usage = CPU.getCpuUsage(); try { getActivity().runOnUiThread(new Runnable() { @Override public void run() { if (usage != null) { if (mUsageCard != null) mUsageCard.setProgress(Math.round(usage[0])); if (mCoreUsageText != null) { List<Integer> cores = CPU.getBigCoreRange(); for (int i = 0; i < mCoreUsageText.length; i++) { String message = Math.round(usage[cores.get(i) + 1]) + "%"; if (mCoreUsageText[i] != null) mCoreUsageText[i].setText(message); if (mCoreProgressBar != null && mCoreProgressBar[i].getProgress() == 0) mCoreUsageText[i].setText(""); } } if (mCoreUsageTextLITTLE != null) { List<Integer> cores = CPU.getLITTLECoreRange(); for (int i = 0; i < mCoreUsageTextLITTLE.length; i++) { String message = Math.round(usage[cores.get(i) + 1]) + "%"; if (mCoreUsageTextLITTLE[i] != null) mCoreUsageTextLITTLE[i].setText(message); if (mCoreProgressBarLITTLE != null && mCoreProgressBarLITTLE[i].getProgress() == 0) mCoreUsageTextLITTLE[i].setText(""); } } } } }); } catch (NullPointerException ignored) { } } }).start(); getHandler().postDelayed(cpuUsage, 1000); } }; @Override public void onResume() { super.onResume(); Handler hand; if ((hand = getHandler()) != null) hand.post(cpuUsage); } @Override public void 
onPause() { super.onPause(); Handler hand; if ((hand = getHandler()) != null) hand.removeCallbacks(cpuUsage); } } public static class GovernorPart extends PathReaderFragment { @Override public String getName() { if (!CPU.isPerCoreControlActive(getActivity())) { return CPU.getCurGovernor(cpuFragment.core, true); } if (CPU.isPerCoreControlActive(getActivity())) { return CPU.getMSMLimiterGovernor(cpuFragment.core); } return(null); } @Override public String getPath() { if (!CPU.isPerCoreControlActive(getActivity())) { if ( CPU.isBigLITTLE() && cpuFragment.cluster.equals("little") && Utils.existFile(String.format(CPU_GOVERNOR_TUNABLES_CORE, CPU.LITTLEcore) + "/" + CPU.getCurGovernor(CPU.getLITTLEcore(), true))) { return getPath(String.format(CPU_GOVERNOR_TUNABLES_CORE, CPU.LITTLEcore), CPU.getCurGovernor(cpuFragment.core, true)); } else if ( CPU.isBigLITTLE() && cpuFragment.cluster.equals("big") && Utils.existFile(String.format(CPU_GOVERNOR_TUNABLES_CORE, CPU.bigCore) + "/" + CPU.getCurGovernor(CPU.getBigCore(), true))) { return getPath(String.format(CPU_GOVERNOR_TUNABLES_CORE, CPU.bigCore), CPU.getCurGovernor(cpuFragment.core, true)); } return getPath(CPU_GOVERNOR_TUNABLES, CPU.getCurGovernor(cpuFragment.core, true)); } if (CPU.isPerCoreControlActive(getActivity())) { return getPath(CPU_GOVERNOR_TUNABLES, CPU.getMSMLimiterGovernor(cpuFragment.core)); } return(null); } private String getPath(String path, String governor) { if (Utils.existFile(path + "/" + governor)) return path + "/" + governor; else for (String file : new RootFile(path).list()) if (governor.contains(file)) return path + "/" + file; return null; } @Override public PATH_TYPE getType() { return PATH_TYPE.GOVERNOR; } @Override public String getError(Context context) { if (!CPU.isPerCoreControlActive(getActivity())) { return context.getString(R.string.not_tunable, CPU.getCurGovernor(cpuFragment.core, true)); } if (CPU.isPerCoreControlActive(getActivity())) { return context.getString(R.string.not_tunable, 
CPU.getMSMLimiterGovernor(cpuFragment.core)); } return(null); } } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.gemstone.gemfire.distributed.internal.direct; import java.io.IOException; import java.io.NotSerializableException; import java.net.InetAddress; import java.net.SocketTimeoutException; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Properties; import java.util.Set; import java.util.concurrent.Semaphore; import org.apache.logging.log4j.Logger; import com.gemstone.gemfire.CancelException; import com.gemstone.gemfire.InternalGemFireException; import com.gemstone.gemfire.SystemFailure; import com.gemstone.gemfire.ToDataException; import com.gemstone.gemfire.cache.TimeoutException; import com.gemstone.gemfire.distributed.DistributedSystemDisconnectedException; import com.gemstone.gemfire.distributed.internal.DM; import com.gemstone.gemfire.distributed.internal.DMStats; import com.gemstone.gemfire.distributed.internal.DirectReplyProcessor; import com.gemstone.gemfire.distributed.internal.DistributionConfig; import com.gemstone.gemfire.distributed.internal.DistributionManager; import com.gemstone.gemfire.distributed.internal.DistributionMessage; import com.gemstone.gemfire.distributed.internal.ReplyProcessor21; import 
com.gemstone.gemfire.distributed.internal.membership.DistributedMembershipListener; import com.gemstone.gemfire.distributed.internal.membership.InternalDistributedMember; import com.gemstone.gemfire.distributed.internal.membership.MembershipManager; import com.gemstone.gemfire.internal.SocketCreator; import com.gemstone.gemfire.internal.cache.DirectReplyMessage; import com.gemstone.gemfire.internal.i18n.LocalizedStrings; import com.gemstone.gemfire.internal.logging.InternalLogWriter; import com.gemstone.gemfire.internal.logging.LogService; import com.gemstone.gemfire.internal.logging.log4j.AlertAppender; import com.gemstone.gemfire.internal.logging.log4j.LocalizedMessage; import com.gemstone.gemfire.internal.logging.log4j.LogMarker; import com.gemstone.gemfire.internal.tcp.BaseMsgStreamer; import com.gemstone.gemfire.internal.tcp.ConnectExceptions; import com.gemstone.gemfire.internal.tcp.Connection; import com.gemstone.gemfire.internal.tcp.ConnectionException; import com.gemstone.gemfire.internal.tcp.MemberShunnedException; import com.gemstone.gemfire.internal.tcp.MsgStreamer; import com.gemstone.gemfire.internal.tcp.Stub; import com.gemstone.gemfire.internal.tcp.TCPConduit; import com.gemstone.gemfire.internal.util.Breadcrumbs; import com.gemstone.gemfire.internal.util.concurrent.ReentrantSemaphore; import com.gemstone.org.jgroups.util.StringId; /** * @author Bruce Schuchardt * @author Darrel Schneider * DirectChannel is used to interact directly with other Direct servers to * distribute GemFire messages to other nodes. It is held by a * com.gemstone.gemfire.internal.cache.distribution.DistributionChannel, * which is used by the DistributionManager to send and receive asynchronous * messages. 
*/ public class DirectChannel { private static final Logger logger = LogService.getLogger(); /** this is the conduit used for communications */ private final transient TCPConduit conduit; private volatile boolean disconnected = true; /** This is set to true when completely disconnected (all connections are closed) */ private volatile boolean disconnectCompleted = true; /** this is the DistributionManager, most of the time */ private final DistributedMembershipListener receiver; private final InetAddress address; InternalDistributedMember localAddr; /** * Callback to set the local address, must be done before this channel is used. * * @param localAddr * @throws ConnectionException if the conduit has stopped */ public void setLocalAddr(InternalDistributedMember localAddr) { this.localAddr = localAddr; conduit.setLocalAddr(localAddr); if (disconnected) { disconnected = false; disconnectCompleted = false; this.groupOrderedSenderSem = new ReentrantSemaphore(MAX_GROUP_SENDERS); this.groupUnorderedSenderSem = new ReentrantSemaphore(MAX_GROUP_SENDERS); } } /** * when the initial number of members is known, this method is invoked * to ensure that connections to those members can be established in a * reasonable amount of time. 
See bug 39848 * @param numberOfMembers */ public void setMembershipSize(int numberOfMembers) { conduit.setMaximumHandshakePoolSize(numberOfMembers); } /** * @param mgr * @param dm * @param dc * @param unused * @throws ConnectionException */ public DirectChannel(MembershipManager mgr, DistributedMembershipListener dm, DistributionConfig dc, Properties unused) throws ConnectionException { this.receiver = dm; this.address = initAddress(dm); boolean isBindAddress = dc.getBindAddress() != null; try { int port = Integer.getInteger("tcpServerPort", 0).intValue(); if (port == 0) { port = dc.getTcpPort(); } Properties props = System.getProperties(); if (props.getProperty("p2p.shareSockets") == null) { props.setProperty("p2p.shareSockets", String.valueOf(dc.getConserveSockets())); } if (dc.getSocketBufferSize() != DistributionConfig.DEFAULT_SOCKET_BUFFER_SIZE) { // Note that the system property "p2p.tcpBufferSize" will be // overridden by the new "socket-buffer-size". props.setProperty("p2p.tcpBufferSize", String.valueOf(dc.getSocketBufferSize())); } if (props.getProperty("p2p.idleConnectionTimeout") == null) { props.setProperty("p2p.idleConnectionTimeout", String.valueOf(dc.getSocketLeaseTime())); } int[] range = dc.getMembershipPortRange(); props.setProperty("membership_port_range_start", ""+range[0]); props.setProperty("membership_port_range_end", ""+range[1]); this.conduit = new TCPConduit(mgr, port, address, isBindAddress, this, props); disconnected = false; disconnectCompleted = false; this.groupOrderedSenderSem = new ReentrantSemaphore(MAX_GROUP_SENDERS); this.groupUnorderedSenderSem = new ReentrantSemaphore(MAX_GROUP_SENDERS); logger.info(LocalizedMessage.create( LocalizedStrings.DirectChannel_GEMFIRE_P2P_LISTENER_STARTED_ON__0, conduit.getId())); } catch (ConnectionException ce) { logger.fatal(LocalizedMessage.create( LocalizedStrings.DirectChannel_UNABLE_TO_INITIALIZE_DIRECT_CHANNEL_BECAUSE__0, new Object[]{ce.getMessage()}), ce); throw ce; // fix for bug 31973 } } 
// /** // * // * @param addr destination for the message // * @param stubMap map containing all the stubs // * @param msg the original message // * @param msgBuf the serialized message // * @param directAck true if we need an ack // * @param processorType the type (serialized, etc.) // * @return if directAck, the Connection that needs the acknowledgment // * @throws MissingStubException if we do not have a Stub for the recipient // * @throws IOException if the message could not be sent // */ // private Connection attemptSingleSend(MembershipManager mgr, // InternalDistributedMember addr, // DistributionMessage msg, ByteBuffer msgBuf, // boolean directAck, int processorType) // throws MissingStubException, IOException // { // if (!msg.deliverToSender() && localAddr.equals(addr)) // return null; // if (addr == null) // return null; // Stub dest = mgr.getStubForMember(addr); // if (dest == null) { // // This should only happen if the member is no longer in the view. // Assert.assertTrue(!mgr.memberExists(addr)); // throw new MissingStubException("No stub"); // } // try { // msgBuf.position(0); // fix for bug#30680 // Connection con = conduit.sendSync(dest, msgBuf, processorType, msg); // if (directAck) // return con; // else // return null; // } // catch(IOException t) { // throw t; // } // } /** * Return how many concurrent operations should be allowed by default. * since 6.6, this has been raised to Integer.MAX value from the number * of available processors. Setting this to a lower value raises the possibility * of a deadlock when serializing a message with PDX objects, because the * PDX serialization can trigger further distribution. */ static public final int DEFAULT_CONCURRENCY_LEVEL = Integer.getInteger("p2p.defaultConcurrencyLevel", Integer.MAX_VALUE / 2).intValue(); /** * The maximum number of concurrent senders sending a message to a group of recipients. 
*/ static private final int MAX_GROUP_SENDERS = Integer.getInteger("p2p.maxGroupSenders", DEFAULT_CONCURRENCY_LEVEL).intValue(); private Semaphore groupUnorderedSenderSem; // TODO this should be final? private Semaphore groupOrderedSenderSem; // TODO this should be final? // /** // * cause of abnormal shutdown, if any // */ // private volatile Exception shutdownCause; private Semaphore getGroupSem(boolean ordered) { if (ordered) { return this.groupOrderedSenderSem; } else { return this.groupUnorderedSenderSem; } } private void acquireGroupSendPermission(boolean ordered) { if (this.disconnected) { throw new com.gemstone.gemfire.distributed.DistributedSystemDisconnectedException(LocalizedStrings.DirectChannel_DIRECT_CHANNEL_HAS_BEEN_STOPPED.toLocalizedString()); } // @todo darrel: add some stats final Semaphore s = getGroupSem(ordered); for (;;) { this.conduit.getCancelCriterion().checkCancelInProgress(null); boolean interrupted = Thread.interrupted(); try { s.acquire(); break; } catch (InterruptedException ex) { interrupted = true; } finally { if (interrupted) { Thread.currentThread().interrupt(); } } } // for if (this.disconnected) { s.release(); throw new DistributedSystemDisconnectedException(LocalizedStrings.DirectChannel_COMMUNICATIONS_DISCONNECTED.toLocalizedString()); } } private void releaseGroupSendPermission(boolean ordered) { final Semaphore s = getGroupSem(ordered); s.release(); } /** * Returns true if calling thread owns its own communication resources. 
*/ boolean threadOwnsResources() { DM d = getDM(); if (d != null) { return d.getSystem().threadOwnsResources() && !AlertAppender.isThreadAlerting(); } return false; // Boolean b = getThreadOwnsResourcesRegistration(); // if (b == null) { // // thread does not have a preference so return default // return !this.owner.shareSockets; // return false; // } else { // return b.booleanValue(); // } } /** * This is basically just sendToMany, giving us a way to see on the stack * whether we are sending to a single member or multiple members, in which * case the group-send lock will be held during distribution. * * @param mgr - the membership manager * @param p_destinations - the list of addresses to send the message to. * @param msg - the message to send * @param ackWaitThreshold * @param ackSAThreshold the severe alert threshold * @return number of bytes sent * @throws ConnectExceptions if message could not be send to its * <code>destination</code> * @throws NotSerializableException * If the msg cannot be serialized */ private final int sendToOne(final MembershipManager mgr, InternalDistributedMember[] p_destinations, final DistributionMessage msg, long ackWaitThreshold, long ackSAThreshold) throws ConnectExceptions, NotSerializableException { return sendToMany(mgr, p_destinations, msg, ackWaitThreshold, ackSAThreshold); } /** * Sends a msg to a list of destinations. This code does some special optimizations * to stream large messages * @param mgr - the membership manager * @param p_destinations - the list of addresses to send the message to. 
* @param msg - the message to send * @param ackWaitThreshold * @param ackSAThreshold the severe alert threshold * @return number of bytes sent * @throws ConnectExceptions if message could not be send to its * <code>destination</code> * @throws NotSerializableException * If the msg cannot be serialized */ private int sendToMany(final MembershipManager mgr, InternalDistributedMember[] p_destinations, final DistributionMessage msg, long ackWaitThreshold, long ackSAThreshold) throws ConnectExceptions, NotSerializableException { InternalDistributedMember destinations[] = p_destinations; // Collects connect exceptions that happened during previous attempts to send. // These represent members we are not able to distribute to. ConnectExceptions failedCe = null; // Describes the destinations that we need to retry the send to. ConnectExceptions retryInfo = null; int bytesWritten = 0; boolean retry = false; final boolean orderedMsg = msg.orderedDelivery() || Connection.isDominoThread(); //Connections we actually sent messages to. final List totalSentCons = new ArrayList(destinations.length); boolean interrupted = false; long ackTimeout = 0; long ackSDTimeout = 0; long startTime = 0; final DirectReplyMessage directMsg; if (msg instanceof DirectReplyMessage) { directMsg = (DirectReplyMessage)msg; } else { directMsg = null; } if (directMsg != null || msg.getProcessorId() > 0) { ackTimeout = (int)(ackWaitThreshold * 1000); if (msg.isSevereAlertCompatible() || ReplyProcessor21.isSevereAlertProcessingForced()) { ackSDTimeout = (int)(ackSAThreshold * 1000); if (ReplyProcessor21.getShortSevereAlertProcessing()) { ackSDTimeout = (int)(ReplyProcessor21.PR_SEVERE_ALERT_RATIO * ackSDTimeout); } } } boolean directReply = false; if (directMsg != null && directMsg.supportsDirectAck() && threadOwnsResources()) { directReply = true; } //If this is a direct reply message, but we are sending it //over the shared socket, tell the message it needs to //use a regular reply processor. 
if (!directReply && directMsg != null) { directMsg.registerProcessor(); } try { do { interrupted = interrupted || Thread.interrupted(); /** * Exceptions that happened during one attempt to send */ if (retryInfo != null) { // need to retry to each of the guys in the exception List retryMembers = retryInfo.getMembers(); InternalDistributedMember[] retryDest = new InternalDistributedMember[retryMembers.size()]; retryDest = (InternalDistributedMember[])retryMembers.toArray(retryDest); destinations = retryDest; retryInfo = null; retry = true; } final List cons = new ArrayList(destinations.length); ConnectExceptions ce = getConnections(mgr, msg, destinations, orderedMsg, retry, ackTimeout, ackSDTimeout, cons); if (directReply && msg.getProcessorId() > 0) { // no longer a direct-reply message? directReply = false; } if (ce != null) { if (failedCe != null) { failedCe.getMembers().addAll(ce.getMembers()); failedCe.getCauses().addAll(ce.getCauses()); } else { failedCe = ce; } ce = null; } if (cons.isEmpty()) { if (failedCe != null) { throw failedCe; } return bytesWritten; } boolean sendingToGroup = cons.size() > 1; Connection permissionCon = null; if (sendingToGroup) { acquireGroupSendPermission(orderedMsg); } else { // sending to just one guy permissionCon = (Connection)cons.get(0); if (permissionCon != null) { try { permissionCon.acquireSendPermission(); } catch (ConnectionException conEx) { // Set retryInfo and then retry. // We want to keep calling TCPConduit.getConnection until it doesn't // return a connection. retryInfo = new ConnectExceptions(); retryInfo.addFailure(permissionCon.getRemoteAddress(), conEx); continue; } } } try { if (logger.isTraceEnabled(LogMarker.DM)) { logger.trace(LogMarker.DM, "{}{}) to {} peers ({}) via tcp/ip", (retry ? 
"Retrying send (" : "Sending ("), msg, cons.size(), cons); } DMStats stats = getDMStats(); List<?> sentCons; // used for cons we sent to this time final BaseMsgStreamer ms = MsgStreamer.create(cons, msg, directReply, stats); try { startTime = 0; if (ackTimeout > 0) { startTime = System.currentTimeMillis(); } ms.reserveConnections(startTime, ackTimeout, ackSDTimeout); int result = ms.writeMessage(); if (bytesWritten == 0) { // bytesWritten only needs to be set once. // if we have to do a retry we don't want to count // each one's bytes. bytesWritten = result; } ce = ms.getConnectExceptions(); sentCons = ms.getSentConnections(); totalSentCons.addAll(sentCons); } catch (NotSerializableException e) { throw e; } catch (ToDataException e) { throw e; } catch (IOException ex) { throw new InternalGemFireException(LocalizedStrings.DirectChannel_UNKNOWN_ERROR_SERIALIZING_MESSAGE.toLocalizedString(), ex); } finally { try { ms.close(); } catch (IOException e) { throw new InternalGemFireException( "Unknown error serializing message", e); } } if (ce != null) { retryInfo = ce; ce = null; } if (directReply && !sentCons.isEmpty()) { long readAckStart = 0; if (stats != null) { readAckStart = stats.startReplyWait(); } try { ce = readAcks(sentCons, startTime, ackTimeout, ackSDTimeout, ce, directMsg.getDirectReplyProcessor()); } finally { if (stats != null) { stats.endReplyWait(readAckStart, startTime); } } } } finally { if (sendingToGroup) { releaseGroupSendPermission(orderedMsg); } else if (permissionCon != null) { permissionCon.releaseSendPermission(); } } if (ce != null) { if (retryInfo != null) { retryInfo.getMembers().addAll(ce.getMembers()); retryInfo.getCauses().addAll(ce.getCauses()); } else { retryInfo = ce; } ce = null; } if (retryInfo != null) { this.conduit.getCancelCriterion().checkCancelInProgress(null); } } while (retryInfo != null); } finally { if (interrupted) { Thread.currentThread().interrupt(); } for (Iterator it=totalSentCons.iterator(); it.hasNext();) { Connection 
con = (Connection)it.next(); con.setInUse(false, 0, 0, 0, null); } } if (failedCe != null) { throw failedCe; } return bytesWritten; } private ConnectExceptions readAcks(List sentCons, long startTime, long ackTimeout, long ackSDTimeout, ConnectExceptions cumulativeExceptions, DirectReplyProcessor processor) { ConnectExceptions ce = cumulativeExceptions; for (Iterator it=sentCons.iterator(); it.hasNext();) { Connection con = (Connection)it.next(); //We don't expect replies on shared connections. if(con.isSharedResource()) { continue; } int msToWait = (int)(ackTimeout - (System.currentTimeMillis() - startTime)); // if the wait threshold has already been reached during transmission // of the message, set a small wait period just to make sure the // acks haven't already come back if (msToWait <= 0) { msToWait = 10; } long msInterval = ackSDTimeout; if (msInterval <= 0) { msInterval = Math.max(ackTimeout, 1000); } try { try { con.readAck(msToWait, msInterval, processor); } catch (SocketTimeoutException ex) { handleAckTimeout(ackTimeout, ackSDTimeout, con, processor); } } catch (ConnectionException conEx) { if (ce == null) { ce = new ConnectExceptions(); } ce.addFailure(con.getRemoteAddress(), conEx); } } return ce; } /** * Obtain the connections needed to transmit a message. 
The connections are * put into the cons object (the last parameter) * * @param mgr the membership manager * @param msg the message to send * @param destinations who to send the message to * @param preserveOrder true if the msg should ordered * @param retry whether this is a retransmission * @param ackTimeout the ack warning timeout * @param ackSDTimeout the ack severe alert timeout * @param cons a list to hold the connections * @return null if everything went okay, or a ConnectExceptions object if some connections couldn't be obtained */ private ConnectExceptions getConnections( MembershipManager mgr, DistributionMessage msg, InternalDistributedMember[] destinations, boolean preserveOrder, boolean retry, long ackTimeout, long ackSDTimeout, List cons) { ConnectExceptions ce = null; for (int i=0; i < destinations.length; i++) { InternalDistributedMember destination = destinations[i]; if (destination == null) { continue; } if (localAddr.equals(destination)) { // jgroups does not deliver messages to a sender, so we don't support // it here either. continue; } Stub stub = mgr.getStubForMember(destination); if (stub == null) { // This should only happen if the member is no longer in the view. if (logger.isTraceEnabled(LogMarker.DM)) { logger.trace(LogMarker.DM, "No Stub for {}", destination); } // The only time getStubForMember returns null is if we are // shunning that member or we are shutting down. // So the following assertion is wrong: //Assert.assertTrue(!mgr.memberExists(destination)); // instead we should: // Assert.assertTrue(mgr.shutdownInProgress() || mgr.isShunned(destination)); //but this is not worth doing and isShunned is not public. // SO the assert has been deadcoded. 
if (ce == null) ce = new ConnectExceptions(); ce.addFailure(destination, new MissingStubException(LocalizedStrings.DirectChannel_NO_STUB_0.toLocalizedString())); } else { try { long startTime = 0; if (ackTimeout > 0) { startTime = System.currentTimeMillis(); } Connection con = conduit.getConnection(destination, stub, preserveOrder, retry, startTime, ackTimeout, ackSDTimeout); con.setInUse(true, startTime, 0, 0, null); // fix for bug#37657 cons.add(con); if(con.isSharedResource() && msg instanceof DirectReplyMessage) { DirectReplyMessage directMessage = (DirectReplyMessage) msg; directMessage.registerProcessor(); } } catch (IOException ex) { if (ce == null) ce = new ConnectExceptions(); ce.addFailure(destination, ex); } } } // for return ce; } /** * Method send. * @param mgr - the membership manager * @param destinations - the address(es) to send the message to. * @param msg - the message to send * @param ackWaitThreshold * @param ackSAThreshold severe alert threshold * @return number of bytes sent * @throws ConnectExceptions if message could not be send to one or more * of the <code>destinations</code> * @throws NotSerializableException * If the content cannot be serialized * @throws ConnectionException if the conduit has stopped */ public int send(MembershipManager mgr, InternalDistributedMember[] destinations, DistributionMessage msg, long ackWaitThreshold, long ackSAThreshold) throws ConnectExceptions, NotSerializableException { if (disconnected) { if (logger.isDebugEnabled()) { logger.debug("Returning from DirectChannel send because channel is disconnected: {}", msg); } return 0; } if (destinations == null) { if (logger.isDebugEnabled()) { logger.debug("Returning from DirectChannel send because null set passed in: {}", msg); } return 0; } if (destinations.length == 0) { if (logger.isDebugEnabled()) { logger.debug("Returning from DirectChannel send because empty destinations passed in {}", msg); } return 0; } msg.setSender(localAddr); if (destinations.length==1) 
{ return sendToOne(mgr, destinations, msg, ackWaitThreshold, ackSAThreshold); } else { return sendToMany(mgr, destinations, msg, ackWaitThreshold, ackSAThreshold); } } /** * Returns null if no stats available. */ public DMStats getDMStats() { DM dm = getDM(); if (dm != null) { return dm.getStats(); // fix for bug#34004 } else { return null; } } /** * Returns null if no config is available. * @since 4.2.2 */ public DistributionConfig getDMConfig() { DM dm = getDM(); if (dm != null) { return dm.getConfig(); } else { return null; } } /** * Returns null if no dm available. */ public DM getDM() { return this.receiver.getDM(); } /** * * @param ackTimeout ack wait threshold * @param ackSATimeout severe alert threshold * @param c * @param processor * @throws ConnectionException */ private void handleAckTimeout(long ackTimeout, long ackSATimeout, Connection c, DirectReplyProcessor processor) throws ConnectionException { DM dm = getDM(); Set activeMembers = dm.getDistributionManagerIds(); // Increment the stat dm.getStats().incReplyTimeouts(); // an alert that will show up in the console { final StringId msg = LocalizedStrings.DirectChannel_0_SECONDS_HAVE_ELAPSED_WHILE_WAITING_FOR_REPLY_FROM_1_ON_2_WHOSE_CURRENT_MEMBERSHIP_LIST_IS_3; final Object[] msgArgs = new Object[] {Long.valueOf(ackTimeout/1000), c.getRemoteAddress(), dm.getId(), activeMembers}; logger.warn(LocalizedMessage.create(msg, msgArgs)); msgArgs[3] = "(omitted)"; Breadcrumbs.setProblem(msg, msgArgs); if (ReplyProcessor21.THROW_EXCEPTION_ON_TIMEOUT) { // init the cause to be a TimeoutException so catchers can determine cause TimeoutException cause = new TimeoutException(LocalizedStrings.TIMED_OUT_WAITING_FOR_ACKS.toLocalizedString()); throw new InternalGemFireException(msg.toLocalizedString(msgArgs), cause); } } if (activeMembers.contains(c.getRemoteAddress())) { // wait for ack-severe-alert-threshold period first, then wait forever if (ackSATimeout > 0) { try { c.readAck((int)ackSATimeout, ackSATimeout, 
processor); return; } catch (SocketTimeoutException e) { Object[] args = new Object[] {Long.valueOf((ackSATimeout+ackTimeout)/1000), c.getRemoteAddress(), dm.getId(), activeMembers}; logger.fatal(LocalizedMessage.create( LocalizedStrings.DirectChannel_0_SECONDS_HAVE_ELAPSED_WHILE_WAITING_FOR_REPLY_FROM_1_ON_2_WHOSE_CURRENT_MEMBERSHIP_LIST_IS_3, args)); } } try { c.readAck(0, 0, processor); } catch (SocketTimeoutException ex) { // this can never happen when called with timeout of 0 logger.error(LocalizedMessage.create( LocalizedStrings.DirectChannel_UNEXPECTED_TIMEOUT_WHILE_WAITING_FOR_ACK_FROM__0, c.getRemoteAddress()), ex); } } else { logger.warn(LocalizedMessage.create( LocalizedStrings.DirectChannel_VIEW_NO_LONGER_HAS_0_AS_AN_ACTIVE_MEMBER_SO_WE_WILL_NO_LONGER_WAIT_FOR_IT, c.getRemoteAddress())); processor.memberDeparted(c.getRemoteAddress(), true); } } public void receive(DistributionMessage msg, int bytesRead, Stub connId) { if (disconnected) { return; } try { receiver.messageReceived(msg); } catch (MemberShunnedException e) { throw e; } catch (CancelException e) { // ignore } catch (Exception ex) { // Don't freak out if the DM is shutting down if (this.conduit.getCancelCriterion().cancelInProgress() == null) { logger.fatal(LocalizedMessage.create(LocalizedStrings.DirectChannel_WHILE_PULLING_A_MESSAGE), ex); } } } // public void newMemberConnected(InternalDistributedMember member, Stub id) { // receiver.newMemberConnected(member, id); // } public InternalDistributedMember getLocalAddress() { return this.localAddr; } /** * Ensure that the TCPConduit class gets loaded. * * @see SystemFailure#loadEmergencyClasses() */ public static void loadEmergencyClasses() { TCPConduit.loadEmergencyClasses(); } /** * Close the Conduit * * @see SystemFailure#emergencyClose() */ public void emergencyClose() { this.conduit.emergencyClose(); } /** * This closes down the Direct connection. 
Theoretically you can disconnect
 * and, if you need to use the channel again you can and it will automatically
 * reconnect.  Reconnection will cause a new local address to be generated.
 */
public synchronized void disconnect(Exception cause) {
  // this.shutdownCause = cause;
  // Mark the channel down first (receive() checks this flag and drops
  // messages), then release both group-send permission variants before
  // stopping the conduit. disconnectCompleted is cleared while the stop is
  // in progress so isOpen() reports true until shutdown finishes.
  this.disconnected = true;
  this.disconnectCompleted = false;
  releaseGroupSendPermission(true);
  releaseGroupSendPermission(false);
  this.conduit.stop(cause);
  this.disconnectCompleted = true;
}

/** Returns true until {@link #disconnect} has fully completed. */
public boolean isOpen() {
  return !disconnectCompleted;
}

/** returns the receiver to which this DirectChannel is delivering messages */
protected DistributedMembershipListener getReceiver() {
  return receiver;
}

/**
 * Returns the port on which this direct channel sends messages
 */
public int getPort() {
  return this.conduit.getPort();
}

/**
 * Returns the conduit over which this channel sends messages
 *
 * @since 2.1
 */
public TCPConduit getConduit() {
  return this.conduit;
}

/**
 * Resolves the local bind address: the "gemfire.jg-bind-address" system
 * property when set and non-empty, otherwise the local host.
 *
 * @param dm not read by this method; kept for the existing call signature
 * @throws RuntimeException wrapping an UnknownHostException if resolution fails
 */
private InetAddress initAddress(DistributedMembershipListener dm) {
  String bindAddress = System.getProperty("gemfire.jg-bind-address");
  try {
    /* note: had to change the following to make sure the prop wasn't empty
       in addition to not null for admin.DistributedSystemFactory */
    if (bindAddress != null && bindAddress.length() > 0) {
      return InetAddress.getByName(bindAddress);
    }
    else {
      return SocketCreator.getLocalHost();
    }
  }
  catch (java.net.UnknownHostException unhe) {
    throw new RuntimeException(unhe);
  }
}

/** Create a TCPConduit stub from a JGroups InternalDistributedMember */
public Stub createConduitStub(InternalDistributedMember addr) {
  int port = addr.getDirectChannelPort();
  Stub stub = new Stub(addr.getIpAddress(), port, addr.getVmViewId());
  return stub;
}

/** Convenience overload: closes the endpoint and notifies of the disconnect. */
public void closeEndpoint(InternalDistributedMember member, String reason) {
  closeEndpoint(member, reason, true);
}

/**
 * Closes any connections used to communicate with the given jgroupsAddress.
 */
public void closeEndpoint(InternalDistributedMember member, String reason,
    boolean notifyDisconnect) {
  // Snapshot the conduit reference; it may be nulled by shutdown elsewhere.
  TCPConduit tc = this.conduit;
  if (tc != null) {
    tc.removeEndpoint(createConduitStub(member), reason, notifyDisconnect);
  }
}

/**
 * adds state for thread-owned serial connections to the given member to
 * the parameter <i>result</i>. This can be used to wait for the state to
 * reach the given level in the member's vm.
 * @param member
 *    the member whose state is to be captured
 * @param result
 *    the map to add the state to
 * @since 5.1
 */
public void getChannelStates(Stub member, HashMap result) {
  TCPConduit tc = this.conduit;
  if (tc != null) {
    tc.getThreadOwnedOrderedConnectionState(member, result);
  }
}

/**
 * wait for the given connections to process the number of messages
 * associated with the connection in the given map
 */
public void waitForChannelState(Stub member, HashMap channelState)
    throws InterruptedException {
  // Honor a pending interrupt before delegating to the conduit.
  if (Thread.interrupted()) throw new InterruptedException();
  TCPConduit tc = this.conduit;
  if (tc != null) {
    tc.waitForThreadOwnedOrderedConnectionState(member, channelState);
  }
}

/**
 * returns true if there are still receiver threads for the given member
 */
public boolean hasReceiversFor(Stub mbr) {
  return this.conduit.hasReceiversFor(mbr);
}

/**
 * cause the channel to be sick
 */
public void beSick() {
  TCPConduit tc = this.conduit;
  if (tc != null) {
    tc.beSick();
  }
}

/**
 * cause the channel to be healthy
 */
public void beHealthy() {
  TCPConduit tc = this.conduit;
  if (tc != null) {
    tc.beHealthy();
  }
}
}
/*
 * Copyright 2021 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.datastream.v1alpha1;

import com.google.api.pathtemplate.PathTemplate;
import com.google.api.resourcenames.ResourceName;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import javax.annotation.Generated;

// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
 * Typed wrapper for the resource name pattern
 * {@code projects/{project}/locations/{location}/privateConnections/{private_connection}}.
 */
@Generated("by gapic-generator-java")
public class PrivateConnectionName implements ResourceName {
  private static final PathTemplate PROJECT_LOCATION_PRIVATE_CONNECTION =
      PathTemplate.createWithoutUrlEncoding(
          "projects/{project}/locations/{location}/privateConnections/{private_connection}");
  // Lazily built via double-checked locking in getFieldValuesMap().
  private volatile Map<String, String> fieldValuesMap;
  private final String project;
  private final String location;
  private final String privateConnection;

  @Deprecated
  protected PrivateConnectionName() {
    project = null;
    location = null;
    privateConnection = null;
  }

  private PrivateConnectionName(Builder builder) {
    project = Preconditions.checkNotNull(builder.getProject());
    location = Preconditions.checkNotNull(builder.getLocation());
    privateConnection = Preconditions.checkNotNull(builder.getPrivateConnection());
  }

  public String getProject() {
    return project;
  }

  public String getLocation() {
    return location;
  }

  public String getPrivateConnection() {
    return privateConnection;
  }

  public static Builder newBuilder() {
    return new Builder();
  }

  public Builder toBuilder() {
    return new Builder(this);
  }

  public static PrivateConnectionName of(
      String project, String location, String privateConnection) {
    return newBuilder()
        .setProject(project)
        .setLocation(location)
        .setPrivateConnection(privateConnection)
        .build();
  }

  public static String format(String project, String location, String privateConnection) {
    return newBuilder()
        .setProject(project)
        .setLocation(location)
        .setPrivateConnection(privateConnection)
        .build()
        .toString();
  }

  /**
   * Parses a formatted resource name string into a {@link PrivateConnectionName}.
   * Returns {@code null} for an empty string; throws if the string does not
   * match the expected pattern.
   */
  public static PrivateConnectionName parse(String formattedString) {
    if (formattedString.isEmpty()) {
      return null;
    }
    Map<String, String> matchMap =
        PROJECT_LOCATION_PRIVATE_CONNECTION.validatedMatch(
            formattedString, "PrivateConnectionName.parse: formattedString not in valid format");
    return of(
        matchMap.get("project"), matchMap.get("location"), matchMap.get("private_connection"));
  }

  public static List<PrivateConnectionName> parseList(List<String> formattedStrings) {
    List<PrivateConnectionName> list = new ArrayList<>(formattedStrings.size());
    for (String formattedString : formattedStrings) {
      list.add(parse(formattedString));
    }
    return list;
  }

  public static List<String> toStringList(List<PrivateConnectionName> values) {
    List<String> list = new ArrayList<>(values.size());
    for (PrivateConnectionName value : values) {
      if (value == null) {
        // Null entries are rendered as empty strings rather than dropped.
        list.add("");
      } else {
        list.add(value.toString());
      }
    }
    return list;
  }

  public static boolean isParsableFrom(String formattedString) {
    return PROJECT_LOCATION_PRIVATE_CONNECTION.matches(formattedString);
  }

  @Override
  public Map<String, String> getFieldValuesMap() {
    if (fieldValuesMap == null) {
      synchronized (this) {
        if (fieldValuesMap == null) {
          ImmutableMap.Builder<String, String> fieldMapBuilder = ImmutableMap.builder();
          if (project != null) {
            fieldMapBuilder.put("project", project);
          }
          if (location != null) {
            fieldMapBuilder.put("location", location);
          }
          if (privateConnection != null) {
            fieldMapBuilder.put("private_connection", privateConnection);
          }
          fieldValuesMap = fieldMapBuilder.build();
        }
      }
    }
    return fieldValuesMap;
  }

  public String getFieldValue(String fieldName) {
    return getFieldValuesMap().get(fieldName);
  }

  @Override
  public String toString() {
    return PROJECT_LOCATION_PRIVATE_CONNECTION.instantiate(
        "project", project, "location", location, "private_connection", privateConnection);
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    // BUG FIX: the original condition was `o != null || getClass() == o.getClass()`.
    // With `||`, equals(null) evaluates `o.getClass()` and throws a
    // NullPointerException, and any non-null object of a different class falls
    // into the cast below and throws ClassCastException — both violations of
    // the Object.equals contract (equals(null) must return false).
    if (o != null && getClass() == o.getClass()) {
      PrivateConnectionName that = ((PrivateConnectionName) o);
      return Objects.equals(this.project, that.project)
          && Objects.equals(this.location, that.location)
          && Objects.equals(this.privateConnection, that.privateConnection);
    }
    return false;
  }

  @Override
  public int hashCode() {
    int h = 1;
    h *= 1000003;
    h ^= Objects.hashCode(project);
    h *= 1000003;
    h ^= Objects.hashCode(location);
    h *= 1000003;
    h ^= Objects.hashCode(privateConnection);
    return h;
  }

  /**
   * Builder for projects/{project}/locations/{location}/privateConnections/{private_connection}.
   */
  public static class Builder {
    private String project;
    private String location;
    private String privateConnection;

    protected Builder() {}

    public String getProject() {
      return project;
    }

    public String getLocation() {
      return location;
    }

    public String getPrivateConnection() {
      return privateConnection;
    }

    public Builder setProject(String project) {
      this.project = project;
      return this;
    }

    public Builder setLocation(String location) {
      this.location = location;
      return this;
    }

    public Builder setPrivateConnection(String privateConnection) {
      this.privateConnection = privateConnection;
      return this;
    }

    private Builder(PrivateConnectionName privateConnectionName) {
      this.project = privateConnectionName.project;
      this.location = privateConnectionName.location;
      this.privateConnection = privateConnectionName.privateConnection;
    }

    public PrivateConnectionName build() {
      return new PrivateConnectionName(this);
    }
  }
}
package main;

import inputOutput.ExchangePredicates.UnosNodeInputPredicates;
import inputOutput.Reports.UnosNodeAttributeSet;
import inputOutput.core.Attribute;
import inputOutput.core.AttributeConverter;
import inputOutput.core.CompositeAttributeConverter;
import inputOutput.core.CsvFormatUtil;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.List;
import java.util.Map;

import kepLib.KepInstance;
import kepLib.KepParseData;
import kepLib.KepTextReaderWriter;
import multiPeriod.MultiPeriodCyclePacking.EffectiveNodeType;
import multiPeriod.MultiPeriodCyclePacking.MultiPeriodCyclePackingInputs;
import multiPeriod.TimeInstant;
import multiPeriodAnalysis.Environment;

import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;

import protoModeler.UnosPredicateBuilder.UnosHistoricData;
import unosData.UnosDonorEdge;
import unosData.UnosExchangeUnit;

import com.beust.jcommander.internal.Lists;
import com.google.common.base.Function;
import com.google.common.base.Functions;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;

import data.BloodType;
import exchangeGraph.minWaitingTime.MinWaitingTimeProblemData;

/**
 * Batch driver that loads the UNOS kidney-exchange data set and writes a
 * series of CSV summaries (blood types, donor/patient power, PRA, pair match
 * power) plus a KEP instance export into {@code output/unosSummary/}.
 *
 * <p>Each instance of this class collects named attribute columns for the
 * exchange units that pass a node filter and prints them as one CSV file.
 */
public class UnosInputSummary {

  /**
   * Entry point: builds the environment once and runs every experiment.
   *
   * @param args unused
   */
  public static void main(String[] args) {
    new File(outDir).mkdirs();
    Environment<UnosExchangeUnit, UnosDonorEdge, Double> env = Environment
        .unosEnvironment(1, 0);
    MultiPeriodCyclePackingInputs<UnosExchangeUnit, UnosDonorEdge, Double> multiPeriodInputs = env
        .getMultiPeriodInputs();
    UnosNodeAttributeSet attributeSet = new UnosNodeAttributeSet(
        multiPeriodInputs, (UnosHistoricData) env.getHistoricData());
    UnosNodeInputPredicates inputPredicates = new UnosNodeInputPredicates(
        attributeSet);
    ExperimentInputs experimentInputs = new ExperimentInputs(attributeSet,
        inputPredicates,
        ImmutableList.copyOf(multiPeriodInputs.getGraph().getVertices()));
    experimentPairMatchPowerDistribution(experimentInputs);
    experimentDonorPowerByBloodType(experimentInputs);
    experimentNodeBloodTypes(experimentInputs);
    experimentPatientPraByDonorBloodType(experimentInputs);
    experimentPatientPraVsDonorPower(experimentInputs);
    experimentPairSummaryInfo(experimentInputs);
    experimentMakeKEP(multiPeriodInputs);
    env.shutDown();
  }

  /** Directory all CSV outputs are written to. */
  private static String outDir = "output" + File.separator + "unosSummary"
      + File.separator;

  /**
   * Writes patient/donor blood types for paired, single-donor nodes.
   * (Renamed from the misspelled {@code experiementNodeBloodTypes}; the only
   * caller is {@link #main} in this file.)
   */
  private static void experimentNodeBloodTypes(ExperimentInputs in) {
    UnosInputSummary summary = new UnosInputSummary(in.getExchangeUnits(),
        Predicates.and(
            in.getInputPredicates().effectiveNodeTypeIs(
                EffectiveNodeType.paired),
            in.getInputPredicates().exactlyOneDonor()));
    UnosNodeAttributeSet att = in.getAttributeSet();
    summary.addAttribute(
        string(att.getUnosNodeInputAttributes().makePatientNodeAttribute(
            att.getUnosPatientInputAttributes().getBloodType())),
        "patientBlood");
    summary.addAttribute(
        string(att.getUnosNodeInputAttributes().makeDonorNodeAttribute(
            att.getUnosDonorInputAttributes().getBloodType(), 0)),
        "donorBlood");
    summary.print(outDir + "patientDonorPairBlood.csv");
  }

  /** Writes the donor power distribution, one file per donor blood type. */
  private static void experimentDonorPowerByBloodType(ExperimentInputs in) {
    for (BloodType bloodType : BloodType.values()) {
      UnosInputSummary summary = new UnosInputSummary(in.getExchangeUnits(),
          Predicates.and(
              in.getInputPredicates().effectiveNodeTypeIs(
                  EffectiveNodeType.paired),
              in.getInputPredicates().exactlyOneDonor(),
              in.getInputPredicates().firstDonorBloodTypeIs(bloodType)));
      summary.addAttribute(threeDecimals(in.getAttributeSet()
          .getNodeInputAttributes().getDonorPowerPostPreferences()),
          "donorPower");
      summary.print(outDir + "donorPowerBlood" + bloodType + ".csv");
    }
  }

  /** Writes patient PRA values, one file per donor blood type. */
  private static void experimentPatientPraByDonorBloodType(ExperimentInputs in) {
    for (BloodType bloodType : BloodType.values()) {
      UnosInputSummary summary = new UnosInputSummary(in.getExchangeUnits(),
          Predicates.and(
              in.getInputPredicates().effectiveNodeTypeIs(
                  EffectiveNodeType.paired),
              in.getInputPredicates().exactlyOneDonor(),
              in.getInputPredicates().firstDonorBloodTypeIs(bloodType)));
      summary.addAttribute(
          threeDecimals(in.getAttributeSet().getUnosNodeInputAttributes()
              .makePatientNodeAttribute(
                  in.getAttributeSet().getUnosPatientInputAttributes()
                      .getPra())),
          "patientPra");
      summary.print(outDir + "patientPraForDonorBlood" + bloodType + ".csv");
    }
  }

  /** Writes (donorPower, patientPra) rows for all paired nodes. */
  private static void experimentPatientPraVsDonorPower(ExperimentInputs in) {
    UnosInputSummary summary = new UnosInputSummary(in.getExchangeUnits(),
        in.getInputPredicates().effectiveNodeTypeIs(EffectiveNodeType.paired));
    summary.addAttribute(threeDecimals(in.getAttributeSet()
        .getNodeInputAttributes().getDonorPowerPostPreferences()),
        "donorPower");
    summary.addAttribute(
        threeDecimals(in.getAttributeSet().getUnosNodeInputAttributes()
            .makePatientNodeAttribute(
                in.getAttributeSet().getUnosPatientInputAttributes()
                    .getPra())),
        "patientPra");
    summary.print(outDir + "patientPraVsDonorPower.csv");
  }

  /** Writes donor/patient/pair match power columns for all paired nodes. */
  private static void experimentPairMatchPowerDistribution(ExperimentInputs in) {
    UnosInputSummary summary = new UnosInputSummary(in.getExchangeUnits(),
        in.getInputPredicates().effectiveNodeTypeIs(EffectiveNodeType.paired));
    summary.addAttribute(threeDecimals(in.getAttributeSet()
        .getNodeInputAttributes().getDonorPowerPostPreferences()),
        "donorPower");
    summary.addAttribute(threeDecimals(in.getAttributeSet()
        .getNodeInputAttributes().getReceiverPowerPostPreferences()),
        "patientPower");
    summary.addAttribute(noDecimals(in.getAttributeSet()
        .getNodeInputAttributes().getPairMatchPowerPostPreferences()),
        "pairMatchPower");
    summary.print(outDir + "pairMatchPower.csv");
  }

  /**
   * Exports the full data set as a KEP instance plus node arrival times.
   * Cycle cap is 3; chain cap is unbounded (Integer.MAX_VALUE).
   */
  private static void experimentMakeKEP(
      MultiPeriodCyclePackingInputs<UnosExchangeUnit, UnosDonorEdge, Double> multiPeriod) {
    final KepInstance<UnosExchangeUnit, UnosDonorEdge> kepInstance =
        new KepInstance<UnosExchangeUnit, UnosDonorEdge>(
            multiPeriod.getProblemData(), Functions.constant(1.0),
            Integer.MAX_VALUE, 3, 0);
    Function<UnosExchangeUnit, String> nodeNames =
        new Function<UnosExchangeUnit, String>() {
          @Override
          public String apply(UnosExchangeUnit unit) {
            // Root nodes are named by their first donor's id ("a" prefix),
            // all other nodes by their patient's id ("p" prefix).
            return kepInstance.getRootNodes().contains(unit)
                ? "a" + unit.getDonors().get(0).getId()
                : "p" + unit.getPatient().getId();
          }
        };
    Function<UnosDonorEdge, String> edgeNames = KepParseData
        .anonymousEdgeNames(kepInstance);
    KepTextReaderWriter.INSTANCE.write(kepInstance, nodeNames, edgeNames,
        outDir + "allUnosKep.csv");
    ImmutableMap.Builder<UnosExchangeUnit, Double> nodeArrivalTimes =
        ImmutableMap.builder();
    for (Map.Entry<UnosExchangeUnit, TimeInstant<Double>> entry : multiPeriod
        .getNodeArrivalTimes().entrySet()) {
      nodeArrivalTimes.put(entry.getKey(), entry.getValue().getValue());
    }
    KepTextReaderWriter.INSTANCE.writeNodeArrivalTimes(
        outDir + "allUnosNodeArrivals.csv",
        new MinWaitingTimeProblemData<UnosExchangeUnit>(
            nodeArrivalTimes.build(), multiPeriod.getEndTime().getValue()),
        nodeNames);
  }

  /** Writes a combined per-pair summary (ids, powers, blood types, PRA). */
  private static void experimentPairSummaryInfo(ExperimentInputs in) {
    UnosInputSummary summary = new UnosInputSummary(in.getExchangeUnits(),
        in.getInputPredicates().effectiveNodeTypeIs(EffectiveNodeType.paired));
    summary.addAttribute(
        string(in.getAttributeSet().getUnosNodeInputAttributes()
            .makePatientNodeAttribute(
                in.getAttributeSet().getUnosPatientInputAttributes().getId())),
        "patientId");
    summary.addAttribute(threeDecimals(in.getAttributeSet()
        .getNodeInputAttributes().getDonorPowerPostPreferences()),
        "donorPower");
    summary.addAttribute(threeDecimals(in.getAttributeSet()
        .getNodeInputAttributes().getReceiverPowerPostPreferences()),
        "patientPower");
    summary.addAttribute(noDecimals(in.getAttributeSet()
        .getNodeInputAttributes().getPairMatchPowerPostPreferences()),
        "pairMatchPower");
    UnosNodeAttributeSet att = in.getAttributeSet();
    summary.addAttribute(
        string(att.getUnosNodeInputAttributes().makePatientNodeAttribute(
            att.getUnosPatientInputAttributes().getBloodType())),
        "patientBlood");
    summary.addAttribute(
        string(att.getUnosNodeInputAttributes().makeDonorNodeAttribute(
            att.getUnosDonorInputAttributes().getBloodType(), 0)),
        "donorBlood");
    summary.addAttribute(
        threeDecimals(in.getAttributeSet().getUnosNodeInputAttributes()
            .makePatientNodeAttribute(
                in.getAttributeSet().getUnosPatientInputAttributes()
                    .getPra())),
        "patientPra");
    summary.print(outDir + "pairSummaryInfo.csv");
  }

  // Filter deciding which exchange units become CSV rows.
  private final Predicate<UnosExchangeUnit> nodeFilter;
  // Column headers, parallel to attributeConverters.
  private final List<String> attributeNames;
  // One converter per output column.
  private final List<CompositeAttributeConverter<UnosExchangeUnit, ?>> attributeConverters;
  private final ImmutableList<UnosExchangeUnit> exchangeUnits;

  public UnosInputSummary(ImmutableList<UnosExchangeUnit> exchangeUnits,
      Predicate<UnosExchangeUnit> nodeFilter) {
    this.nodeFilter = nodeFilter;
    this.attributeConverters = Lists.newArrayList();
    this.attributeNames = Lists.newArrayList();
    this.exchangeUnits = exchangeUnits;
  }

  /** Registers a column: {@code converter} produces the values, {@code name} the header. */
  public void addAttribute(
      CompositeAttributeConverter<UnosExchangeUnit, ?> converter, String name) {
    this.attributeConverters.add(converter);
    this.attributeNames.add(name);
  }

  /**
   * Writes the header row and one row per exchange unit passing the filter.
   *
   * <p>BUG FIX: the original created the writer/printer outside any
   * try-with-resources/finally, so an IOException mid-write leaked the open
   * FileWriter. Closing the CSVPrinter also closes the underlying writer.
   *
   * @throws RuntimeException wrapping any IOException (as before)
   */
  public void print(String fileName) {
    try (BufferedWriter writer = new BufferedWriter(new FileWriter(fileName));
        CSVPrinter printer = new CSVPrinter(writer, CSVFormat.EXCEL)) {
      // print headers
      for (int i = 0; i < attributeConverters.size(); i++) {
        printer.print(this.attributeNames.get(i));
      }
      printer.println();
      // print rows
      for (UnosExchangeUnit unit : this.exchangeUnits) {
        if (nodeFilter.apply(unit)) {
          for (AttributeConverter<UnosExchangeUnit> converter : this.attributeConverters) {
            printer.print(converter.apply(unit));
          }
          printer.println();
        }
      }
      printer.flush();
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  /** Bundles the shared inputs passed to every experiment method. */
  private static class ExperimentInputs {
    private final UnosNodeAttributeSet attributeSet;
    private final UnosNodeInputPredicates inputPredicates;
    private final ImmutableList<UnosExchangeUnit> exchangeUnits;

    public UnosNodeAttributeSet getAttributeSet() {
      return attributeSet;
    }

    public UnosNodeInputPredicates getInputPredicates() {
      return inputPredicates;
    }

    public ImmutableList<UnosExchangeUnit> getExchangeUnits() {
      return exchangeUnits;
    }

    public ExperimentInputs(UnosNodeAttributeSet attributeSet,
        UnosNodeInputPredicates inputPredicates,
        ImmutableList<UnosExchangeUnit> exchangeUnits) {
      this.attributeSet = attributeSet;
      this.inputPredicates = inputPredicates;
      this.exchangeUnits = exchangeUnits;
    }
  }

  /** Column formatter: render a Double with no decimal places. */
  private static CompositeAttributeConverter<UnosExchangeUnit, Double> noDecimals(
      Attribute<UnosExchangeUnit, Double> attribute) {
    return new CompositeAttributeConverter<UnosExchangeUnit, Double>(attribute,
        CsvFormatUtil.noDecimals);
  }

  /** Column formatter: render a Double with three decimal places. */
  private static CompositeAttributeConverter<UnosExchangeUnit, Double> threeDecimals(
      Attribute<UnosExchangeUnit, Double> attribute) {
    return new CompositeAttributeConverter<UnosExchangeUnit, Double>(attribute,
        CsvFormatUtil.threeDecimals);
  }

  /** Column formatter: render any value via toString. */
  private static <T> CompositeAttributeConverter<UnosExchangeUnit, T> string(
      Attribute<UnosExchangeUnit, T> attribute) {
    return new CompositeAttributeConverter<UnosExchangeUnit, T>(attribute,
        CsvFormatUtil.toStringFormat);
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.commons.imaging;

import java.awt.Dimension;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Map;

import org.apache.commons.imaging.common.BinaryFileParser;
import org.apache.commons.imaging.common.IBufferedImageFactory;
import org.apache.commons.imaging.common.IImageMetadata;
import org.apache.commons.imaging.common.SimpleBufferedImageFactory;
import org.apache.commons.imaging.common.bytesource.ByteSource;
import org.apache.commons.imaging.common.bytesource.ByteSourceArray;
import org.apache.commons.imaging.common.bytesource.ByteSourceFile;
import org.apache.commons.imaging.formats.bmp.BmpImageParser;
import org.apache.commons.imaging.formats.dcx.DcxImageParser;
import org.apache.commons.imaging.formats.gif.GifImageParser;
import org.apache.commons.imaging.formats.icns.IcnsImageParser;
import org.apache.commons.imaging.formats.ico.IcoImageParser;
import org.apache.commons.imaging.formats.jpeg.JpegImageParser;
import org.apache.commons.imaging.formats.pcx.PcxImageParser;
import org.apache.commons.imaging.formats.png.PngImageParser;
import org.apache.commons.imaging.formats.pnm.PnmImageParser;
import org.apache.commons.imaging.formats.psd.PsdImageParser;
import org.apache.commons.imaging.formats.rgbe.RgbeImageParser;
import org.apache.commons.imaging.formats.tiff.TiffImageParser;
import org.apache.commons.imaging.formats.wbmp.WbmpImageParser;
import org.apache.commons.imaging.formats.xbm.XbmImageParser;
import org.apache.commons.imaging.formats.xpm.XpmImageParser;
import org.apache.commons.imaging.util.Debug;

/**
 * Base class for all format-specific image parsers. Provides the convenience
 * overloads (byte[], File) that funnel into the abstract ByteSource-based
 * methods each format implements, plus extension/type acceptance checks.
 */
public abstract class ImageParser extends BinaryFileParser implements
        SanselanConstants {

    /** Returns a fresh array containing one parser instance per supported format. */
    public static final ImageParser[] getAllImageParsers() {
        ImageParser result[] = {
                new JpegImageParser(), new TiffImageParser(),
                new PngImageParser(), new BmpImageParser(),
                new GifImageParser(), new PsdImageParser(),
                new PnmImageParser(), new IcoImageParser(),
                new IcnsImageParser(), new WbmpImageParser(),
                new PcxImageParser(), new DcxImageParser(),
                new XbmImageParser(), new XpmImageParser(),
                new RgbeImageParser()
                // new JBig2ImageParser(),
                // new TgaImageParser(),
        };
        return result;
    }

    public final IImageMetadata getMetadata(ByteSource byteSource)
            throws ImageReadException, IOException {
        return getMetadata(byteSource, null);
    }

    public abstract IImageMetadata getMetadata(ByteSource byteSource, Map params)
            throws ImageReadException, IOException;

    public final IImageMetadata getMetadata(byte bytes[])
            throws ImageReadException, IOException {
        return getMetadata(bytes, null);
    }

    public final IImageMetadata getMetadata(byte bytes[], Map params)
            throws ImageReadException, IOException {
        return getMetadata(new ByteSourceArray(bytes), params);
    }

    public final IImageMetadata getMetadata(File file)
            throws ImageReadException, IOException {
        return getMetadata(file, null);
    }

    /**
     * Reads metadata from a file, or returns null if the file's extension is
     * not one this parser accepts.
     */
    public final IImageMetadata getMetadata(File file, Map params)
            throws ImageReadException, IOException {
        if (debug) {
            System.out.println(getName() + ".getMetadata" + ": "
                    + file.getName());
        }

        if (!canAcceptExtension(file)) {
            return null;
        }

        return getMetadata(new ByteSourceFile(file), params);
    }

    public abstract ImageInfo getImageInfo(ByteSource byteSource, Map params)
            throws ImageReadException, IOException;

    public final ImageInfo getImageInfo(ByteSource byteSource)
            throws ImageReadException, IOException {
        return getImageInfo(byteSource, null);
    }

    public final ImageInfo getImageInfo(byte bytes[], Map params)
            throws ImageReadException, IOException {
        return getImageInfo(new ByteSourceArray(bytes), params);
    }

    public final ImageInfo getImageInfo(File file, Map params)
            throws ImageReadException, IOException {
        if (!canAcceptExtension(file)) {
            return null;
        }

        return getImageInfo(new ByteSourceFile(file), params);
    }

    /**
     * Default implementation: no format-compliance report. Formats that
     * support compliance checking override this.
     *
     * @throws ImageReadException may be thrown by sub-classes
     * @throws IOException may be thrown by sub-classes
     */
    public FormatCompliance getFormatCompliance(ByteSource byteSource)
            throws ImageReadException, IOException {
        return null;
    }

    public final FormatCompliance getFormatCompliance(byte bytes[])
            throws ImageReadException, IOException {
        return getFormatCompliance(new ByteSourceArray(bytes));
    }

    public final FormatCompliance getFormatCompliance(File file)
            throws ImageReadException, IOException {
        if (!canAcceptExtension(file)) {
            return null;
        }

        return getFormatCompliance(new ByteSourceFile(file));
    }

    /**
     * Default implementation returns the single image decoded by
     * getBufferedImage; multi-image formats override this.
     */
    public List<BufferedImage> getAllBufferedImages(ByteSource byteSource)
            throws ImageReadException, IOException {
        BufferedImage bi = getBufferedImage(byteSource, null);

        List<BufferedImage> result = new ArrayList<BufferedImage>();
        result.add(bi);
        return result;
    }

    public final List<BufferedImage> getAllBufferedImages(byte bytes[])
            throws ImageReadException, IOException {
        return getAllBufferedImages(new ByteSourceArray(bytes));
    }

    public final List<BufferedImage> getAllBufferedImages(File file)
            throws ImageReadException, IOException {
        if (!canAcceptExtension(file)) {
            return null;
        }

        return getAllBufferedImages(new ByteSourceFile(file));
    }

    public abstract BufferedImage getBufferedImage(ByteSource byteSource,
            Map params) throws ImageReadException, IOException;

    public final BufferedImage getBufferedImage(byte bytes[], Map params)
            throws ImageReadException, IOException {
        return getBufferedImage(new ByteSourceArray(bytes), params);
    }

    public final BufferedImage getBufferedImage(File file, Map params)
            throws ImageReadException, IOException {
        if (!canAcceptExtension(file)) {
            return null;
        }

        return getBufferedImage(new ByteSourceFile(file), params);
    }

    /**
     * Default implementation for read-only formats: closes the stream (the
     * contract obligates us to) and reports that writing is unsupported.
     * Writable formats override this.
     *
     * @throws IOException may be thrown by sub-classes
     */
    public void writeImage(BufferedImage src, OutputStream os, Map params)
            throws ImageWriteException, IOException {
        try {
            os.close(); // we are obligated to close stream.
        } catch (Exception e) {
            Debug.debug(e);
        }

        throw new ImageWriteException("This image format (" + getName()
                + ") cannot be written.");
    }

    public final Dimension getImageSize(byte bytes[])
            throws ImageReadException, IOException {
        return getImageSize(bytes, null);
    }

    public final Dimension getImageSize(byte bytes[], Map params)
            throws ImageReadException, IOException {
        return getImageSize(new ByteSourceArray(bytes), params);
    }

    public final Dimension getImageSize(File file) throws ImageReadException,
            IOException {
        return getImageSize(file, null);
    }

    public final Dimension getImageSize(File file, Map params)
            throws ImageReadException, IOException {
        if (!canAcceptExtension(file)) {
            return null;
        }

        return getImageSize(new ByteSourceFile(file), params);
    }

    public abstract Dimension getImageSize(ByteSource byteSource, Map params)
            throws ImageReadException, IOException;

    public abstract String getXmpXml(ByteSource byteSource, Map params)
            throws ImageReadException, IOException;

    public final byte[] getICCProfileBytes(byte bytes[])
            throws ImageReadException, IOException {
        return getICCProfileBytes(bytes, null);
    }

    public final byte[] getICCProfileBytes(byte bytes[], Map params)
            throws ImageReadException, IOException {
        return getICCProfileBytes(new ByteSourceArray(bytes), params);
    }

    public final byte[] getICCProfileBytes(File file)
            throws ImageReadException, IOException {
        return getICCProfileBytes(file, null);
    }

    public final byte[] getICCProfileBytes(File file, Map params)
            throws ImageReadException, IOException {
        if (!canAcceptExtension(file)) {
            return null;
        }

        if (debug) {
            System.out.println(getName() + ": " + file.getName());
        }

        return getICCProfileBytes(new ByteSourceFile(file), params);
    }

    public abstract byte[] getICCProfileBytes(ByteSource byteSource, Map params)
            throws ImageReadException, IOException;

    public final String dumpImageFile(byte bytes[]) throws ImageReadException,
            IOException {
        return dumpImageFile(new ByteSourceArray(bytes));
    }

    public final String dumpImageFile(File file) throws ImageReadException,
            IOException {
        if (!canAcceptExtension(file)) {
            return null;
        }

        if (debug) {
            System.out.println(getName() + ": " + file.getName());
        }

        return dumpImageFile(new ByteSourceFile(file));
    }

    /** Renders the format-specific dump into a String via a PrintWriter. */
    public final String dumpImageFile(ByteSource byteSource)
            throws ImageReadException, IOException {
        StringWriter sw = new StringWriter();
        PrintWriter pw = new PrintWriter(sw);

        dumpImageFile(pw, byteSource);

        pw.flush();

        return sw.toString();
    }

    /**
     * Default implementation: nothing dumped. Formats with a dump override.
     *
     * @throws ImageReadException may be thrown by sub-classes
     * @throws IOException may be thrown by sub-classes
     */
    public boolean dumpImageFile(PrintWriter pw, ByteSource byteSource)
            throws ImageReadException, IOException {
        return false;
    }

    public abstract boolean embedICCProfile(File src, File dst, byte profile[]);

    public abstract String getName();

    public abstract String getDefaultExtension();

    protected abstract String[] getAcceptedExtensions();

    protected abstract ImageFormat[] getAcceptedTypes();

    public boolean canAcceptType(ImageFormat type) {
        ImageFormat types[] = getAcceptedTypes();

        for (int i = 0; i < types.length; i++) {
            if (types[i].equals(type)) {
                return true;
            }
        }
        return false;
    }

    protected final boolean canAcceptExtension(File file) {
        return canAcceptExtension(file.getName());
    }

    /**
     * Returns true if the filename's extension (including the leading dot)
     * matches one of this parser's accepted extensions, ignoring case.
     */
    protected final boolean canAcceptExtension(String filename) {
        String exts[] = getAcceptedExtensions();
        if (exts == null) {
            return true;
        }

        int index = filename.lastIndexOf('.');
        if (index >= 0) {
            // BUG FIX: use an explicit locale. The default-locale
            // toLowerCase() is locale-sensitive — in the Turkish locale
            // "FILE.JPG" lowercases 'I' to dotless 'ı' and would fail to
            // match ".jpg".
            String ext = filename.substring(index).toLowerCase(Locale.ENGLISH);
            for (int i = 0; i < exts.length; i++) {
                if (exts[i].toLowerCase(Locale.ENGLISH).equals(ext)) {
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * Returns the factory supplied via PARAM_KEY / BUFFERED_IMAGE_FACTORY in
     * params, or a SimpleBufferedImageFactory when none was provided.
     */
    protected IBufferedImageFactory getBufferedImageFactory(Map params) {
        if (params == null) {
            return new SimpleBufferedImageFactory();
        }

        IBufferedImageFactory result = (IBufferedImageFactory) params
                .get(SanselanConstants.BUFFERED_IMAGE_FACTORY);

        if (null != result) {
            return result;
        }

        return new SimpleBufferedImageFactory();
    }

    /**
     * Returns the boolean value of PARAM_KEY_STRICT in params, defaulting to
     * false when params is null or the key is absent.
     */
    public static final boolean isStrict(Map params) {
        if (params == null || !params.containsKey(PARAM_KEY_STRICT)) {
            return false;
        }
        return ((Boolean) params.get(PARAM_KEY_STRICT)).booleanValue();
    }
}
package com.fsck.k9.mail.internet;

import com.fsck.k9.mail.Body;
import com.fsck.k9.mail.BodyPart;
import com.fsck.k9.mail.Message;
import com.fsck.k9.mail.MessagingException;
import com.fsck.k9.mail.Multipart;
import com.fsck.k9.mail.Part;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.input.BoundedInputStream;
import org.apache.james.mime4j.codec.Base64InputStream;
import org.apache.james.mime4j.codec.QuotedPrintableInputStream;
import org.apache.james.mime4j.util.MimeUtil;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Locale;
import java.util.regex.Pattern;

/**
 * Static utility methods for working with MIME content: a file-extension to
 * MIME-type lookup table, RFC 822 header unfolding/decoding helpers, header
 * parameter extraction, MIME-type matching, and body creation.
 */
public class MimeUtility {
    // Fallback MIME type used when no better match is known.
    public static final String DEFAULT_ATTACHMENT_MIME_TYPE = "application/octet-stream";
    // MIME type used for K-9 settings files.
    public static final String K9_SETTINGS_MIME_TYPE = "application/x-k9settings";
    /*
     * Extension -> MIME type table, sourced from:
     * http://www.w3schools.com/media/media_mimeref.asp
     * + http://www.stdicon.com/mimetypes
     */
    private static final String[][] MIME_TYPE_BY_EXTENSION_MAP = new String[][] {
    //* Do not delete the next three lines
    { "", DEFAULT_ATTACHMENT_MIME_TYPE },
    { "k9s", K9_SETTINGS_MIME_TYPE},
    { "txt", "text/plain"},
    //* Do not delete the previous three lines
    { "123", "application/vnd.lotus-1-2-3"}, { "323", "text/h323"}, { "3dml", "text/vnd.in3d.3dml"}, { "3g2", "video/3gpp2"}, { "3gp", "video/3gpp"}, { "aab", "application/x-authorware-bin"}, { "aac", "audio/x-aac"}, { "aam", "application/x-authorware-map"}, { "a", "application/octet-stream"}, { "aas", "application/x-authorware-seg"}, { "abw", "application/x-abiword"}, { "acc", "application/vnd.americandynamics.acc"}, { "ace", "application/x-ace-compressed"}, { "acu", "application/vnd.acucobol"}, { "acutc", "application/vnd.acucorp"}, { "acx", "application/internet-property-stream"}, { "adp", "audio/adpcm"}, { "aep", "application/vnd.audiograph"}, { "afm", "application/x-font-type1"}, { "afp", "application/vnd.ibm.modcap"}, { "ai", "application/postscript"}, { "aif", "audio/x-aiff"}, { "aifc",
    "audio/x-aiff"}, { "aiff", "audio/x-aiff"}, { "air", "application/vnd.adobe.air-application-installer-package+zip"}, { "ami", "application/vnd.amiga.ami"}, { "apk", "application/vnd.android.package-archive"}, { "application", "application/x-ms-application"}, { "apr", "application/vnd.lotus-approach"}, { "asc", "application/pgp-signature"}, { "asf", "video/x-ms-asf"}, { "asm", "text/x-asm"}, { "aso", "application/vnd.accpac.simply.aso"}, { "asr", "video/x-ms-asf"}, { "asx", "video/x-ms-asf"}, { "atc", "application/vnd.acucorp"}, { "atom", "application/atom+xml"}, { "atomcat", "application/atomcat+xml"}, { "atomsvc", "application/atomsvc+xml"}, { "atx", "application/vnd.antix.game-component"}, { "au", "audio/basic"}, { "avi", "video/x-msvideo"}, { "aw", "application/applixware"}, { "axs", "application/olescript"}, { "azf", "application/vnd.airzip.filesecure.azf"}, { "azs", "application/vnd.airzip.filesecure.azs"}, { "azw", "application/vnd.amazon.ebook"}, { "bas", "text/plain"}, { "bat", "application/x-msdownload"}, { "bcpio", "application/x-bcpio"}, { "bdf", "application/x-font-bdf"}, { "bdm", "application/vnd.syncml.dm+wbxml"}, { "bh2", "application/vnd.fujitsu.oasysprs"}, { "bin", "application/octet-stream"}, { "bmi", "application/vnd.bmi"}, { "bmp", "image/bmp"}, { "book", "application/vnd.framemaker"}, { "box", "application/vnd.previewsystems.box"}, { "boz", "application/x-bzip2"}, { "bpk", "application/octet-stream"}, { "btif", "image/prs.btif"}, { "bz2", "application/x-bzip2"}, { "bz", "application/x-bzip"}, { "c4d", "application/vnd.clonk.c4group"}, { "c4f", "application/vnd.clonk.c4group"}, { "c4g", "application/vnd.clonk.c4group"}, { "c4p", "application/vnd.clonk.c4group"}, { "c4u", "application/vnd.clonk.c4group"}, { "cab", "application/vnd.ms-cab-compressed"}, { "car", "application/vnd.curl.car"}, { "cat", "application/vnd.ms-pki.seccat"}, { "cct", "application/x-director"}, { "cc", "text/x-c"}, { "ccxml", "application/ccxml+xml"}, { "cdbcmsg",
    "application/vnd.contact.cmsg"}, { "cdf", "application/x-cdf"}, { "cdkey", "application/vnd.mediastation.cdkey"}, { "cdx", "chemical/x-cdx"}, { "cdxml", "application/vnd.chemdraw+xml"}, { "cdy", "application/vnd.cinderella"}, { "cer", "application/x-x509-ca-cert"}, { "cgm", "image/cgm"}, { "chat", "application/x-chat"}, { "chm", "application/vnd.ms-htmlhelp"}, { "chrt", "application/vnd.kde.kchart"}, { "cif", "chemical/x-cif"}, { "cii", "application/vnd.anser-web-certificate-issue-initiation"}, { "cla", "application/vnd.claymore"}, { "class", "application/java-vm"}, { "clkk", "application/vnd.crick.clicker.keyboard"}, { "clkp", "application/vnd.crick.clicker.palette"}, { "clkt", "application/vnd.crick.clicker.template"}, { "clkw", "application/vnd.crick.clicker.wordbank"}, { "clkx", "application/vnd.crick.clicker"}, { "clp", "application/x-msclip"}, { "cmc", "application/vnd.cosmocaller"}, { "cmdf", "chemical/x-cmdf"}, { "cml", "chemical/x-cml"}, { "cmp", "application/vnd.yellowriver-custom-menu"}, { "cmx", "image/x-cmx"}, { "cod", "application/vnd.rim.cod"}, { "com", "application/x-msdownload"}, { "conf", "text/plain"}, { "cpio", "application/x-cpio"}, { "cpp", "text/x-c"}, { "cpt", "application/mac-compactpro"}, { "crd", "application/x-mscardfile"}, { "crl", "application/pkix-crl"}, { "crt", "application/x-x509-ca-cert"}, { "csh", "application/x-csh"}, { "csml", "chemical/x-csml"}, { "csp", "application/vnd.commonspace"}, { "css", "text/css"}, { "cst", "application/x-director"}, { "csv", "text/csv"}, { "c", "text/plain"}, { "cu", "application/cu-seeme"}, { "curl", "text/vnd.curl"}, { "cww", "application/prs.cww"}, { "cxt", "application/x-director"}, { "cxx", "text/x-c"}, { "daf", "application/vnd.mobius.daf"}, { "dataless", "application/vnd.fdsn.seed"}, { "davmount", "application/davmount+xml"}, { "dcr", "application/x-director"}, { "dcurl", "text/vnd.curl.dcurl"}, { "dd2", "application/vnd.oma.dd2+xml"}, { "ddd", "application/vnd.fujixerox.ddd"}, { "deb",
    "application/x-debian-package"}, { "def", "text/plain"}, { "deploy", "application/octet-stream"}, { "der", "application/x-x509-ca-cert"}, { "dfac", "application/vnd.dreamfactory"}, { "dic", "text/x-c"}, { "diff", "text/plain"}, { "dir", "application/x-director"}, { "dis", "application/vnd.mobius.dis"}, { "dist", "application/octet-stream"}, { "distz", "application/octet-stream"}, { "djv", "image/vnd.djvu"}, { "djvu", "image/vnd.djvu"}, { "dll", "application/x-msdownload"}, { "dmg", "application/octet-stream"}, { "dms", "application/octet-stream"}, { "dna", "application/vnd.dna"}, { "doc", "application/msword"}, { "docm", "application/vnd.ms-word.document.macroenabled.12"}, { "docx", "application/vnd.openxmlformats-officedocument.wordprocessingml.document"}, { "dot", "application/msword"}, { "dotm", "application/vnd.ms-word.template.macroenabled.12"}, { "dotx", "application/vnd.openxmlformats-officedocument.wordprocessingml.template"}, { "dp", "application/vnd.osgi.dp"}, { "dpg", "application/vnd.dpgraph"}, { "dsc", "text/prs.lines.tag"}, { "dtb", "application/x-dtbook+xml"}, { "dtd", "application/xml-dtd"}, { "dts", "audio/vnd.dts"}, { "dtshd", "audio/vnd.dts.hd"}, { "dump", "application/octet-stream"}, { "dvi", "application/x-dvi"}, { "dwf", "model/vnd.dwf"}, { "dwg", "image/vnd.dwg"}, { "dxf", "image/vnd.dxf"}, { "dxp", "application/vnd.spotfire.dxp"}, { "dxr", "application/x-director"}, { "ecelp4800", "audio/vnd.nuera.ecelp4800"}, { "ecelp7470", "audio/vnd.nuera.ecelp7470"}, { "ecelp9600", "audio/vnd.nuera.ecelp9600"}, { "ecma", "application/ecmascript"}, { "edm", "application/vnd.novadigm.edm"}, { "edx", "application/vnd.novadigm.edx"}, { "efif", "application/vnd.picsel"}, { "ei6", "application/vnd.pg.osasli"}, { "elc", "application/octet-stream"}, { "eml", "message/rfc822"}, { "emma", "application/emma+xml"}, { "eol", "audio/vnd.digital-winds"}, { "eot", "application/vnd.ms-fontobject"}, { "eps", "application/postscript"}, { "epub", "application/epub+zip"}, {
    "es3", "application/vnd.eszigno3+xml"}, { "esf", "application/vnd.epson.esf"}, { "et3", "application/vnd.eszigno3+xml"}, { "etx", "text/x-setext"}, { "evy", "application/envoy"}, { "exe", "application/octet-stream"}, { "ext", "application/vnd.novadigm.ext"}, { "ez2", "application/vnd.ezpix-album"}, { "ez3", "application/vnd.ezpix-package"}, { "ez", "application/andrew-inset"}, { "f4v", "video/x-f4v"}, { "f77", "text/x-fortran"}, { "f90", "text/x-fortran"}, { "fbs", "image/vnd.fastbidsheet"}, { "fdf", "application/vnd.fdf"}, { "fe_launch", "application/vnd.denovo.fcselayout-link"}, { "fg5", "application/vnd.fujitsu.oasysgp"}, { "fgd", "application/x-director"}, { "fh4", "image/x-freehand"}, { "fh5", "image/x-freehand"}, { "fh7", "image/x-freehand"}, { "fhc", "image/x-freehand"}, { "fh", "image/x-freehand"}, { "fif", "application/fractals"}, { "fig", "application/x-xfig"}, { "fli", "video/x-fli"}, { "flo", "application/vnd.micrografx.flo"}, { "flr", "x-world/x-vrml"}, { "flv", "video/x-flv"}, { "flw", "application/vnd.kde.kivio"}, { "flx", "text/vnd.fmi.flexstor"}, { "fly", "text/vnd.fly"}, { "fm", "application/vnd.framemaker"}, { "fnc", "application/vnd.frogans.fnc"}, { "for", "text/x-fortran"}, { "fpx", "image/vnd.fpx"}, { "frame", "application/vnd.framemaker"}, { "fsc", "application/vnd.fsc.weblaunch"}, { "fst", "image/vnd.fst"}, { "ftc", "application/vnd.fluxtime.clip"}, { "f", "text/x-fortran"}, { "fti", "application/vnd.anser-web-funds-transfer-initiation"}, { "fvt", "video/vnd.fvt"}, { "fzs", "application/vnd.fuzzysheet"}, { "g3", "image/g3fax"}, { "gac", "application/vnd.groove-account"}, { "gdl", "model/vnd.gdl"}, { "geo", "application/vnd.dynageo"}, { "gex", "application/vnd.geometry-explorer"}, { "ggb", "application/vnd.geogebra.file"}, { "ggt", "application/vnd.geogebra.tool"}, { "ghf", "application/vnd.groove-help"}, { "gif", "image/gif"}, { "gim", "application/vnd.groove-identity-message"}, { "gmx", "application/vnd.gmx"}, { "gnumeric",
    "application/x-gnumeric"}, { "gph", "application/vnd.flographit"}, { "gqf", "application/vnd.grafeq"}, { "gqs", "application/vnd.grafeq"}, { "gram", "application/srgs"}, { "gre", "application/vnd.geometry-explorer"}, { "grv", "application/vnd.groove-injector"}, { "grxml", "application/srgs+xml"}, { "gsf", "application/x-font-ghostscript"}, { "gtar", "application/x-gtar"}, { "gtm", "application/vnd.groove-tool-message"}, { "gtw", "model/vnd.gtw"}, { "gv", "text/vnd.graphviz"}, { "gz", "application/x-gzip"}, { "h261", "video/h261"}, { "h263", "video/h263"}, { "h264", "video/h264"}, { "hbci", "application/vnd.hbci"}, { "hdf", "application/x-hdf"}, { "hh", "text/x-c"}, { "hlp", "application/winhlp"}, { "hpgl", "application/vnd.hp-hpgl"}, { "hpid", "application/vnd.hp-hpid"}, { "hps", "application/vnd.hp-hps"}, { "hqx", "application/mac-binhex40"}, { "hta", "application/hta"}, { "htc", "text/x-component"}, { "h", "text/plain"}, { "htke", "application/vnd.kenameaapp"}, { "html", "text/html"}, { "htm", "text/html"}, { "htt", "text/webviewhtml"}, { "hvd", "application/vnd.yamaha.hv-dic"}, { "hvp", "application/vnd.yamaha.hv-voice"}, { "hvs", "application/vnd.yamaha.hv-script"}, { "icc", "application/vnd.iccprofile"}, { "ice", "x-conference/x-cooltalk"}, { "icm", "application/vnd.iccprofile"}, { "ico", "image/x-icon"}, { "ics", "text/calendar"}, { "ief", "image/ief"}, { "ifb", "text/calendar"}, { "ifm", "application/vnd.shana.informed.formdata"}, { "iges", "model/iges"}, { "igl", "application/vnd.igloader"}, { "igs", "model/iges"}, { "igx", "application/vnd.micrografx.igx"}, { "iif", "application/vnd.shana.informed.interchange"}, { "iii", "application/x-iphone"}, { "imp", "application/vnd.accpac.simply.imp"}, { "ims", "application/vnd.ms-ims"}, { "ins", "application/x-internet-signup"}, { "in", "text/plain"}, { "ipk", "application/vnd.shana.informed.package"}, { "irm", "application/vnd.ibm.rights-management"}, { "irp", "application/vnd.irepository.package+xml"}, { "iso",
    "application/octet-stream"}, { "isp", "application/x-internet-signup"}, { "itp", "application/vnd.shana.informed.formtemplate"}, { "ivp", "application/vnd.immervision-ivp"}, { "ivu", "application/vnd.immervision-ivu"}, { "jad", "text/vnd.sun.j2me.app-descriptor"}, { "jam", "application/vnd.jam"}, { "jar", "application/java-archive"}, { "java", "text/x-java-source"}, { "jfif", "image/pipeg"}, { "jisp", "application/vnd.jisp"}, { "jlt", "application/vnd.hp-jlyt"}, { "jnlp", "application/x-java-jnlp-file"}, { "joda", "application/vnd.joost.joda-archive"}, { "jpeg", "image/jpeg"}, { "jpe", "image/jpeg"}, { "jpg", "image/jpeg"}, { "jpgm", "video/jpm"}, { "jpgv", "video/jpeg"}, { "jpm", "video/jpm"}, { "js", "application/x-javascript"}, { "json", "application/json"}, { "kar", "audio/midi"}, { "karbon", "application/vnd.kde.karbon"}, { "kfo", "application/vnd.kde.kformula"}, { "kia", "application/vnd.kidspiration"}, { "kil", "application/x-killustrator"}, { "kml", "application/vnd.google-earth.kml+xml"}, { "kmz", "application/vnd.google-earth.kmz"}, { "kne", "application/vnd.kinar"}, { "knp", "application/vnd.kinar"}, { "kon", "application/vnd.kde.kontour"}, { "kpr", "application/vnd.kde.kpresenter"}, { "kpt", "application/vnd.kde.kpresenter"}, { "ksh", "text/plain"}, { "ksp", "application/vnd.kde.kspread"}, { "ktr", "application/vnd.kahootz"}, { "ktz", "application/vnd.kahootz"}, { "kwd", "application/vnd.kde.kword"}, { "kwt", "application/vnd.kde.kword"}, { "latex", "application/x-latex"}, { "lbd", "application/vnd.llamagraphics.life-balance.desktop"}, { "lbe", "application/vnd.llamagraphics.life-balance.exchange+xml"}, { "les", "application/vnd.hhe.lesson-player"}, { "lha", "application/octet-stream"}, { "link66", "application/vnd.route66.link66+xml"}, { "list3820", "application/vnd.ibm.modcap"}, { "listafp", "application/vnd.ibm.modcap"}, { "list", "text/plain"}, { "log", "text/plain"}, { "lostxml", "application/lost+xml"}, { "lrf", "application/octet-stream"}, {
    "lrm", "application/vnd.ms-lrm"}, { "lsf", "video/x-la-asf"}, { "lsx", "video/x-la-asf"}, { "ltf", "application/vnd.frogans.ltf"}, { "lvp", "audio/vnd.lucent.voice"}, { "lwp", "application/vnd.lotus-wordpro"}, { "lzh", "application/octet-stream"}, { "m13", "application/x-msmediaview"}, { "m14", "application/x-msmediaview"}, { "m1v", "video/mpeg"}, { "m2a", "audio/mpeg"}, { "m2v", "video/mpeg"}, { "m3a", "audio/mpeg"}, { "m3u", "audio/x-mpegurl"}, { "m4u", "video/vnd.mpegurl"}, { "m4v", "video/x-m4v"}, { "ma", "application/mathematica"}, { "mag", "application/vnd.ecowin.chart"}, { "maker", "application/vnd.framemaker"}, { "man", "text/troff"}, { "mathml", "application/mathml+xml"}, { "mb", "application/mathematica"}, { "mbk", "application/vnd.mobius.mbk"}, { "mbox", "application/mbox"}, { "mc1", "application/vnd.medcalcdata"}, { "mcd", "application/vnd.mcd"}, { "mcurl", "text/vnd.curl.mcurl"}, { "mdb", "application/x-msaccess"}, { "mdi", "image/vnd.ms-modi"}, { "mesh", "model/mesh"}, { "me", "text/troff"}, { "mfm", "application/vnd.mfmp"}, { "mgz", "application/vnd.proteus.magazine"}, { "mht", "message/rfc822"}, { "mhtml", "message/rfc822"}, { "mid", "audio/midi"}, { "midi", "audio/midi"}, { "mif", "application/vnd.mif"}, { "mime", "message/rfc822"}, { "mj2", "video/mj2"}, { "mjp2", "video/mj2"}, { "mlp", "application/vnd.dolby.mlp"}, { "mmd", "application/vnd.chipnuts.karaoke-mmd"}, { "mmf", "application/vnd.smaf"}, { "mmr", "image/vnd.fujixerox.edmics-mmr"}, { "mny", "application/x-msmoney"}, { "mobi", "application/x-mobipocket-ebook"}, { "movie", "video/x-sgi-movie"}, { "mov", "video/quicktime"}, { "mp2a", "audio/mpeg"}, { "mp2", "video/mpeg"}, { "mp3", "audio/mpeg"}, { "mp4a", "audio/mp4"}, { "mp4s", "application/mp4"}, { "mp4", "video/mp4"}, { "mp4v", "video/mp4"}, { "mpa", "video/mpeg"}, { "mpc", "application/vnd.mophun.certificate"}, { "mpeg", "video/mpeg"}, { "mpe", "video/mpeg"}, { "mpg4", "video/mp4"}, { "mpga", "audio/mpeg"}, { "mpg", "video/mpeg"}, {
    "mpkg", "application/vnd.apple.installer+xml"}, { "mpm", "application/vnd.blueice.multipass"}, { "mpn", "application/vnd.mophun.application"}, { "mpp", "application/vnd.ms-project"}, { "mpt", "application/vnd.ms-project"}, { "mpv2", "video/mpeg"}, { "mpy", "application/vnd.ibm.minipay"}, { "mqy", "application/vnd.mobius.mqy"}, { "mrc", "application/marc"}, { "mscml", "application/mediaservercontrol+xml"}, { "mseed", "application/vnd.fdsn.mseed"}, { "mseq", "application/vnd.mseq"}, { "msf", "application/vnd.epson.msf"}, { "msh", "model/mesh"}, { "msi", "application/x-msdownload"}, { "ms", "text/troff"}, { "msty", "application/vnd.muvee.style"}, { "mts", "model/vnd.mts"}, { "mus", "application/vnd.musician"}, { "musicxml", "application/vnd.recordare.musicxml+xml"}, { "mvb", "application/x-msmediaview"}, { "mxf", "application/mxf"}, { "mxl", "application/vnd.recordare.musicxml"}, { "mxml", "application/xv+xml"}, { "mxs", "application/vnd.triscape.mxs"}, { "mxu", "video/vnd.mpegurl"}, { "nb", "application/mathematica"}, { "nc", "application/x-netcdf"}, { "ncx", "application/x-dtbncx+xml"}, { "n-gage", "application/vnd.nokia.n-gage.symbian.install"}, { "ngdat", "application/vnd.nokia.n-gage.data"}, { "nlu", "application/vnd.neurolanguage.nlu"}, { "nml", "application/vnd.enliven"}, { "nnd", "application/vnd.noblenet-directory"}, { "nns", "application/vnd.noblenet-sealer"}, { "nnw", "application/vnd.noblenet-web"}, { "npx", "image/vnd.net-fpx"}, { "nsf", "application/vnd.lotus-notes"}, { "nws", "message/rfc822"}, { "oa2", "application/vnd.fujitsu.oasys2"}, { "oa3", "application/vnd.fujitsu.oasys3"}, { "o", "application/octet-stream"}, { "oas", "application/vnd.fujitsu.oasys"}, { "obd", "application/x-msbinder"}, { "obj", "application/octet-stream"}, { "oda", "application/oda"}, { "odb", "application/vnd.oasis.opendocument.database"}, { "odc", "application/vnd.oasis.opendocument.chart"}, { "odf", "application/vnd.oasis.opendocument.formula"}, { "odft",
    "application/vnd.oasis.opendocument.formula-template"}, { "odg", "application/vnd.oasis.opendocument.graphics"}, { "odi", "application/vnd.oasis.opendocument.image"}, { "odp", "application/vnd.oasis.opendocument.presentation"}, { "ods", "application/vnd.oasis.opendocument.spreadsheet"}, { "odt", "application/vnd.oasis.opendocument.text"}, { "oga", "audio/ogg"}, { "ogg", "audio/ogg"}, { "ogv", "video/ogg"}, { "ogx", "application/ogg"}, { "onepkg", "application/onenote"}, { "onetmp", "application/onenote"}, { "onetoc2", "application/onenote"}, { "onetoc", "application/onenote"}, { "opf", "application/oebps-package+xml"}, { "oprc", "application/vnd.palm"}, { "org", "application/vnd.lotus-organizer"}, { "osf", "application/vnd.yamaha.openscoreformat"}, { "osfpvg", "application/vnd.yamaha.openscoreformat.osfpvg+xml"}, { "otc", "application/vnd.oasis.opendocument.chart-template"}, { "otf", "application/x-font-otf"}, { "otg", "application/vnd.oasis.opendocument.graphics-template"}, { "oth", "application/vnd.oasis.opendocument.text-web"}, { "oti", "application/vnd.oasis.opendocument.image-template"}, { "otm", "application/vnd.oasis.opendocument.text-master"}, { "otp", "application/vnd.oasis.opendocument.presentation-template"}, { "ots", "application/vnd.oasis.opendocument.spreadsheet-template"}, { "ott", "application/vnd.oasis.opendocument.text-template"}, { "oxt", "application/vnd.openofficeorg.extension"}, { "p10", "application/pkcs10"}, { "p12", "application/x-pkcs12"}, { "p7b", "application/x-pkcs7-certificates"}, { "p7c", "application/x-pkcs7-mime"}, { "p7m", "application/x-pkcs7-mime"}, { "p7r", "application/x-pkcs7-certreqresp"}, { "p7s", "application/x-pkcs7-signature"}, { "pas", "text/x-pascal"}, { "pbd", "application/vnd.powerbuilder6"}, { "pbm", "image/x-portable-bitmap"}, { "pcf", "application/x-font-pcf"}, { "pcl", "application/vnd.hp-pcl"}, { "pclxl", "application/vnd.hp-pclxl"}, { "pct", "image/x-pict"}, { "pcurl", "application/vnd.curl.pcurl"}, { "pcx",
    "image/x-pcx"}, { "pdb", "application/vnd.palm"}, { "pdf", "application/pdf"}, { "pfa", "application/x-font-type1"}, { "pfb", "application/x-font-type1"}, { "pfm", "application/x-font-type1"}, { "pfr", "application/font-tdpfr"}, { "pfx", "application/x-pkcs12"}, { "pgm", "image/x-portable-graymap"}, { "pgn", "application/x-chess-pgn"}, { "pgp", "application/pgp-encrypted"}, { "pic", "image/x-pict"}, { "pkg", "application/octet-stream"}, { "pki", "application/pkixcmp"}, { "pkipath", "application/pkix-pkipath"}, { "pkpass", "application/vnd-com.apple.pkpass"}, { "pko", "application/ynd.ms-pkipko"}, { "plb", "application/vnd.3gpp.pic-bw-large"}, { "plc", "application/vnd.mobius.plc"}, { "plf", "application/vnd.pocketlearn"}, { "pls", "application/pls+xml"}, { "pl", "text/plain"}, { "pma", "application/x-perfmon"}, { "pmc", "application/x-perfmon"}, { "pml", "application/x-perfmon"}, { "pmr", "application/x-perfmon"}, { "pmw", "application/x-perfmon"}, { "png", "image/png"}, { "pnm", "image/x-portable-anymap"}, { "portpkg", "application/vnd.macports.portpkg"}, { "pot,", "application/vnd.ms-powerpoint"}, { "pot", "application/vnd.ms-powerpoint"}, { "potm", "application/vnd.ms-powerpoint.template.macroenabled.12"}, { "potx", "application/vnd.openxmlformats-officedocument.presentationml.template"}, { "ppa", "application/vnd.ms-powerpoint"}, { "ppam", "application/vnd.ms-powerpoint.addin.macroenabled.12"}, { "ppd", "application/vnd.cups-ppd"}, { "ppm", "image/x-portable-pixmap"}, { "pps", "application/vnd.ms-powerpoint"}, { "ppsm", "application/vnd.ms-powerpoint.slideshow.macroenabled.12"}, { "ppsx", "application/vnd.openxmlformats-officedocument.presentationml.slideshow"}, { "ppt", "application/vnd.ms-powerpoint"}, { "pptm", "application/vnd.ms-powerpoint.presentation.macroenabled.12"}, { "pptx", "application/vnd.openxmlformats-officedocument.presentationml.presentation"}, { "pqa", "application/vnd.palm"}, { "prc", "application/x-mobipocket-ebook"}, { "pre",
    "application/vnd.lotus-freelance"}, { "prf", "application/pics-rules"}, { "ps", "application/postscript"}, { "psb", "application/vnd.3gpp.pic-bw-small"}, { "psd", "image/vnd.adobe.photoshop"}, { "psf", "application/x-font-linux-psf"}, { "p", "text/x-pascal"}, { "ptid", "application/vnd.pvi.ptid1"}, { "pub", "application/x-mspublisher"}, { "pvb", "application/vnd.3gpp.pic-bw-var"}, { "pwn", "application/vnd.3m.post-it-notes"}, { "pwz", "application/vnd.ms-powerpoint"}, { "pya", "audio/vnd.ms-playready.media.pya"}, { "pyc", "application/x-python-code"}, { "pyo", "application/x-python-code"}, { "py", "text/x-python"}, { "pyv", "video/vnd.ms-playready.media.pyv"}, { "qam", "application/vnd.epson.quickanime"}, { "qbo", "application/vnd.intu.qbo"}, { "qfx", "application/vnd.intu.qfx"}, { "qps", "application/vnd.publishare-delta-tree"}, { "qt", "video/quicktime"}, { "qwd", "application/vnd.quark.quarkxpress"}, { "qwt", "application/vnd.quark.quarkxpress"}, { "qxb", "application/vnd.quark.quarkxpress"}, { "qxd", "application/vnd.quark.quarkxpress"}, { "qxl", "application/vnd.quark.quarkxpress"}, { "qxt", "application/vnd.quark.quarkxpress"}, { "ra", "audio/x-pn-realaudio"}, { "ram", "audio/x-pn-realaudio"}, { "rar", "application/x-rar-compressed"}, { "ras", "image/x-cmu-raster"}, { "rcprofile", "application/vnd.ipunplugged.rcprofile"}, { "rdf", "application/rdf+xml"}, { "rdz", "application/vnd.data-vision.rdz"}, { "rep", "application/vnd.businessobjects"}, { "res", "application/x-dtbresource+xml"}, { "rgb", "image/x-rgb"}, { "rif", "application/reginfo+xml"}, { "rl", "application/resource-lists+xml"}, { "rlc", "image/vnd.fujixerox.edmics-rlc"}, { "rld", "application/resource-lists-diff+xml"}, { "rm", "application/vnd.rn-realmedia"}, { "rmi", "audio/midi"}, { "rmp", "audio/x-pn-realaudio-plugin"}, { "rms", "application/vnd.jcp.javame.midlet-rms"}, { "rnc", "application/relax-ng-compact-syntax"}, { "roff", "text/troff"}, { "rpm", "application/x-rpm"}, { "rpss",
    "application/vnd.nokia.radio-presets"}, { "rpst", "application/vnd.nokia.radio-preset"}, { "rq", "application/sparql-query"}, { "rs", "application/rls-services+xml"}, { "rsd", "application/rsd+xml"}, { "rss", "application/rss+xml"}, { "rtf", "application/rtf"}, { "rtx", "text/richtext"}, { "saf", "application/vnd.yamaha.smaf-audio"}, { "sbml", "application/sbml+xml"}, { "sc", "application/vnd.ibm.secure-container"}, { "scd", "application/x-msschedule"}, { "scm", "application/vnd.lotus-screencam"}, { "scq", "application/scvp-cv-request"}, { "scs", "application/scvp-cv-response"}, { "sct", "text/scriptlet"}, { "scurl", "text/vnd.curl.scurl"}, { "sda", "application/vnd.stardivision.draw"}, { "sdc", "application/vnd.stardivision.calc"}, { "sdd", "application/vnd.stardivision.impress"}, { "sdkd", "application/vnd.solent.sdkm+xml"}, { "sdkm", "application/vnd.solent.sdkm+xml"}, { "sdp", "application/sdp"}, { "sdw", "application/vnd.stardivision.writer"}, { "see", "application/vnd.seemail"}, { "seed", "application/vnd.fdsn.seed"}, { "sema", "application/vnd.sema"}, { "semd", "application/vnd.semd"}, { "semf", "application/vnd.semf"}, { "ser", "application/java-serialized-object"}, { "setpay", "application/set-payment-initiation"}, { "setreg", "application/set-registration-initiation"}, { "sfd-hdstx", "application/vnd.hydrostatix.sof-data"}, { "sfs", "application/vnd.spotfire.sfs"}, { "sgl", "application/vnd.stardivision.writer-global"}, { "sgml", "text/sgml"}, { "sgm", "text/sgml"}, { "sh", "application/x-sh"}, { "shar", "application/x-shar"}, { "shf", "application/shf+xml"}, { "sic", "application/vnd.wap.sic"}, { "sig", "application/pgp-signature"}, { "silo", "model/mesh"}, { "sis", "application/vnd.symbian.install"}, { "sisx", "application/vnd.symbian.install"}, { "sit", "application/x-stuffit"}, { "si", "text/vnd.wap.si"}, { "sitx", "application/x-stuffitx"}, { "skd", "application/vnd.koan"}, { "skm", "application/vnd.koan"}, { "skp", "application/vnd.koan"}, { "skt",
    "application/vnd.koan"}, { "slc", "application/vnd.wap.slc"}, { "sldm", "application/vnd.ms-powerpoint.slide.macroenabled.12"}, { "sldx", "application/vnd.openxmlformats-officedocument.presentationml.slide"}, { "slt", "application/vnd.epson.salt"}, { "sl", "text/vnd.wap.sl"}, { "smf", "application/vnd.stardivision.math"}, { "smi", "application/smil+xml"}, { "smil", "application/smil+xml"}, { "snd", "audio/basic"}, { "snf", "application/x-font-snf"}, { "so", "application/octet-stream"}, { "spc", "application/x-pkcs7-certificates"}, { "spf", "application/vnd.yamaha.smaf-phrase"}, { "spl", "application/x-futuresplash"}, { "spot", "text/vnd.in3d.spot"}, { "spp", "application/scvp-vp-response"}, { "spq", "application/scvp-vp-request"}, { "spx", "audio/ogg"}, { "src", "application/x-wais-source"}, { "srx", "application/sparql-results+xml"}, { "sse", "application/vnd.kodak-descriptor"}, { "ssf", "application/vnd.epson.ssf"}, { "ssml", "application/ssml+xml"}, { "sst", "application/vnd.ms-pkicertstore"}, { "stc", "application/vnd.sun.xml.calc.template"}, { "std", "application/vnd.sun.xml.draw.template"}, { "s", "text/x-asm"}, { "stf", "application/vnd.wt.stf"}, { "sti", "application/vnd.sun.xml.impress.template"}, { "stk", "application/hyperstudio"}, { "stl", "application/vnd.ms-pki.stl"}, { "stm", "text/html"}, { "str", "application/vnd.pg.format"}, { "stw", "application/vnd.sun.xml.writer.template"}, { "sus", "application/vnd.sus-calendar"}, { "susp", "application/vnd.sus-calendar"}, { "sv4cpio", "application/x-sv4cpio"}, { "sv4crc", "application/x-sv4crc"}, { "svd", "application/vnd.svd"}, { "svg", "image/svg+xml"}, { "svgz", "image/svg+xml"}, { "swa", "application/x-director"}, { "swf", "application/x-shockwave-flash"}, { "swi", "application/vnd.arastra.swi"}, { "sxc", "application/vnd.sun.xml.calc"}, { "sxd", "application/vnd.sun.xml.draw"}, { "sxg", "application/vnd.sun.xml.writer.global"}, { "sxi", "application/vnd.sun.xml.impress"}, { "sxm",
    "application/vnd.sun.xml.math"}, { "sxw", "application/vnd.sun.xml.writer"}, { "tao", "application/vnd.tao.intent-module-archive"}, { "t", "application/x-troff"}, { "tar", "application/x-tar"}, { "tcap", "application/vnd.3gpp2.tcap"}, { "tcl", "application/x-tcl"}, { "teacher", "application/vnd.smart.teacher"}, { "tex", "application/x-tex"}, { "texi", "application/x-texinfo"}, { "texinfo", "application/x-texinfo"}, { "text", "text/plain"}, { "tfm", "application/x-tex-tfm"}, { "tgz", "application/x-gzip"}, { "tiff", "image/tiff"}, { "tif", "image/tiff"}, { "tmo", "application/vnd.tmobile-livetv"}, { "torrent", "application/x-bittorrent"}, { "tpl", "application/vnd.groove-tool-template"}, { "tpt", "application/vnd.trid.tpt"}, { "tra", "application/vnd.trueapp"}, { "trm", "application/x-msterminal"}, { "tr", "text/troff"}, { "tsv", "text/tab-separated-values"}, { "ttc", "application/x-font-ttf"}, { "ttf", "application/x-font-ttf"}, { "twd", "application/vnd.simtech-mindmapper"}, { "twds", "application/vnd.simtech-mindmapper"}, { "txd", "application/vnd.genomatix.tuxedo"}, { "txf", "application/vnd.mobius.txf"}, { "txt", "text/plain"}, { "u32", "application/x-authorware-bin"}, { "udeb", "application/x-debian-package"}, { "ufd", "application/vnd.ufdl"}, { "ufdl", "application/vnd.ufdl"}, { "uls", "text/iuls"}, { "umj", "application/vnd.umajin"}, { "unityweb", "application/vnd.unity"}, { "uoml", "application/vnd.uoml+xml"}, { "uris", "text/uri-list"}, { "uri", "text/uri-list"}, { "urls", "text/uri-list"}, { "ustar", "application/x-ustar"}, { "utz", "application/vnd.uiq.theme"}, { "uu", "text/x-uuencode"}, { "vcd", "application/x-cdlink"}, { "vcf", "text/x-vcard"}, { "vcg", "application/vnd.groove-vcard"}, { "vcs", "text/x-vcalendar"}, { "vcx", "application/vnd.vcx"}, { "vis", "application/vnd.visionary"}, { "viv", "video/vnd.vivo"}, { "vor", "application/vnd.stardivision.writer"}, { "vox", "application/x-authorware-bin"}, { "vrml", "x-world/x-vrml"}, { "vsd",
    "application/vnd.visio"}, { "vsf", "application/vnd.vsf"}, { "vss", "application/vnd.visio"}, { "vst", "application/vnd.visio"}, { "vsw", "application/vnd.visio"}, { "vtu", "model/vnd.vtu"}, { "vxml", "application/voicexml+xml"}, { "w3d", "application/x-director"}, { "wad", "application/x-doom"}, { "wav", "audio/x-wav"}, { "wax", "audio/x-ms-wax"}, { "wbmp", "image/vnd.wap.wbmp"}, { "wbs", "application/vnd.criticaltools.wbs+xml"}, { "wbxml", "application/vnd.wap.wbxml"}, { "wcm", "application/vnd.ms-works"}, { "wdb", "application/vnd.ms-works"}, { "wiz", "application/msword"}, { "wks", "application/vnd.ms-works"}, { "wma", "audio/x-ms-wma"}, { "wmd", "application/x-ms-wmd"}, { "wmf", "application/x-msmetafile"}, { "wmlc", "application/vnd.wap.wmlc"}, { "wmlsc", "application/vnd.wap.wmlscriptc"}, { "wmls", "text/vnd.wap.wmlscript"}, { "wml", "text/vnd.wap.wml"}, { "wm", "video/x-ms-wm"}, { "wmv", "video/x-ms-wmv"}, { "wmx", "video/x-ms-wmx"}, { "wmz", "application/x-ms-wmz"}, { "wpd", "application/vnd.wordperfect"}, { "wpl", "application/vnd.ms-wpl"}, { "wps", "application/vnd.ms-works"}, { "wqd", "application/vnd.wqd"}, { "wri", "application/x-mswrite"}, { "wrl", "x-world/x-vrml"}, { "wrz", "x-world/x-vrml"}, { "wsdl", "application/wsdl+xml"}, { "wspolicy", "application/wspolicy+xml"}, { "wtb", "application/vnd.webturbo"}, { "wvx", "video/x-ms-wvx"}, { "x32", "application/x-authorware-bin"}, { "x3d", "application/vnd.hzn-3d-crossword"}, { "xaf", "x-world/x-vrml"}, { "xap", "application/x-silverlight-app"}, { "xar", "application/vnd.xara"}, { "xbap", "application/x-ms-xbap"}, { "xbd", "application/vnd.fujixerox.docuworks.binder"}, { "xbm", "image/x-xbitmap"}, { "xdm", "application/vnd.syncml.dm+xml"}, { "xdp", "application/vnd.adobe.xdp+xml"}, { "xdw", "application/vnd.fujixerox.docuworks"}, { "xenc", "application/xenc+xml"}, { "xer", "application/patch-ops-error+xml"}, { "xfdf", "application/vnd.adobe.xfdf"}, { "xfdl", "application/vnd.xfdl"}, { "xht",
    "application/xhtml+xml"}, { "xhtml", "application/xhtml+xml"}, { "xhvml", "application/xv+xml"}, { "xif", "image/vnd.xiff"}, { "xla", "application/vnd.ms-excel"}, { "xlam", "application/vnd.ms-excel.addin.macroenabled.12"}, { "xlb", "application/vnd.ms-excel"}, { "xlc", "application/vnd.ms-excel"}, { "xlm", "application/vnd.ms-excel"}, { "xls", "application/vnd.ms-excel"}, { "xlsb", "application/vnd.ms-excel.sheet.binary.macroenabled.12"}, { "xlsm", "application/vnd.ms-excel.sheet.macroenabled.12"}, { "xlsx", "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"}, { "xlt", "application/vnd.ms-excel"}, { "xltm", "application/vnd.ms-excel.template.macroenabled.12"}, { "xltx", "application/vnd.openxmlformats-officedocument.spreadsheetml.template"}, { "xlw", "application/vnd.ms-excel"}, { "xml", "application/xml"}, { "xo", "application/vnd.olpc-sugar"}, { "xof", "x-world/x-vrml"}, { "xop", "application/xop+xml"}, { "xpdl", "application/xml"}, { "xpi", "application/x-xpinstall"}, { "xpm", "image/x-xpixmap"}, { "xpr", "application/vnd.is-xpr"}, { "xps", "application/vnd.ms-xpsdocument"}, { "xpw", "application/vnd.intercon.formnet"}, { "xpx", "application/vnd.intercon.formnet"}, { "xsl", "application/xml"}, { "xslt", "application/xslt+xml"}, { "xsm", "application/vnd.syncml+xml"}, { "xspf", "application/xspf+xml"}, { "xul", "application/vnd.mozilla.xul+xml"}, { "xvm", "application/xv+xml"}, { "xvml", "application/xv+xml"}, { "xwd", "image/x-xwindowdump"}, { "xyz", "chemical/x-xyz"}, { "z", "application/x-compress"}, { "zaz", "application/vnd.zzazz.deck+xml"}, { "zip", "application/zip"}, { "zir", "application/vnd.zul"}, { "zirz", "application/vnd.zul"}, { "zmm", "application/vnd.handheld-entertainment+xml"}
    };

    /**
     * Removes all CR and LF characters from a folded header value so it can
     * be processed as a single line. Null-safe: returns null for null input.
     */
    public static String unfold(String s) {
        if (s == null) {
            return null;
        }
        return s.replaceAll("\r|\n", "");
    }

    // Decodes encoded-words in the given string via DecoderUtil (presumably
    // RFC 2047 — confirm against DecoderUtil). Null-safe.
    private static String decode(String s, Message message) {
        if (s == null) {
            return null;
        } else {
            return DecoderUtil.decodeEncodedWords(s,
message); } } // closes decode(String, Message): the decodeEncodedWords(...) call opened on the previous line

    /** Unfolds and RFC 2047-decodes the given header value with no Message context. */
    public static String unfoldAndDecode(String s) {
        return unfoldAndDecode(s, null);
    }

    /**
     * Unfolds the header value and decodes any encoded-words in it.
     * The message argument is forwarded to the decoder — presumably for error
     * reporting / charset fallback; confirm against DecoderUtil.
     */
    public static String unfoldAndDecode(String s, Message message) {
        return decode(unfold(s), message);
    }

    // TODO implement proper foldAndEncode
    /** Placeholder: returns the input unchanged until real folding/encoding is implemented. */
    public static String foldAndEncode(String s) {
        return s;
    }

    /**
     * Returns the named parameter of a header field. If name is null the first
     * parameter is returned, or if there are no additional parameters in the
     * field the entire field is returned. Otherwise the named parameter is
     * searched for in a case insensitive fashion and returned.
     *
     * @param headerValue the header value
     * @param parameterName the parameter name
     * @return the value; if the parameter cannot be found the method returns null.
     */
    public static String getHeaderParameter(String headerValue, String parameterName) {
        if (headerValue == null) {
            return null;
        }
        // Unfold first: parameters may be split across folded header lines.
        headerValue = headerValue.replaceAll("\r|\n", "");
        String[] parts = headerValue.split(";");
        if (parameterName == null && parts.length > 0) {
            // No name requested: return the first segment (the bare field value).
            return parts[0].trim();
        }
        for (String part : parts) {
            // NOTE(review): startsWith() is a prefix match, so "charset" would also
            // match a parameter named e.g. "charsetfoo" — confirm callers accept this.
            if (parameterName != null && part.trim().toLowerCase(Locale.US).startsWith(parameterName.toLowerCase(Locale.US))) {
                String[] partParts = part.split("=", 2);
                if (partParts.length == 2) {
                    String parameter = partParts[1].trim();
                    int len = parameter.length();
                    if (len >= 2 && parameter.startsWith("\"") && parameter.endsWith("\"")) {
                        // Strip surrounding double quotes from a quoted value.
                        return parameter.substring(1, len - 1);
                    } else {
                        return parameter;
                    }
                }
            }
        }
        return null;
    }

    /**
     * Depth-first search for the first part whose MIME type matches exactly
     * (case-insensitive). Returns the matching part, or null when none matches.
     */
    public static Part findFirstPartByMimeType(Part part, String mimeType) throws MessagingException {
        if (part.getBody() instanceof Multipart) {
            Multipart multipart = (Multipart) part.getBody();
            for (BodyPart bodyPart : multipart.getBodyParts()) {
                Part ret = MimeUtility.findFirstPartByMimeType(bodyPart, mimeType);
                if (ret != null) {
                    return ret;
                }
            }
        } else if (isSameMimeType(part.getMimeType(), mimeType)) {
            return part;
        }
        return null;
    }

    /**
     * Returns true if the given mimeType matches the matchAgainst specification.
     *
     * @param mimeType A MIME type to check.
     * @param matchAgainst A MIME type to check against. May include wildcards,
     *            e.g. "image/*" or "*&#47;*".
     * @return true when the type matches (case-insensitive)
     */
    public static boolean mimeTypeMatches(String mimeType, String matchAgainst) {
        // Turn MIME wildcards into regex: each '*' becomes '.*'. Other regex
        // metacharacters in matchAgainst are NOT escaped — assumes well-formed input.
        Pattern p = Pattern.compile(matchAgainst.replaceAll("\\*", "\\.\\*"), Pattern.CASE_INSENSITIVE);
        return p.matcher(mimeType).matches();
    }

    /** True when mimeType equals DEFAULT_ATTACHMENT_MIME_TYPE (declared elsewhere in this class). */
    public static boolean isDefaultMimeType(String mimeType) {
        return isSameMimeType(mimeType, DEFAULT_ATTACHMENT_MIME_TYPE);
    }

    /**
     * Spools the stream into a temp-file-backed Body, remembering the
     * content-transfer-encoding so the raw bytes can be decoded later.
     *
     * @throws IOException on copy failure
     * @throws MessagingException propagated from the body implementation
     */
    public static Body createBody(InputStream in, String contentTransferEncoding, String contentType) throws IOException, MessagingException {
        if (contentTransferEncoding != null) {
            // The header may carry parameters; keep only the encoding token itself.
            contentTransferEncoding = MimeUtility.getHeaderParameter(contentTransferEncoding, null);
        }
        BinaryTempFileBody tempBody;
        if (MimeUtil.isMessage(contentType)) {
            tempBody = new BinaryTempFileMessageBody(contentTransferEncoding);
        } else {
            tempBody = new BinaryTempFileBody(contentTransferEncoding);
        }
        OutputStream out = tempBody.getOutputStream();
        try {
            IOUtils.copy(in, out);
        } finally {
            out.close();
        }
        return tempBody;
    }

    /**
     * Get decoded contents of a body.
     * <p/>
     * Right now only some classes retain the original encoding of the body contents. Those classes have to implement
     * the {@link RawDataBody} interface in order for this method to decode the data delivered by
     * {@link Body#getInputStream()}.
     * <p/>
     * The ultimate goal is to get to a point where all classes retain the original data and {@code RawDataBody} can be
     * merged into {@link Body}.
     */
    public static InputStream decodeBody(Body body) throws MessagingException {
        InputStream inputStream;
        if (body instanceof RawDataBody) {
            RawDataBody rawDataBody = (RawDataBody) body;
            String encoding = rawDataBody.getEncoding();
            final InputStream rawInputStream = rawDataBody.getInputStream();
            if (MimeUtil.ENC_7BIT.equalsIgnoreCase(encoding)
                    || MimeUtil.ENC_8BIT.equalsIgnoreCase(encoding)
                    || MimeUtil.ENC_BINARY.equalsIgnoreCase(encoding)) {
                // Identity encodings: pass the raw stream through untouched.
                inputStream = rawInputStream;
            } else if (MimeUtil.ENC_BASE64.equalsIgnoreCase(encoding)) {
                inputStream = new Base64InputStream(rawInputStream, false) {
                    @Override
                    public void close() throws IOException {
                        super.close();
                        // Keep the backing temp file alive; only release the stream.
                        closeInputStreamWithoutDeletingTemporaryFiles(rawInputStream);
                    }
                };
            } else if (MimeUtil.ENC_QUOTED_PRINTABLE.equalsIgnoreCase(encoding)) {
                inputStream = new QuotedPrintableInputStream(rawInputStream) {
                    @Override
                    public void close() throws IOException {
                        super.close();
                        closeInputStreamWithoutDeletingTemporaryFiles(rawInputStream);
                    }
                };
            } else {
                throw new RuntimeException("Encoding for RawDataBody not supported: " + encoding);
            }
        } else {
            inputStream = body.getInputStream();
        }
        return inputStream;
    }

    /**
     * Closes the stream, but when it is backed by a temp-file body, does so
     * without deleting the underlying temporary file.
     */
    public static void closeInputStreamWithoutDeletingTemporaryFiles(InputStream rawInputStream) throws IOException {
        if (rawInputStream instanceof BinaryTempFileBody.BinaryTempFileBodyInputStream) {
            ((BinaryTempFileBody.BinaryTempFileBodyInputStream) rawInputStream).closeWithoutDeleting();
        } else {
            rawInputStream.close();
        }
    }

    /**
     * Best-effort MIME type lookup from a filename extension: first the Android
     * platform map, then the local fallback table; defaults to the generic
     * attachment type when nothing matches.
     */
    public static String getMimeTypeByExtension(String filename) {
        String returnedType = null;
        String extension = null;
        if (filename != null && filename.lastIndexOf('.') != -1) {
            extension = filename.substring(filename.lastIndexOf('.') + 1).toLowerCase(Locale.US);
            returnedType = android.webkit.MimeTypeMap.getSingleton().getMimeTypeFromExtension(extension);
        }
        // If the MIME type set by the user's mailer is application/octet-stream, try to figure
        // out whether there's a sane file type extension.
        if (returnedType != null && !isSameMimeType(returnedType, DEFAULT_ATTACHMENT_MIME_TYPE)) {
            return returnedType;
        } else if (extension != null) {
            for (String[] contentTypeMapEntry : MIME_TYPE_BY_EXTENSION_MAP) {
                if (contentTypeMapEntry[0].equals(extension)) {
                    return contentTypeMapEntry[1];
                }
            }
        }
        return DEFAULT_ATTACHMENT_MIME_TYPE;
    }

    /** Reverse lookup in the local table: first extension registered for the MIME type, or null. */
    public static String getExtensionByMimeType(String mimeType) {
        String lowerCaseMimeType = mimeType.toLowerCase(Locale.US);
        for (String[] contentTypeMapEntry : MIME_TYPE_BY_EXTENSION_MAP) {
            if (contentTypeMapEntry[1].equals(lowerCaseMimeType)) {
                return contentTypeMapEntry[0];
            }
        }
        return null;
    }

    /**
     * Get a default content-transfer-encoding for use with a given content-type
     * when adding an unencoded attachment. It's possible that 8bit encodings
     * may later be converted to 7bit for 7bit transport.
     * <ul>
     * <li>null: base64
     * <li>message/rfc822: 8bit
     * <li>message/*: 7bit
     * <li>multipart/signed: 7bit
     * <li>multipart/*: 8bit
     * <li>*&#47;*: base64
     * </ul>
     *
     * @param type
     *            A String representing a MIME content-type
     * @return A String representing a MIME content-transfer-encoding
     */
    public static String getEncodingforType(String type) {
        if (type == null) {
            return (MimeUtil.ENC_BASE64);
        } else if (MimeUtil.isMessage(type)) {
            return (MimeUtil.ENC_8BIT);
        } else if (isSameMimeType(type, "multipart/signed") || isMessage(type)) {
            // NOTE(review): if MimeUtil.isMessage(type) above already matched
            // message/rfc822, the isMessage(type) half of this condition is
            // unreachable — confirm MimeUtil's semantics.
            return (MimeUtil.ENC_7BIT);
        } else if (isMultipart(type)) {
            return (MimeUtil.ENC_8BIT);
        } else {
            return (MimeUtil.ENC_BASE64);
        }
    }

    /** True for any multipart/* content type (case-insensitive); false for null. */
    public static boolean isMultipart(String mimeType) {
        return mimeType != null && mimeType.toLowerCase(Locale.US).startsWith("multipart/");
    }

    /** True only for exactly message/rfc822 (not other message/* subtypes). */
    public static boolean isMessage(String mimeType) {
        return isSameMimeType(mimeType, "message/rfc822");
    }

    /** Null-safe case-insensitive MIME type comparison; false when mimeType is null. */
    public static boolean isSameMimeType(String mimeType, String otherMimeType) {
        return mimeType != null && mimeType.equalsIgnoreCase(otherMimeType);
    }
}
/* * Copyright 2015 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kie.workbench.common.screens.social.hp.client.userpage; import javax.annotation.PostConstruct; import javax.enterprise.context.ApplicationScoped; import javax.enterprise.event.Event; import javax.enterprise.event.Observes; import javax.inject.Inject; import com.google.gwt.user.client.ui.Image; import org.jboss.errai.common.client.api.Caller; import org.jboss.errai.common.client.api.RemoteCallback; import org.jboss.errai.ioc.client.api.AfterInitialization; import org.jboss.errai.security.shared.api.identity.User; import org.ext.uberfire.social.activities.client.gravatar.GravatarBuilder; import org.ext.uberfire.social.activities.client.widgets.item.model.LinkCommandParams; import org.ext.uberfire.social.activities.client.widgets.pagination.Next; import org.ext.uberfire.social.activities.client.widgets.timeline.simple.model.SimpleSocialTimelineWidgetModel; import org.ext.uberfire.social.activities.client.widgets.userbox.UserBoxView; import org.ext.uberfire.social.activities.model.SocialPaged; import org.ext.uberfire.social.activities.model.SocialUser; import org.ext.uberfire.social.activities.service.SocialUserImageRepositoryAPI; import org.ext.uberfire.social.activities.service.SocialUserRepositoryAPI; import org.ext.uberfire.social.activities.service.SocialUserServiceAPI; import org.kie.workbench.common.screens.social.hp.client.homepage.DefaultSocialLinkCommandGenerator; import 
org.kie.workbench.common.screens.social.hp.client.homepage.events.LoadUserPageEvent; import org.kie.workbench.common.screens.social.hp.client.homepage.events.UserEditedEvent; import org.kie.workbench.common.screens.social.hp.client.homepage.events.UserHomepageSelectedEvent; import org.kie.workbench.common.screens.social.hp.client.resources.i18n.Constants; import org.kie.workbench.common.screens.social.hp.client.userpage.main.MainPresenter; import org.kie.workbench.common.screens.social.hp.client.userpage.main.header.HeaderPresenter; import org.kie.workbench.common.screens.social.hp.client.util.IconLocator; import org.kie.workbench.common.screens.social.hp.predicate.UserTimeLineOnlyUserActivityPredicate; import org.uberfire.client.annotations.WorkbenchPartTitle; import org.uberfire.client.annotations.WorkbenchPartView; import org.uberfire.client.annotations.WorkbenchScreen; import org.uberfire.client.mvp.PlaceManager; import org.uberfire.client.mvp.UberView; import org.uberfire.client.workbench.events.ChangeTitleWidgetEvent; import org.uberfire.lifecycle.OnStartup; import org.uberfire.mvp.ParameterizedCommand; import org.uberfire.mvp.PlaceRequest; @ApplicationScoped @WorkbenchScreen( identifier = "UserHomePageMainPresenter" ) public class UserHomePageMainPresenter { private PlaceRequest place; public interface View extends UberView<UserHomePageMainPresenter> { void setHeader( final HeaderPresenter header ); void setMain( MainPresenter main ); } @Inject private IconLocator iconLocator; @Inject private Event<ChangeTitleWidgetEvent> changeTitleWidgetEvent; @Inject private View view; @Inject private HeaderPresenter header; @Inject private MainPresenter mainPresenter; @Inject Caller<SocialUserRepositoryAPI> socialUserRepositoryAPI; @Inject Caller<SocialUserServiceAPI> socialUserServiceAPI; @Inject private User loggedUser; @Inject private PlaceManager placeManager; @Inject private Event<UserHomepageSelectedEvent> userHomepageSelectedEvent; @Inject private 
DefaultSocialLinkCommandGenerator linkCommandGenerator; //control race conditions due to assync system (cdi x UF lifecycle) private String lastUserOnpage; @PostConstruct public void loadContent() { initHeader(); } private void initHeader() { view.setHeader( header ); view.setMain( mainPresenter ); } @OnStartup public void onStartup( final PlaceRequest place ) { this.place = place; this.lastUserOnpage = loggedUser.getIdentifier(); setupUser( loggedUser.getIdentifier() ); } public void watchLoadUserPageEvent( @Observes LoadUserPageEvent event ) { this.lastUserOnpage = event.getSocialUserName(); setupUser( event.getSocialUserName() ); } public void watchUserHomepageSelectedEvent( @Observes UserHomepageSelectedEvent event ) { this.lastUserOnpage = event.getSocialUserName(); setupUser( event.getSocialUserName() ); } public void watchUserHomepageSelectedEvent( @Observes UserEditedEvent event ) { this.lastUserOnpage = event.getSocialUserName(); setupUser( event.getSocialUserName() ); } private boolean isThisUserStillCurrentActiveUser( SocialUser socialUser ) { return socialUser.getUserName().equalsIgnoreCase( lastUserOnpage ); } private void setupUser( final String username ) { final SocialPaged socialPaged = new SocialPaged( 5 ); socialUserRepositoryAPI.call( new RemoteCallback<SocialUser>() { @Override public void callback( SocialUser socialUser ) { if ( isThisUserStillCurrentActiveUser( socialUser ) ) { generateConnectionsList( socialUser ); setupMainWidget( socialUser, socialPaged ); } } } ).findSocialUser( username ); } private void setupMainWidget( SocialUser socialUser, SocialPaged socialPaged ) { String userName = ( socialUser != null && socialUser.getRealName() != null && !socialUser.getRealName().isEmpty() ) ? 
socialUser.getRealName() : socialUser.getUserName(); String title = Constants.INSTANCE.UserNameRecentActivities( userName ); changeTitleWidgetEvent.fire( new ChangeTitleWidgetEvent( place, title ) ); SimpleSocialTimelineWidgetModel model = new SimpleSocialTimelineWidgetModel( socialUser, new UserTimeLineOnlyUserActivityPredicate( socialUser ), placeManager, socialPaged ) .withIcons( iconLocator.getResourceTypes() ) .withOnlyMorePagination( new Next() {{ setText( Constants.INSTANCE.PaginationMore() ); }} ) .withLinkCommand( generateLinkCommand() ); mainPresenter.setup( model ); } private ParameterizedCommand<LinkCommandParams> generateLinkCommand() { return linkCommandGenerator.generateLinkCommand(); } private void generateConnectionsList( final SocialUser socialUser ) { header.clear(); for ( final String follower : socialUser.getFollowingName() ) { socialUserRepositoryAPI.call( new RemoteCallback<SocialUser>() { @Override public void callback( final SocialUser follower ) { if ( isThisUserStillCurrentActiveUser( socialUser ) ) { setupFollowerWidget( follower ); } } } ).findSocialUser( follower ); } if ( isThisUserStillCurrentActiveUser( socialUser ) & thereIsNoFollowers( socialUser ) ) { header.noConnection(); } } private void setupFollowerWidget( SocialUser socialUser ) { Image followerImage = GravatarBuilder.generate( socialUser, SocialUserImageRepositoryAPI.ImageSize.SMALL ); UserBoxView.RelationType relationType = findRelationTypeWithLoggedUser( socialUser ); header.addConnection( socialUser, relationType, followerImage, onClickEvent(), generateFollowUnfollowCommand( relationType ) ); } private ParameterizedCommand<String> onClickEvent() { return new ParameterizedCommand<String>() { @Override public void execute( String parameter ) { userHomepageSelectedEvent.fire( new UserHomepageSelectedEvent( parameter ) ); } }; } private ParameterizedCommand<String> generateFollowUnfollowCommand( final UserBoxView.RelationType relationType ) { return new 
ParameterizedCommand<String>() { @Override public void execute( final String parameter ) { if ( relationType == UserBoxView.RelationType.CAN_FOLLOW ) { socialUserServiceAPI.call().userFollowAnotherUser( loggedUser.getIdentifier(), parameter ); } else { socialUserServiceAPI.call().userUnfollowAnotherUser( loggedUser.getIdentifier(), parameter ); } userHomepageSelectedEvent.fire( new UserHomepageSelectedEvent( lastUserOnpage ) ); } }; } private UserBoxView.RelationType findRelationTypeWithLoggedUser( SocialUser socialUser ) { if ( socialUser.getUserName().equalsIgnoreCase( loggedUser.getIdentifier() ) ) { return UserBoxView.RelationType.ME; } else { return socialUser.getFollowersName().contains( loggedUser.getIdentifier() ) ? UserBoxView.RelationType.UNFOLLOW : UserBoxView.RelationType.CAN_FOLLOW; } } private boolean thereIsNoFollowers( SocialUser socialUser ) { return socialUser.getFollowingName() == null || socialUser.getFollowingName().isEmpty(); } @WorkbenchPartTitle public String getTitle() { return "UserHomePageMainPresenter"; } @WorkbenchPartView public UberView<UserHomePageMainPresenter> getView() { return view; } }
/**
 * Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
 */
package docs.io.japi;

import java.net.InetSocketAddress;
import java.util.LinkedList;
import java.util.Queue;

import akka.actor.ActorRef;
import akka.actor.UntypedActor;
import akka.event.Logging;
import akka.event.LoggingAdapter;
import akka.io.Tcp.CommandFailed;
import akka.io.Tcp.ConnectionClosed;
import akka.io.Tcp.Event;
import akka.io.Tcp.Received;
import akka.io.Tcp.Write;
import akka.io.Tcp.WritingResumed;
import akka.io.TcpMessage;
import akka.japi.Procedure;
import akka.util.ByteString;

//#echo-handler
/**
 * TCP echo handler actor that buffers written-but-unacknowledged chunks and
 * applies back-pressure: it suspends reading above HIGH_WATERMARK, resumes
 * below LOW_WATERMARK, and drops the connection above MAX_STORED. It starts in
 * the optimistic "writing" (NACK-based) state and falls back to ACK-based
 * "buffering" after a failed write.
 */
public class EchoHandler extends UntypedActor {

  final LoggingAdapter log = Logging.getLogger(getContext().system(), getSelf());

  final ActorRef connection;
  final InetSocketAddress remote;

  // Buffer thresholds, in bytes (stored accumulates ByteString sizes):
  public static final long MAX_STORED = 100000000;               // hard cap: drop the connection
  public static final long HIGH_WATERMARK = MAX_STORED * 5 / 10; // suspend reading above this
  public static final long LOW_WATERMARK = MAX_STORED * 2 / 10;  // resume reading below this

  // Write-completion event carrying the offset of the chunk it acknowledges.
  private static class Ack implements Event {
    public final int ack;

    public Ack(int ack) {
      this.ack = ack;
    }
  }

  public EchoHandler(ActorRef connection, InetSocketAddress remote) {
    this.connection = connection;
    this.remote = remote;

    // sign death pact: this actor stops when the connection is closed
    getContext().watch(connection);

    // start out in optimistic write-through mode
    getContext().become(writing);
  }

  // Optimistic NACK-based state: write every chunk immediately and keep a copy
  // until it is acknowledged; a CommandFailed switches to ACK-based buffering.
  private final Procedure<Object> writing = new Procedure<Object>() {
    @Override
    public void apply(Object msg) throws Exception {
      if (msg instanceof Received) {
        // Echo the data back, requesting an Ack at the current offset.
        final ByteString data = ((Received) msg).data();
        connection.tell(TcpMessage.write(data, new Ack(currentOffset())), getSelf());
        buffer(data);

      } else if (msg instanceof Integer) {
        acknowledge((Integer) msg);

      } else if (msg instanceof CommandFailed) {
        // Write was NACKed: ask the connection to resume writing and replay
        // from the failed chunk in ACK-based mode.
        final Write w = (Write) ((CommandFailed) msg).cmd();
        connection.tell(TcpMessage.resumeWriting(), getSelf());
        getContext().become(buffering((Ack) w.ack()));

      } else if (msg instanceof ConnectionClosed) {
        final ConnectionClosed cl = (ConnectionClosed) msg;
        if (cl.isPeerClosed()) {
          if (storage.isEmpty()) {
            getContext().stop(getSelf());
          } else {
            // Flush the remaining buffered chunks before stopping.
            getContext().become(closing);
          }
        }
      }
    }
  };

  //#buffering
  // ACK-based recovery state entered after a NACK for the chunk in `nack`.
  protected Procedure<Object> buffering(final Ack nack) {
    return new Procedure<Object>() {

      private int toAck = 10;            // chunks to write one-at-a-time before going back to NACK mode
      private boolean peerClosed = false; // remember a half-close while draining

      @Override
      public void apply(Object msg) throws Exception {
        if (msg instanceof Received) {
          // Keep buffering incoming data; nothing is written until WritingResumed.
          buffer(((Received) msg).data());

        } else if (msg instanceof WritingResumed) {
          writeFirst();

        } else if (msg instanceof ConnectionClosed) {
          if (((ConnectionClosed) msg).isPeerClosed())
            peerClosed = true;
          else
            getContext().stop(getSelf());

        } else if (msg instanceof Integer) {
          final int ack = (Integer) msg;
          acknowledge(ack);

          if (ack >= nack.ack) {
            // otherwise it was the ack of the last successful write

            if (storage.isEmpty()) {
              if (peerClosed)
                getContext().stop(getSelf());
              else
                getContext().become(writing);

            } else {
              if (toAck > 0) {
                // stay in ACK-based mode for a short while
                writeFirst();
                --toAck;

              } else {
                // then return to NACK-based again
                writeAll();
                if (peerClosed)
                  getContext().become(closing);
                else
                  getContext().become(writing);
              }
            }
          }
        }
      }
    };
  }
  //#buffering

  //#closing
  // Peer closed while data was still buffered: drain, then stop.
  protected Procedure<Object> closing = new Procedure<Object>() {
    @Override
    public void apply(Object msg) throws Exception {
      if (msg instanceof CommandFailed) {
        // the command can only have been a Write
        connection.tell(TcpMessage.resumeWriting(), getSelf());
        // Push closeResend on top of the behavior stack (discardOld = false)
        // so unbecome() returns here once the resend is under way.
        getContext().become(closeResend, false);
      } else if (msg instanceof Integer) {
        acknowledge((Integer) msg);
        if (storage.isEmpty())
          getContext().stop(getSelf());
      }
    }
  };

  // Waits for WritingResumed, replays the whole buffer, then pops back to closing.
  protected Procedure<Object> closeResend = new Procedure<Object>() {
    @Override
    public void apply(Object msg) throws Exception {
      if (msg instanceof WritingResumed) {
        writeAll();
        getContext().unbecome();
      } else if (msg instanceof Integer) {
        acknowledge((Integer) msg);
      }
    }
  };
  //#closing

  //#storage-omitted
  @Override
  public void onReceive(Object msg) throws Exception {
    // this method is not used due to become()
  }

  @Override
  public void postStop() {
    log.info("transferred {} bytes from/to [{}]", transferred, remote);
  }

  private long transferred;                                      // total bytes successfully acknowledged
  private int storageOffset = 0;                                 // offset of the first unacknowledged chunk
  private long stored = 0;                                       // bytes currently held in `storage`
  private Queue<ByteString> storage = new LinkedList<ByteString>();

  private boolean suspended = false;                             // true while reading is suspended

  //#helpers
  // Appends a chunk to the buffer and applies the watermark policy.
  protected void buffer(ByteString data) {
    storage.add(data);
    stored += data.size();

    if (stored > MAX_STORED) {
      log.warning("drop connection to [{}] (buffer overrun)", remote);
      getContext().stop(getSelf());

    } else if (stored > HIGH_WATERMARK) {
      log.debug("suspending reading at {}", currentOffset());
      connection.tell(TcpMessage.suspendReading(), getSelf());
      suspended = true;
    }
  }

  // Removes the acknowledged head chunk; acks must arrive strictly in order.
  protected void acknowledge(int ack) {
    assert ack == storageOffset;
    assert !storage.isEmpty();

    final ByteString acked = storage.remove();
    stored -= acked.size();
    transferred += acked.size();
    storageOffset += 1;

    if (suspended && stored < LOW_WATERMARK) {
      log.debug("resuming reading");
      connection.tell(TcpMessage.resumeReading(), getSelf());
      suspended = false;
    }
  }
  //#helpers

  // Offset that the NEXT buffered chunk will get.
  protected int currentOffset() {
    return storageOffset + storage.size();
  }

  // Replays every buffered chunk (NACK-based catch-up).
  protected void writeAll() {
    int i = 0;
    for (ByteString data : storage) {
      connection.tell(TcpMessage.write(data, new Ack(storageOffset + i++)), getSelf());
    }
  }

  // Replays only the head chunk (ACK-based one-at-a-time mode).
  protected void writeFirst() {
    connection.tell(TcpMessage.write(storage.peek(), new Ack(storageOffset)), getSelf());
  }
  //#storage-omitted

}
//#echo-handler
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package edu.unibi.agbi.editor.business.service;

import edu.unibi.agbi.editor.core.data.dao.ResultsDao;
import edu.unibi.agbi.editor.core.data.entity.data.IDataArc;
import edu.unibi.agbi.editor.core.data.entity.data.IDataNode;
import edu.unibi.agbi.editor.core.data.entity.result.Simulation;
import edu.unibi.agbi.editor.core.data.entity.result.ResultSet;
import edu.unibi.agbi.editor.business.exception.ResultsException;
import edu.unibi.agbi.editor.core.util.Utility;
import edu.unibi.agbi.petrinet.entity.IElement;
import edu.unibi.agbi.petrinet.entity.abstr.Element;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javafx.collections.ListChangeListener;
import javafx.collections.ObservableList;
import javafx.scene.chart.LineChart;
import javafx.scene.chart.XYChart;
import javafx.scene.control.TableView;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

/**
 * Service that manages simulation results and their presentation as
 * {@link XYChart.Series} in {@link LineChart}s, including an "auto add"
 * mechanism that pushes new simulation data into registered charts.
 *
 * @author PR
 */
@Service
public class ResultService {

    private final ResultsDao resultsDao;

    @Autowired private MessengerService messengerService;

    // Regex patterns (injected from configuration) used to classify the
    // simulation value identifiers handled by getValueName().
    @Value("${regex.value.fire}") private String valueFire;
    @Value("${regex.value.speed}") private String valueSpeed;
    @Value("${regex.value.token}") private String valueToken;
    @Value("${regex.value.tokenIn.actual}") private String valueTokenInActual;
    @Value("${regex.value.tokenIn.total}") private String valueTokenInTotal;
    @Value("${regex.value.tokenOut.actual}") private String valueTokenOutActual;
    @Value("${regex.value.tokenOut.total}") private String valueTokenOutTotal;

    @Autowired
    public ResultService(ResultsDao resultsDao) {
        this.resultsDao = resultsDao;
        // Whenever a simulation is appended to the dao's list, feed the newest
        // one through the auto-add mechanism.
        this.resultsDao.getSimulationResults().addListener(new ListChangeListener() {
            @Override
            public void onChanged(ListChangeListener.Change change) {
                change.next();
                if (change.wasAdded()) {
                    try {
                        // NOTE(review): assumes additions happen at the end of
                        // the list — confirm against ResultsDao.add().
                        Simulation simulation = ResultService.this.resultsDao.getSimulationResults()
                                .get(ResultService.this.resultsDao.getSimulationResults().size() - 1);
                        AutoAddData(simulation);
                    } catch (ResultsException ex) {
                        messengerService.addException("Exception while auto adding results data!", ex);
                    }
                }
            }
        });
    }

    /**
     * Adds data to the results dao.
     *
     * @param simulationResult the simulation to store
     * @return indicates whether data has been added or not
     */
    public synchronized boolean add(Simulation simulationResult) {
        if (resultsDao.contains(simulationResult)) {
            return false;
        }
        resultsDao.add(simulationResult);
        return true;
    }

    /**
     * Gets the data for all performed simulations.
     *
     * @return the dao's observable list of simulations
     */
    public synchronized ObservableList<Simulation> getSimulationResults() {
        return resultsDao.getSimulationResults();
    }

    /**
     * Adds the given line chart and related table item list to the storage.
     *
     * @param lineChart the chart to register
     * @param tableView the table backing the chart
     * @throws ResultsException when the chart is already registered
     */
    public synchronized void add(LineChart lineChart, TableView tableView) throws ResultsException {
        if (resultsDao.contains(lineChart)) {
            throw new ResultsException("Line chart has already been stored! Cannot overwrite existing data list.");
        }
        resultsDao.add(lineChart, tableView);
    }

    /**
     * Attempts to add data to a line chart's corresponding table.
     *
     * @param lineChart the target chart
     * @param data the result set to attach
     * @throws ResultsException when the chart already holds this data
     */
    public synchronized void add(LineChart lineChart, ResultSet data) throws ResultsException {
        if (resultsDao.contains(lineChart, data)) {
            throw new ResultsException("Duplicate entry for line chart");
        }
        resultsDao.add(lineChart, data);
    }

    /**
     * Drops all data related to a line chart.
     *
     * @param lineChart the line chart that will be dropped
     */
    public synchronized void drop(LineChart lineChart) {
        resultsDao.remove(lineChart);
    }

    /**
     * Drops the given data from the given chart and the table.
     *
     * @param lineChart the line chart that will be modified
     * @param data the data to be hidden and removed from the also given chart
     */
    public synchronized void drop(LineChart lineChart, ResultSet data) {
        hide(lineChart, data);
        resultsDao.remove(lineChart, data);
    }

    /**
     * Get simulation data related to a line chart.
     *
     * @param lineChart the chart whose data list is requested
     * @return the result sets registered for the chart
     */
    public synchronized List<ResultSet> getChartData(LineChart lineChart) {
        return resultsDao.getChartResultsList(lineChart);
    }

    /**
     * Gets (or lazily creates and registers) the result set for the given
     * simulation, element and variable, then refreshes its chart series and
     * display name.
     *
     * @param simulation the source simulation
     * @param element the model element the variable belongs to
     * @param variable the value identifier
     * @return the stored (possibly newly created) result set
     * @throws ResultsException propagated from updateSeries
     */
    public ResultSet getResultSet(Simulation simulation, IElement element, String variable) throws ResultsException {
        ResultSet result;
        result = new ResultSet(simulation, element, variable);
        if (resultsDao.contains(result)) {
            // Reuse the stored instance so series state is shared.
            result = resultsDao.get(result);
        } else {
            resultsDao.add(result);
        }
        updateSeries(result);
        result.getSeries().setName(getValueName(variable, simulation) + " (" + simulation.toStringShort() + ")");
        return result;
    }

    /**
     * Get result sets directly related to a given simulation and element only.
     * Should only be used for showing results in the inspector, as the isShown
     * boolean will affect all viewers that have this data in their table.
     *
     * @param simulation the source simulation (may be null)
     * @param element the model element (may be null)
     * @return one result set per relevant variable; empty list for null input
     * @throws ResultsException propagated from getResultSet
     */
    public List<ResultSet> getResultSets(Simulation simulation, IElement element) throws ResultsException {
        List<ResultSet> sets;
        Set<String> variables;
        if (simulation == null || element == null) {
            return new ArrayList();
        }
        sets = new ArrayList();
        variables = simulation.getElementFilter(element);
        if (element.getElementType() == Element.Type.PLACE) {
            // For places, restrict to the plain token variable when present.
            for (String var : variables) {
                if (var.matches(valueToken)) {
                    // use only .t per default
                    variables = new HashSet();
                    variables.add(var);
                    break;
                }
            }
        }
        for (String variable : variables) {
            sets.add(getResultSet(simulation, element, variable));
        }
        return sets;
    }

    /**
     * Removes the given data from the given chart.
     *
     * @param lineChart the chart to remove the series from
     * @param data the data to hide
     */
    public synchronized void hide(LineChart lineChart, ResultSet data) {
        lineChart.getData().remove(data.getSeries());
        data.setShown(false);
    }

    /**
     * Shows the given data in the given chart.
     *
     * @param lineChart the chart to add the series to
     * @param data the data to show
     * @throws ResultsException when no series data is available
     */
    public synchronized void show(LineChart lineChart, ResultSet data) throws ResultsException {
        updateSeries(data);
        if (data.getSeries() != null) {
            if (!lineChart.getData().contains(data.getSeries())) {
                lineChart.getData().add(data.getSeries());
            }
            data.setShown(true);
        } else {
            throw new ResultsException("No chart data available");
        }
    }

    /** Registers the data for automatic insertion when new simulations arrive. */
    public synchronized void addForAutoAdding(LineChart lineChart, ResultSet data) {
        resultsDao.addForAutoAdding(lineChart, data);
    }

    /** Checks whether the data is registered for auto-adding on the chart. */
    public synchronized boolean containsForAutoAdding(LineChart lineChart, ResultSet data) {
        return resultsDao.containsForAutoAdding(lineChart, data);
    }

    /** Deregisters the data from auto-adding on the chart. */
    public synchronized void removeFromAutoAdding(LineChart lineChart, ResultSet data) {
        resultsDao.removeFromAutoAdding(lineChart, data);
    }

    /**
     * Refreshes the series of every auto-added result set and its table view.
     *
     * @throws ResultsException propagated from updateSeries
     */
    public synchronized void UpdateAutoAddedData() throws ResultsException {
        for (LineChart lineChart : resultsDao.getLineChartsWithAutoAdding()) {
            for (ResultSet data : resultsDao.getChartTable(lineChart).getItems()) {
                updateSeries(data);
            }
            resultsDao.getChartTable(lineChart).refresh();
        }
    }

    /**
     * Maps a raw value identifier to a human-readable label using the injected
     * regex patterns. Token-in/out labels include the arc's source/target when
     * available, otherwise the zero-based arc index. Returns null when the
     * identifier matches no known pattern (or has no parsable [index]).
     *
     * @param value the raw value identifier
     * @param simulation the simulation used to resolve the element and arcs
     * @return the display name, or null when not recognized
     */
    public String getValueName(String value, Simulation simulation) {
        IDataArc arc;
        IDataNode node;
        String indexStr;
        int index;
        if (value.matches(valueFire)) {
            return "Firing";
        } else if (value.matches(valueSpeed)) {
            return "Speed";
        } else if (value.matches(valueToken)) {
            return "Token";
        } else if (value.matches(valueTokenInActual)) {
            // Incoming token flow, current value. The "[n]" suffix is 1-based;
            // convert to the 0-based arc index.
            indexStr = Utility.parseSubstring(value, "[", "]");
            if (indexStr != null) {
                index = Integer.parseInt(indexStr) - 1;
                node = (IDataNode) simulation.getFilterElement(value);
                if (node.getArcsIn().isEmpty()) {
                    return "Token from <" + index + "> [ACTUAL]";
                } else {
                    arc = (IDataArc) node.getArcsIn().get(index);
                    return "Token from " + arc.getSource().toString() + " [ACTUAL]";
                }
            } else {
                return null;
            }
        } else if (value.matches(valueTokenInTotal)) {
            // Incoming token flow, accumulated total.
            indexStr = Utility.parseSubstring(value, "[", "]");
            if (indexStr != null) {
                index = Integer.parseInt(indexStr) - 1;
                node = (IDataNode) simulation.getFilterElement(value);
                if (node.getArcsIn().isEmpty()) {
                    return "Token from <" + index + "> [TOTAL]";
                } else {
                    arc = (IDataArc) node.getArcsIn().get(index);
                    return "Token from " + arc.getSource().toString() + " [TOTAL]";
                }
            } else {
                return null;
            }
        } else if (value.matches(valueTokenOutActual)) {
            // Outgoing token flow, current value.
            indexStr = Utility.parseSubstring(value, "[", "]");
            if (indexStr != null) {
                index = Integer.parseInt(indexStr) - 1;
                node = (IDataNode) simulation.getFilterElement(value);
                if (node.getArcsOut().isEmpty()) {
                    return "Token to <" + index + "> [ACTUAL]";
                } else {
                    arc = (IDataArc) node.getArcsOut().get(index);
                    return "Token to " + arc.getTarget().toString() + " [ACTUAL]";
                }
            } else {
                return null;
            }
        } else if (value.matches(valueTokenOutTotal)) {
            // Outgoing token flow, accumulated total.
            indexStr = Utility.parseSubstring(value, "[", "]");
            if (indexStr != null) {
                index = Integer.parseInt(indexStr) - 1;
                node = (IDataNode) simulation.getFilterElement(value);
                if (node.getArcsOut().isEmpty()) {
                    return "Token to <" + index + "> [TOTAL]";
                } else {
                    arc = (IDataArc) node.getArcsOut().get(index);
                    return "Token to " + arc.getTarget().toString() + " [TOTAL]";
                }
            } else {
                return null;
            }
        } else {
            return null;
        }
    }

    /**
     * Computes the value names shared by ALL given elements (intersection by
     * display name), collecting every raw identifier contributing to a name.
     *
     * @param results the simulation supplying each element's values
     * @param elements the elements to intersect
     * @return map of display name to raw identifiers; null when elements is empty
     */
    public Map<String, List<String>> getSharedValues(Simulation results, List<IElement> elements) {
        Map<String, List<String>> valuesTmp, valuesShared = null;
        Set<String> values, valuesRemoved;
        String name;
        for (IElement element : elements) {
            values = results.getElementFilter(element);
            // Group this element's raw identifiers by display name.
            valuesTmp = new HashMap();
            for (String value : values) {
                name = getValueName(value, results);
                if (!valuesTmp.containsKey(name)) {
                    valuesTmp.put(name, new ArrayList());
                }
                valuesTmp.get(name).add(value);
            }
            if (valuesShared == null) {
                // First element seeds the shared map.
                valuesShared = valuesTmp;
            } else {
                // Keep only names also present for this element; merge their identifiers.
                valuesRemoved = new HashSet();
                for (String key : valuesShared.keySet()) {
                    if (valuesTmp.containsKey(key)) {
                        valuesShared.get(key).addAll(valuesTmp.get(key));
                    } else {
                        valuesRemoved.add(key);
                    }
                }
                for (String key : valuesRemoved) {
                    valuesShared.remove(key);
                }
            }
        }
        return valuesShared;
    }

    /**
     * Pushes the new simulation's data into every chart registered for
     * auto-adding, restricted to the elements/values chosen for that chart's
     * model and present in this simulation.
     *
     * @param simulation the freshly added simulation
     * @throws ResultsException propagated from show()
     */
    private synchronized void AutoAddData(Simulation simulation) throws ResultsException {
        Map<String, Map<IElement, Set<String>>> modelsToAutoAdd;
        Map<IElement, Set<String>> elementsToAutoAdd;
        Set<String> valuesToAutoAdd;
        ResultSet data;
        // validate all active charts
        for (LineChart lineChart : resultsDao.getLineChartsWithAutoAdding()) {
            modelsToAutoAdd = resultsDao.getDataAutoAdd(lineChart);
            if (modelsToAutoAdd != null) {
                elementsToAutoAdd = modelsToAutoAdd.get(simulation.getDao().getModelId());
                if (elementsToAutoAdd != null) {
                    // validate elements chosen for auto adding to be available
                    for (IElement elem : elementsToAutoAdd.keySet()) {
                        valuesToAutoAdd = elementsToAutoAdd.get(elem);
                        if (valuesToAutoAdd != null) {
                            // validate values chosen for auto adding to be valid
                            for (String valueToAutoAdd : valuesToAutoAdd) {
                                if (simulation.getFilterElement(valueToAutoAdd) != null) {
                                    // create and add data to chart
                                    data = new ResultSet(simulation, elem, valueToAutoAdd);
                                    try {
                                        add(lineChart, data);
                                    } catch (ResultsException ex) {
                                        // Duplicate registration is expected and harmless;
                                        // the data is still shown below.
                                        System.out.println("Duplicate results entry");
                                    }
                                    show(lineChart, data);
                                }
                            }
                        }
                    }
                }
            }
        }
    }

    /**
     * Updates the series for the given data object. Loads data from the
     * simulation and adds all additional entries to the series. Updates the
     * related chart.
     *
     * @param resultSet the result set whose series should be refreshed
     * @throws ResultsException when the result set carries no data list
     */
    public synchronized void updateSeries(ResultSet resultSet) throws ResultsException {
        List<Object> data = resultSet.getData();
        List<Object> time = resultSet.getSimulation().getTimeData();
        int indexDataProcessed = resultSet.getDataProcessedIndex();
        if (data == null) {
            throw new ResultsException("");
        }
        XYChart.Series seriesOld = resultSet.getSeries();
        // update only if additional values available
        if (seriesOld == null || data.size() > indexDataProcessed) {
            // Build a fresh series: JavaFX series cannot simply be re-attached,
            // so copy the old points and append the new ones.
            XYChart.Series seriesNew = new XYChart.Series();
            if (seriesOld != null) {
                seriesNew.getData().addAll(seriesOld.getData());
            }
            /**
             * Attach data to series. TODO replace by downsampling.
             */
            for (int i = indexDataProcessed; i < data.size(); i++) {
                seriesNew.getData().add(new XYChart.Data(
                        (Number) time.get(i),
                        (Number) data.get(i)
                ));
                indexDataProcessed++;
            }
            resultSet.setDataProcessedIndex(indexDataProcessed);
            // Create label
//            if (resultSet.getElement().getName() != null
//                    && !resultSet.getElement().getName().isEmpty()) {
//                seriesNew.setName("'" + resultSet.getElement().getName() + "' (" + resultSet.getSimulation().toStringShort() + ")");
//            } else {
            seriesNew.setName("'" + resultSet.getElement().getId() + "' (" + resultSet.getSimulation().toStringShort() + ")");
//            }
            // Replace in chart
            if (seriesOld != null) {
                XYChart chart = seriesOld.getChart();
                if (chart != null) {
                    chart.getData().remove(seriesOld);
                    chart.getData().add(seriesNew);
                }
            }
            resultSet.setSeries(seriesNew);
        }
    }
}
/* * Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.cache.impl.operation; import com.hazelcast.cache.impl.CacheDataSerializerHook; import com.hazelcast.cache.impl.CachePartitionSegment; import com.hazelcast.cache.impl.ICacheRecordStore; import com.hazelcast.cache.impl.ICacheService; import com.hazelcast.cache.impl.record.CacheRecord; import com.hazelcast.config.CacheConfig; import com.hazelcast.nio.ObjectDataInput; import com.hazelcast.nio.ObjectDataOutput; import com.hazelcast.nio.serialization.Data; import com.hazelcast.nio.serialization.IdentifiedDataSerializable; import com.hazelcast.spi.ObjectNamespace; import com.hazelcast.spi.Operation; import com.hazelcast.spi.ServiceNamespace; import com.hazelcast.util.Clock; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; /** * Replication operation is the data migration operation of {@link com.hazelcast.cache.impl.CacheRecordStore}. * <p> * <p>Cache record store's records and configurations will be migrated into their new nodes. 
* <p> * Steps; * <ul> * <li>Serialize all non expired data.</li> * <li>Deserialize the data and config.</li> * <li>Create the configuration in the new node service.</li> * <li>Insert each record into {@link ICacheRecordStore}.</li> * </ul> * </p> * <p><b>Note:</b> This operation is a per partition operation.</p> */ public class CacheReplicationOperation extends Operation implements IdentifiedDataSerializable { private final List<CacheConfig> configs = new ArrayList<CacheConfig>(); private final Map<String, Map<Data, CacheRecord>> data = new HashMap<String, Map<Data, CacheRecord>>(); private final CacheNearCacheStateHolder nearCacheStateHolder = new CacheNearCacheStateHolder(this); public CacheReplicationOperation() { } public final void prepare(CachePartitionSegment segment, Collection<ServiceNamespace> namespaces, int replicaIndex) { for (ServiceNamespace namespace : namespaces) { ObjectNamespace ns = (ObjectNamespace) namespace; ICacheRecordStore recordStore = segment.getRecordStore(ns.getObjectName()); if (recordStore == null) { continue; } CacheConfig cacheConfig = recordStore.getConfig(); if (cacheConfig.getTotalBackupCount() >= replicaIndex) { storeRecordsToReplicate(recordStore); } } configs.addAll(segment.getCacheConfigs()); nearCacheStateHolder.prepare(segment, namespaces); } protected void storeRecordsToReplicate(ICacheRecordStore recordStore) { data.put(recordStore.getName(), recordStore.getReadOnlyRecords()); } @Override public void beforeRun() throws Exception { // Migrate CacheConfigs first ICacheService service = getService(); for (CacheConfig config : configs) { service.putCacheConfigIfAbsent(config); } } @Override public void run() throws Exception { ICacheService service = getService(); for (Map.Entry<String, Map<Data, CacheRecord>> entry : data.entrySet()) { ICacheRecordStore cache = service.getOrCreateRecordStore(entry.getKey(), getPartitionId()); cache.clear(); Map<Data, CacheRecord> map = entry.getValue(); Iterator<Map.Entry<Data, 
CacheRecord>> iterator = map.entrySet().iterator(); while (iterator.hasNext()) { Map.Entry<Data, CacheRecord> next = iterator.next(); Data key = next.getKey(); CacheRecord record = next.getValue(); iterator.remove(); cache.putRecord(key, record); } } data.clear(); if (getReplicaIndex() == 0) { nearCacheStateHolder.applyState(); } } @Override public String getServiceName() { return ICacheService.SERVICE_NAME; } @Override protected void writeInternal(ObjectDataOutput out) throws IOException { int confSize = configs.size(); out.writeInt(confSize); for (CacheConfig config : configs) { out.writeObject(config); } int count = data.size(); out.writeInt(count); long now = Clock.currentTimeMillis(); for (Map.Entry<String, Map<Data, CacheRecord>> entry : data.entrySet()) { Map<Data, CacheRecord> cacheMap = entry.getValue(); int subCount = cacheMap.size(); out.writeInt(subCount); out.writeUTF(entry.getKey()); for (Map.Entry<Data, CacheRecord> e : cacheMap.entrySet()) { final Data key = e.getKey(); final CacheRecord record = e.getValue(); if (record.isExpiredAt(now)) { continue; } out.writeData(key); out.writeObject(record); } // Empty data will terminate the iteration for read in case // expired entries were found while serializing, since the // real subCount will then be different from the one written // before out.writeData(null); } nearCacheStateHolder.writeData(out); } @Override protected void readInternal(ObjectDataInput in) throws IOException { super.readInternal(in); int confSize = in.readInt(); for (int i = 0; i < confSize; i++) { final CacheConfig config = in.readObject(); configs.add(config); } int count = in.readInt(); for (int i = 0; i < count; i++) { int subCount = in.readInt(); String name = in.readUTF(); Map<Data, CacheRecord> m = new HashMap<Data, CacheRecord>(subCount); data.put(name, m); // subCount + 1 because of the DefaultData written as the last entry // which adds another Data entry at the end of the stream! 
for (int j = 0; j < subCount + 1; j++) { Data key = in.readData(); // Empty data received so reading can be stopped here since // since the real object subCount might be different from // the number on the stream due to found expired entries if (key == null || key.dataSize() == 0) { break; } CacheRecord record = in.readObject(); m.put(key, record); } } nearCacheStateHolder.readData(in); } public boolean isEmpty() { return configs.isEmpty() && data.isEmpty(); } Collection<CacheConfig> getConfigs() { return Collections.unmodifiableCollection(configs); } @Override public int getFactoryId() { return CacheDataSerializerHook.F_ID; } @Override public int getId() { return CacheDataSerializerHook.CACHE_REPLICATION; } }
package se.l4.silo.engine.index.search.internal; import java.io.IOException; import java.io.InputStream; import org.apache.lucene.document.BinaryDocValuesField; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; import org.apache.lucene.document.StringField; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.Term; import org.apache.lucene.util.BytesRef; import reactor.core.publisher.Flux; import se.l4.exobytes.streaming.StreamingFormat; import se.l4.exobytes.streaming.StreamingInput; import se.l4.exobytes.streaming.Token; import se.l4.silo.StorageException; import se.l4.silo.engine.index.IndexDataUpdater; import se.l4.silo.engine.index.search.SearchField; import se.l4.silo.engine.index.search.SearchFieldDef; import se.l4.silo.engine.index.search.locales.LocaleSupport; import se.l4.silo.engine.index.search.locales.Locales; import se.l4.silo.engine.index.search.types.SearchFieldType; import se.l4.silo.index.search.SearchIndexException; public class SearchIndexDataUpdater<T> implements IndexDataUpdater { private final Locales locales; private final SearchIndexEncounterImpl<T> encounter; private final IndexWriter writer; private final IndexSearcherManager searchManager; private final CommitManager commitManager; public SearchIndexDataUpdater( Locales locales, SearchIndexEncounterImpl<T> encounter, IndexWriter writer, IndexSearcherManager searcherManager, CommitManager commitManager ) { this.locales = locales; this.encounter = encounter; this.writer = writer; this.searchManager = searcherManager; this.commitManager = commitManager; } @Override public long getLastHardCommit() { return commitManager.getHardCommit(); } @Override public Flux<Long> hardCommits() { return commitManager.getHardCommits(); } @Override public void clear() { try { writer.deleteAll(); commitManager.reinitialize(); writer.commit(); } catch(IOException e) 
{ throw new SearchIndexException("Unable to clear index"); } } @Override public void apply(long op, long id, InputStream rawIn) throws IOException { int version = rawIn.read(); if(version != 0) { throw new StorageException("Unknown search index version encountered: " + version); } searchManager.willMutate(false); Document doc = new Document(); BytesRef idRef = new BytesRef(serializeId(id)); FieldType ft = new FieldType(); ft.setStored(true); ft.setTokenized(false); ft.setIndexOptions(IndexOptions.DOCS); doc.add(new Field("_:id", idRef, ft)); doc.add(new BinaryDocValuesField("_:id", idRef)); LocaleSupport defaultLangSupport = locales.get("en").get(); try(StreamingInput in = StreamingFormat.CBOR.createInput(rawIn)) { in.next(Token.VALUE); String rawLocale = in.readString(); doc.add(new Field("_:lang", rawLocale, StringField.TYPE_STORED)); // Resolve locale support LocaleSupport specificLanguageSupport = locales.getOrDefault(rawLocale); in.next(Token.LIST_START); while(in.peek() != Token.LIST_END) { in.next(Token.LIST_START); in.next(Token.VALUE); String fieldName = in.readString(); SearchField<T, ?> field = encounter.getField(fieldName); if(field == null) { in.skipNext(); continue; } if(in.peek() == Token.NULL) { in.next(); addField(doc, defaultLangSupport, specificLanguageSupport, field, null); } else if(in.peek() == Token.LIST_START) { // Stored a list of values, extract and index them in.next(Token.LIST_START); while(in.peek() != Token.LIST_END) { Object value = field.getDefinition().getType().read(in); addField(doc, defaultLangSupport, specificLanguageSupport, field, value); } in.next(Token.LIST_END); } else { Object value = field.getDefinition().getType().read(in); addField(doc, defaultLangSupport, specificLanguageSupport, field, value); } in.next(Token.LIST_END); } in.next(Token.LIST_END); } // Update the index Term idTerm = new Term("_:id", idRef); try { writer.updateDocument(idTerm, doc); } catch(IOException e) { throw new StorageException("Unable to update 
search index; " + e.getMessage(), e); } // Tell our commit policy that we have modified the index commitManager.indexModified(op); } private <V> void addField( Document document, LocaleSupport fallback, LocaleSupport current, SearchField<T, ?> field, V object ) { if(object == null) { // Store null = true String fieldName = encounter.nullName(field.getDefinition()); document.add(new Field(fieldName, NullFields.VALUE_NULL, NullFields.FIELD_TYPE)); return; } // Store null = false String fieldName = encounter.nullName(field.getDefinition()); document.add(new Field(fieldName, NullFields.VALUE_NON_NULL, NullFields.FIELD_TYPE)); // Index the actual field SearchFieldDef<?> def = field.getDefinition(); SearchFieldType type = ((SearchFieldType) def.getType()); if(type.isLocaleSupported() && def.isLanguageSpecific() && fallback != current) { type.create(new FieldCreationEncounterImpl<V>( encounter, document::add, (SearchField) field, current, object )); } type.create(new FieldCreationEncounterImpl<V>( encounter, document::add, (SearchField) field, fallback, object )); } @Override public void delete(long op, long id) { BytesRef idRef = new BytesRef(serializeId((id))); try { searchManager.willMutate(true); writer.deleteDocuments(new Term("_:id", idRef)); } catch(IOException e) { throw new StorageException("Unable to delete from search index; " + e.getMessage(), e); } // Tell our commit policy that we have modified the index commitManager.indexModified(op); } private static final byte[] serializeId(long id) { return new byte[] { (byte) id, (byte) (id >> 8), (byte) (id >> 16), (byte) (id >> 24), (byte) (id >> 32), (byte) (id >> 40), (byte) (id >> 48), (byte) (id >> 56) }; } }
/* * SoundManager.java * * Copyright 2007 William Robertson * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 */ package com.rattat.micro.game.aster; import com.rattat.micro.game.aster.mvc.GameListener; import com.rattat.micro.game.aster.mvc.Model; import com.rattat.micro.game.aster.mvc.ShipListener; import com.rattat.micro.sound.SimplePlayer; /** * An instance of the SoundManager class responds to * game and ship events to play sounds at the * appropriate times * * @author william@rattat.com */ public class SoundManager implements GameListener, ShipListener { /** * Flag for on or off */ private boolean on = false; /** * Key for the explosion sound */ public static final String SOUND_EXPLOSION = "explosion"; /** * Key for the shoot sound */ public static final String SOUND_SHOOT = "shoot"; /** * Key for the starting sound */ public static final String SOUND_START = "start"; //public static final String SOUND_END = "end"; /** * Key for the saucer sound */ public static final String SOUND_SAUCER = "saucer"; /** * Key for the theme music played in the menu */ public static final String SOUND_MENU = "menu"; //public static final String SOUND_EXPLOSION2 = "explosion2"; /** * Key for the sound when a saucer missile is bloecked by an esteroid */ public static final String SOUND_BLOCK = "block"; /** * The helper object for playing sounds */ private SimplePlayer player = null; /** * Create a new instance of SoundManager */ public SoundManager() { player = SimplePlayer.getInstance(); initSounds(); } /** * Load all the sounds that will be used in the application */ private void initSounds() { try { player.add(SOUND_MENU, "/menu.mid", "audio/midi"); player.add(SOUND_EXPLOSION, "/explosion.wav", "audio/x-wav"); player.add(SOUND_SHOOT, "/shoot.wav", "audio/x-wav"); player.add(SOUND_START, "/start.wav", 
"audio/x-wav"); //player.add(SOUND_END, "/end.wav", "audio/x-wav"); player.add(SOUND_SAUCER, "/saucer.wav", "audio/x-wav"); //player.add(SOUND_EXPLOSION2,"/explosion2.wav","audio/x-wav"); player.add(SOUND_BLOCK, "/block.wav","audio/x-wav"); } catch (Exception e) { System.err.println(e); } } /** * Respond to ship events and play sounds at appropriate times * * @param event * @param model * * @see com.rattat.micro.game.aster.mvc.ShipListener.shipEvent(int event, Model model) */ public void shipEvent(int event, Model model) { if ( ! on ) { return; } switch (event) { case EVENT_THRUST_START: break; case EVENT_THRUST_STOP: break; case EVENT_MISSILE_FIRED: player.play(SOUND_SHOOT); break; } } /** * Respond to game events and play sounds at appropriate times * * @param gameEvent * @param model * * @see com.rattat.micro.game.aster.mvc.GameListener.gameEvent(int gameEvent, Model model) */ public void gameEvent(int gameEvent, Model model) { if ( ! on ) { return; } switch (gameEvent) { case EVENT_GAME_START: break; case EVENT_GAME_END: //player.play(SOUND_END); break; case EVENT_ASTEROID_DESTROYED: player.play(SOUND_EXPLOSION); break; case EVENT_SHIP_DESTROYED: player.play(SOUND_EXPLOSION); break; case EVENT_SAUCER_DESTROYED: player.play(SOUND_EXPLOSION); break; case EVENT_SAUCER_APPEAR: player.play(SOUND_SAUCER); break; case EVENT_SAUCER_MISSILE_FIRED: player.play(SOUND_SHOOT); break; case EVENT_NEW_LEVEL: break; case EVENT_MISSILE_BLOCKED: player.play(SOUND_BLOCK); break; case EVENT_SHIP_MORTAL: player.play(SOUND_START); break; } } /** * Check if playing sounds is turned on or off * * @return True if on, false otherwise */ public boolean isOn() { return on; } /** * Turn sdounds on or off * * @param on */ public void setOn(boolean on) { this.on = on; } /** * Play a sound by name an number of times * * @param sound Name of the sound to play * @param num */ public void play(String sound, int num) { if ( on ) { player.play(sound, num); } } /** * Play a sound * * @param sound 
Name of the sound to play * @param num */ public void play(String sound) { if ( on ) { player.play(sound); } } /** * Stop playing a sound * * @param sound Name of the sound to stop playing the sound off */ public void stop(String sound) { player.stop(sound); } }
package com.pge.ev; import java.util.*; import java.io.*; public class FileReader { public static double[] readPeaksTieredRateFile() { Scanner inFile = new Scanner(""); String lineData = ""; Scanner inLine = new Scanner(""); try { inFile = new Scanner(new File(FileReader.class.getClassLoader().getResource("RateFiles/EV_Rate_Components_Peaks.txt").getPath())); lineData = inFile.nextLine();//Gets rid of header inLine = new Scanner(lineData); } catch(Exception e) { //e.printStackTrace(); System.out.println("Can't find tieredRateFile"); } double tempRate = 0; double[] returnArray = new double[6]; for(int i = 0; i < 6; i++) { inFile.nextLine(); inLine = new Scanner(inFile.nextLine()); while(inLine.hasNext()) { tempRate += inLine.nextDouble(); } returnArray[i] = tempRate; tempRate = 0; } inFile.close(); inLine.close(); return returnArray; } public static double[][] getBreakdownRates() { Scanner inFile = new Scanner(""); String lineData = ""; Scanner inLine = new Scanner(""); try { inFile = new Scanner(new File(FileReader.class.getClassLoader().getResource("RateFiles/EV_Rate_Components_Peaks.txt").getPath())); lineData = inFile.nextLine();//gets rid of header inLine = new Scanner(lineData); } catch(Exception e) { e.printStackTrace(); System.out.println("Can't find tieredRateFile"); } double[][] returnArray = new double[13][6];//Component-Peak for(int j = 0; j < 6; j++) { inFile.nextLine();//deletes the line that says the type of season and peak inLine = new Scanner(inFile.nextLine()); for(int i = 0; i < 13; i++) { returnArray[i][j] = inLine.nextDouble(); } } inLine.close(); inFile.close(); return returnArray; } public static String[][] readWinterPeakTimes() { Scanner inFile = new Scanner(""); String lineData = ""; Scanner inLine = new Scanner(""); inLine.useDelimiter(","); try { inFile = new Scanner(new File(FileReader.class.getClassLoader().getResource("RateFiles/EV_Winter_Peak_Times.csv").getPath())); lineData = inFile.nextLine();//gets rid of first line with the list 
of times inLine = new Scanner(lineData); inLine.useDelimiter(","); } catch(Exception e) { //e.printStackTrace(); System.out.println("Can't find Winter Peak Times File"); } String[][] returnArray = new String[7][24]; for(int i = 0; i < 7; i++) { inLine = new Scanner(inFile.nextLine()); inLine.useDelimiter(","); inLine.next();//gets rid of Day for(int j = 0; j < 24; j++) { returnArray[i][j] = inLine.next(); } } inLine.close(); inFile.close(); return returnArray; } public static String[][] readSummerPeakTimes() { Scanner inFile = new Scanner(""); String lineData = ""; Scanner inLine = new Scanner(""); inLine.useDelimiter(","); try { inFile = new Scanner(new File(FileReader.class.getClassLoader().getResource("RateFiles/EV_Summer_Peak_Times.csv").getPath())); lineData = inFile.nextLine();//gets rid of first line with the list of times inLine = new Scanner(lineData); inLine.useDelimiter(","); } catch(Exception e) { //e.printStackTrace(); System.out.println("Can't find Summer Peak Times File"); } String[][] returnArray = new String[7][24]; for(int i = 0; i < 7; i++) { inLine = new Scanner(inFile.nextLine()); inLine.useDelimiter(","); inLine.next();//gets rid of Day for(int j = 0; j < 24; j++) { returnArray[i][j] = inLine.next(); } } inLine.close(); inFile.close(); return returnArray; } public static String[][] readSummerPeakTimesDST()//Daylight savings time { Scanner inFile = new Scanner(""); String lineData = ""; Scanner inLine = new Scanner(""); inLine.useDelimiter(","); try { inFile = new Scanner(new File(FileReader.class.getClassLoader().getResource("RateFiles/EV_Summer_Peak_Times_DaylightSavings.csv").getPath())); lineData = inFile.nextLine();//gets rid of first line with the list of times inLine = new Scanner(lineData); inLine.useDelimiter(","); } catch(Exception e) { //e.printStackTrace(); System.out.println("Can't find Summer Peak Daylight Savings Time File"); } String[][] returnArray = new String[7][24]; for(int i = 0; i < 7; i++) { inLine = new 
Scanner(inFile.nextLine()); inLine.useDelimiter(","); inLine.next();//gets rid of Day for(int j = 0; j < 24; j++) { returnArray[i][j] = inLine.next(); } } inLine.close(); inFile.close(); return returnArray; } public static String[][] readWinterPeakTimesDST() { Scanner inFile = new Scanner(""); String lineData = ""; Scanner inLine = new Scanner(""); inLine.useDelimiter(","); try { inFile = new Scanner(new File(FileReader.class.getClassLoader().getResource("RateFiles/EV_Winter_Peak_Times_DaylightSavings.csv").getPath())); lineData = inFile.nextLine();//gets rid of first line with the list of times inLine = new Scanner(lineData); inLine.useDelimiter(","); } catch(Exception e) { //e.printStackTrace(); System.out.println("Can't find Winter Peak Daylight Savings Times File"); } String[][] returnArray = new String[7][24]; for(int i = 0; i < 7; i++) { inLine = new Scanner(inFile.nextLine()); inLine.useDelimiter(","); inLine.next();//gets rid of Day for(int j = 0; j < 24; j++) { returnArray[i][j] = inLine.next(); } } inLine.close(); inFile.close(); return returnArray; } public static List<String> readHolidaysFile() { Scanner inFile = new Scanner(""); String lineData = ""; Scanner inLine = new Scanner(""); List<String> list = new ArrayList<>(); try { inFile = new Scanner(new File(FileReader.class.getClassLoader().getResource("RateFiles/Holidays.txt").getPath())); lineData = inFile.nextLine();//gets rid of first line inLine = new Scanner(lineData); } catch(Exception e) { //e.printStackTrace(); System.out.println("Can't find Holidays File"); } while(inFile.hasNextLine()) { list.add(inFile.nextLine()); } inFile.close(); inLine.close(); return list; } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.apache.directory.server.kerberos.shared.store; import java.io.IOException; import java.util.HashMap; import java.util.Map; import javax.security.auth.kerberos.KerberosPrincipal; import org.apache.directory.api.ldap.model.entry.Attribute; import org.apache.directory.api.ldap.model.entry.StringValue; import org.apache.directory.api.ldap.model.entry.Value; import org.apache.directory.api.ldap.model.exception.LdapException; import org.apache.directory.server.i18n.I18n; import org.apache.directory.shared.kerberos.KerberosTime; import org.apache.directory.shared.kerberos.codec.KerberosDecoder; import org.apache.directory.shared.kerberos.codec.types.EncryptionType; import org.apache.directory.shared.kerberos.codec.types.SamType; import org.apache.directory.shared.kerberos.components.EncryptionKey; import org.apache.directory.shared.kerberos.exceptions.KerberosException; /** * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a> */ public class PrincipalStoreEntryModifier { // principal private String distinguishedName; private String commonName; private KerberosPrincipal principal; private String realmName; // uidObject private String userId; // KDCEntry // must 
private int keyVersionNumber; // may private KerberosTime validStart; private KerberosTime validEnd; private KerberosTime passwordEnd; private int maxLife; private int maxRenew; private int kdcFlags; private SamType samType; private boolean disabled = false; private boolean lockedOut = false; private KerberosTime expiration = KerberosTime.INFINITY; private Map<EncryptionType, EncryptionKey> keyMap; /** * Returns the {@link PrincipalStoreEntry}. * * @return The {@link PrincipalStoreEntry}. */ public PrincipalStoreEntry getEntry() { return new PrincipalStoreEntry( distinguishedName, commonName, userId, principal, keyVersionNumber, validStart, validEnd, passwordEnd, maxLife, maxRenew, kdcFlags, keyMap, realmName, samType, disabled, lockedOut, expiration ); } /** * Sets whether the account is disabled. * * @param disabled */ public void setDisabled( boolean disabled ) { this.disabled = disabled; } /** * Sets whether the account is locked-out. * * @param lockedOut */ public void setLockedOut( boolean lockedOut ) { this.lockedOut = lockedOut; } /** * Sets the expiration time. * * @param expiration */ public void setExpiration( KerberosTime expiration ) { this.expiration = expiration; } /** * Sets the distinguished name (Dn). * * @param distinguishedName */ public void setDistinguishedName( String distinguishedName ) { this.distinguishedName = distinguishedName; } /** * Sets the common name (cn). * * @param commonName */ public void setCommonName( String commonName ) { this.commonName = commonName; } /** * Sets the user ID. * * @param userId */ public void setUserId( String userId ) { this.userId = userId; } /** * Sets the KDC flags. * * @param kdcFlags */ public void setKDCFlags( int kdcFlags ) { this.kdcFlags = kdcFlags; } /** * Sets the key map. * * @param keyMap */ public void setKeyMap( Map<EncryptionType, EncryptionKey> keyMap ) { this.keyMap = keyMap; } /** * Sets the key version number. 
* * @param keyVersionNumber */ public void setKeyVersionNumber( int keyVersionNumber ) { this.keyVersionNumber = keyVersionNumber; } /** * Sets the ticket maximum life time. * * @param maxLife */ public void setMaxLife( int maxLife ) { this.maxLife = maxLife; } /** * Sets the ticket maximum renew time. * * @param maxRenew */ public void setMaxRenew( int maxRenew ) { this.maxRenew = maxRenew; } /** * Sets the end-of-life for the password. * * @param passwordEnd */ public void setPasswordEnd( KerberosTime passwordEnd ) { this.passwordEnd = passwordEnd; } /** * Sets the principal. * * @param principal */ public void setPrincipal( KerberosPrincipal principal ) { this.principal = principal; } /** * Sets the realm. * * @param realmName */ public void setRealmName( String realmName ) { this.realmName = realmName; } /** * Sets the end of validity. * * @param validEnd */ public void setValidEnd( KerberosTime validEnd ) { this.validEnd = validEnd; } /** * Sets the start of validity. * * @param validStart */ public void setValidStart( KerberosTime validStart ) { this.validStart = validStart; } /** * Sets the single-use authentication (SAM) type. * * @param samType */ public void setSamType( SamType samType ) { this.samType = samType; } /** * Converts the ASN.1 encoded key set to a map of encryption types to encryption keys. * * @param krb5key * @return The map of encryption types to encryption keys. 
* @throws LdapException * @throws IOException */ public Map<EncryptionType, EncryptionKey> reconstituteKeyMap( Attribute krb5key ) throws KerberosException, LdapException { Map<EncryptionType, EncryptionKey> map = new HashMap<EncryptionType, EncryptionKey>(); for ( Value<?> val : krb5key ) { if ( val instanceof StringValue ) { throw new IllegalStateException( I18n.err( I18n.ERR_626 ) ); } byte[] encryptionKeyBytes = val.getBytes(); EncryptionKey encryptionKey = KerberosDecoder.decodeEncryptionKey( encryptionKeyBytes ); map.put( encryptionKey.getKeyType(), encryptionKey ); } return map; } }
/* * Copyright 2012 Harald Wellmann * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.ops4j.pax.exam.sample2.model; import java.util.ArrayList; import java.util.List; import javax.persistence.Entity; import javax.persistence.Id; import javax.persistence.JoinTable; import javax.persistence.Lob; import javax.persistence.ManyToMany; import javax.persistence.ManyToOne; import javax.persistence.OneToMany; import javax.persistence.Table; @Entity @Table(name = "movie") public class Movie { @Id private int id; private String imdbId; private String youtubeId; private String title; @Lob private String description; private String language; private String tagline; private String trailer; private Integer runtime; private String homepage; private String imageUrl; @ManyToOne private Director director; @ManyToMany @JoinTable(name = "movie_person") private List<Actor> actors = new ArrayList<Actor>(); @OneToMany(mappedBy = "movie") private List<Role> roles = new ArrayList<Role>(); @OneToMany(mappedBy = "movie") private List<Rating> ratings; /** * @return the id */ public int getId() { return id; } /** * @param id * the id to set */ public void setId(int id) { this.id = id; } /** * @return the imdbId */ public String getImdbId() { return imdbId; } /** * @param imdbId * the imdbId to set */ public void setImdbId(String imdbId) { this.imdbId = imdbId; } /** * @return the youtubeId */ public String getYoutubeId() { return youtubeId; } /** * @param youtubeId * the youtubeId to set */ 
/** Sets the YouTube id. */
public void setYoutubeId(String youtubeId) {
    this.youtubeId = youtubeId;
}

/** Returns the title. */
public String getTitle() {
    return title;
}

/** Sets the title. */
public void setTitle(String title) {
    this.title = title;
}

/** Returns the description. */
public String getDescription() {
    return description;
}

/** Sets the description. */
public void setDescription(String description) {
    this.description = description;
}

/** Returns the language. */
public String getLanguage() {
    return language;
}

/** Sets the language. */
public void setLanguage(String language) {
    this.language = language;
}

/** Returns the tagline. */
public String getTagline() {
    return tagline;
}

/** Sets the tagline. */
public void setTagline(String tagline) {
    this.tagline = tagline;
}

/** Returns the trailer. */
public String getTrailer() {
    return trailer;
}

/** Sets the trailer. */
public void setTrailer(String trailer) {
    this.trailer = trailer;
}

/** Returns the runtime. */
public Integer getRuntime() {
    return runtime;
}

/** Sets the runtime. */
public void setRuntime(Integer runtime) {
    this.runtime = runtime;
}

/** Returns the homepage. */
public String getHomepage() {
    return homepage;
}

/** Sets the homepage. */
public void setHomepage(String homepage) {
    this.homepage = homepage;
}

/** Returns the image URL. */
public String getImageUrl() {
    return imageUrl;
}

/** Sets the image URL. */
public void setImageUrl(String imageUrl) {
    this.imageUrl = imageUrl;
}

/** Returns the actors. */
public List<Actor> getActors() {
    return actors;
}

/** Sets the actors. */
public void setActors(List<Actor> actors) {
    this.actors = actors;
}

/** Returns the roles. */
public List<Role> getRoles() {
    return roles;
}

/** Sets the roles. */
public void setRoles(List<Role> roles) {
    this.roles = roles;
}

/** Returns the ratings. */
public List<Rating> getRatings() {
    return ratings;
}

/** Sets the ratings. */
public void setRatings(List<Rating> ratings) {
    this.ratings = ratings;
}

/** Returns the director. */
public Director getDirector() {
    return director;
}

/** Sets the director. */
public void setDirector(Director director) {
    this.director = director;
}
}
// Copyright 2015 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.android;

import com.google.common.base.Joiner;
import com.google.common.base.Stopwatch;
import com.google.common.collect.ImmutableList;
import com.google.common.hash.Hashing;
import com.google.devtools.build.android.Converters.DependencyAndroidDataListConverter;
import com.google.devtools.build.android.Converters.ExistingPathConverter;
import com.google.devtools.build.android.Converters.FullRevisionConverter;
import com.google.devtools.build.android.Converters.PathConverter;
import com.google.devtools.build.android.Converters.UnvalidatedAndroidDataConverter;
import com.google.devtools.build.android.Converters.VariantConfigurationTypeConverter;
import com.google.devtools.common.options.Converters.CommaSeparatedOptionListConverter;
import com.google.devtools.common.options.Option;
import com.google.devtools.common.options.OptionsBase;
import com.google.devtools.common.options.OptionsParser;
import com.google.devtools.common.options.TriState;

import com.android.builder.core.AndroidBuilder;
import com.android.builder.core.VariantConfiguration;
import com.android.builder.model.AaptOptions;
import com.android.ide.common.internal.LoggedErrorException;
import com.android.ide.common.res2.MergingException;
import com.android.sdklib.repository.FullRevision;
import com.android.utils.StdLogger;

import java.io.IOException;
import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.nio.file.Path;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.logging.Logger;

/**
 * Provides an entry point for the resource processing using the AOSP build tools.
 *
 * <pre>
 * Example Usage:
 *   java/com/google/build/android/AndroidResourceProcessingAction\
 *      --sdkRoot path/to/sdk\
 *      --aapt path/to/sdk/aapt\
 *      --annotationJar path/to/sdk/annotationJar\
 *      --adb path/to/sdk/adb\
 *      --zipAlign path/to/sdk/zipAlign\
 *      --androidJar path/to/sdk/androidJar\
 *      --manifest path/to/manifest\
 *      --primaryData path/to/resources:path/to/assets:path/to/manifest:path/to/R.txt
 *      --data p/t/res1:p/t/assets1:p/t/1/AndroidManifest.xml:p/t/1/R.txt,\
 *             p/t/res2:p/t/assets2:p/t/2/AndroidManifest.xml:p/t/2/R.txt
 *      --generatedSourcePath path/to/write/generated/sources
 *      --packagePath path/to/write/archive.ap_
 *      --srcJarOutput path/to/write/archive.srcjar
 * </pre>
 */
public class AndroidResourceProcessingAction {

  private static final StdLogger STD_LOGGER =
      new StdLogger(com.android.utils.StdLogger.Level.WARNING);

  private static final Logger LOGGER =
      Logger.getLogger(AndroidResourceProcessingAction.class.getName());

  /** Flag specifications for this action. */
  public static final class Options extends OptionsBase {
    @Option(name = "apiVersion",
        defaultValue = "21.0.0",
        converter = FullRevisionConverter.class,
        category = "config",
        help = "ApiVersion indicates the version passed to the AndroidBuilder. ApiVersion must be"
            + " > 19.10 when defined.")
    // TODO(bazel-team): Determine what the API version changes in AndroidBuilder.
    public FullRevision apiVersion;

    @Option(name = "aapt",
        defaultValue = "null",
        converter = ExistingPathConverter.class,
        category = "tool",
        help = "Aapt tool location for resource packaging.")
    public Path aapt;

    @Option(name = "annotationJar",
        defaultValue = "null",
        converter = ExistingPathConverter.class,
        category = "tool",
        help = "Annotation Jar for builder invocations.")
    public Path annotationJar;

    @Option(name = "adb",
        defaultValue = "null",
        converter = ExistingPathConverter.class,
        category = "tool",
        help = "Path to adb for builder functions.")
    // TODO(bazel-team): Determine if this is completely necessary for running AndroidBuilder.
    public Path adb;

    @Option(name = "zipAlign",
        defaultValue = "null",
        converter = ExistingPathConverter.class,
        category = "tool",
        help = "Path to zipAlign for building apks.")
    public Path zipAlign;

    @Option(name = "androidJar",
        defaultValue = "null",
        converter = ExistingPathConverter.class,
        category = "tool",
        help = "Path to the android jar for resource packaging and building apks.")
    public Path androidJar;

    @Option(name = "primaryData",
        defaultValue = "null",
        converter = UnvalidatedAndroidDataConverter.class,
        category = "input",
        help = "The directory containing the primary resource directory. The contents will override"
            + " the contents of any other resource directories during merging. The expected format"
            + " is resources[|resources]:assets[|assets]:manifest")
    public UnvalidatedAndroidData primaryData;

    @Option(name = "data",
        defaultValue = "",
        converter = DependencyAndroidDataListConverter.class,
        category = "input",
        help = "Additional Data dependencies. These values will be used if not defined in the "
            + "primary resources. The expected format is "
            + "resources[#resources]:assets[#assets]:manifest:r.txt:symbols.txt"
            + "[,resources[#resources]:assets[#assets]:manifest:r.txt:symbols.txt]")
    public List<DependencyAndroidData> data;

    @Option(name = "generatedSourcePath",
        defaultValue = "null",
        converter = PathConverter.class,
        category = "output",
        help = "Path for generated sources.")
    public Path generatedSourcePath;

    @Option(name = "rOutput",
        defaultValue = "null",
        converter = PathConverter.class,
        category = "output",
        help = "Path to where the R.txt should be written.")
    public Path rOutput;

    @Option(name = "symbolsTxtOut",
        defaultValue = "null",
        converter = PathConverter.class,
        category = "output",
        help = "Path to where the symbolsTxt should be written.")
    public Path symbolsTxtOut;

    @Option(name = "packagePath",
        defaultValue = "null",
        converter = PathConverter.class,
        category = "output",
        // Fixed help text: was "Path to the write the archive."
        help = "Path to write the archive.")
    public Path packagePath;

    @Option(name = "proguardOutput",
        defaultValue = "null",
        converter = PathConverter.class,
        category = "output",
        help = "Path for the proguard file.")
    public Path proguardOutput;

    @Option(name = "srcJarOutput",
        defaultValue = "null",
        converter = PathConverter.class,
        category = "output",
        help = "Path for the generated java source jar.")
    public Path srcJarOutput;

    @Option(name = "packageType",
        defaultValue = "DEFAULT",
        converter = VariantConfigurationTypeConverter.class,
        category = "config",
        // Fixed help text: "Acceptible" -> "Acceptable".
        help = "Variant configuration type for packaging the resources."
            + " Acceptable values DEFAULT, LIBRARY, TEST")
    public VariantConfiguration.Type packageType;

    @Option(name = "densities",
        defaultValue = "",
        converter = CommaSeparatedOptionListConverter.class,
        category = "config",
        // Fixed help text: "A list densities" -> "A list of densities".
        help = "A list of densities to filter the resource drawables by.")
    public List<String> densities;

    @Option(name = "debug",
        defaultValue = "false",
        category = "config",
        help = "Indicates if it is a debug build.")
    public boolean debug;

    @Option(name = "resourceConfigs",
        defaultValue = "",
        converter = CommaSeparatedOptionListConverter.class,
        category = "config",
        help = "A list of resource config filters to pass to aapt.")
    public List<String> resourceConfigs;

    @Option(name = "useAaptCruncher",
        defaultValue = "auto",
        category = "config",
        help = "Use the legacy aapt cruncher, defaults to true for non-LIBRARY packageTypes. "
            + " LIBRARY packages do not benefit from the additional processing as the resources"
            + " will need to be reprocessed during the generation of the final apk. See"
            + " https://code.google.com/p/android/issues/detail?id=67525 for a discussion of the"
            + " different png crunching methods.")
    public TriState useAaptCruncher;

    @Option(name = "uncompressedExtensions",
        defaultValue = "",
        converter = CommaSeparatedOptionListConverter.class,
        category = "config",
        help = "A list of file extensions not to compress.")
    public List<String> uncompressedExtensions;

    @Option(name = "packageForR",
        defaultValue = "null",
        category = "config",
        help = "Custom java package to generate the R symbols files.")
    public String packageForR;

    @Option(name = "applicationId",
        defaultValue = "null",
        category = "config",
        help = "Custom application id (package manifest) for the packaged manifest.")
    public String applicationId;

    @Option(name = "versionName",
        defaultValue = "null",
        category = "config",
        help = "Version name to stamp into the packaged manifest.")
    public String versionName;

    @Option(name = "versionCode",
        defaultValue = "-1",
        category = "config",
        help = "Version code to stamp into the packaged manifest.")
    public int versionCode;

    @Option(name = "assetsToIgnore",
        defaultValue = "",
        converter = CommaSeparatedOptionListConverter.class,
        category = "config",
        help = "A list of assets extensions to ignore.")
    public List<String> assetsToIgnore;
  }

  private static Options options;

  /**
   * Entry point: parses the flags, merges the primary and dependency resources, applies density
   * filtering, invokes aapt packaging, and writes the requested outputs (source jar, R.txt,
   * symbols). Exits with 1 on merge errors, 2 on processing errors, 3 on unexpected errors.
   */
  public static void main(String[] args) {
    final Stopwatch timer = Stopwatch.createStarted();
    OptionsParser optionsParser = OptionsParser.newOptionsParser(Options.class);
    optionsParser.parseAndExitUponError(args);
    options = optionsParser.getOptions(Options.class);

    FileSystem fileSystem = FileSystems.getDefault();
    Path working = fileSystem.getPath("").toAbsolutePath();
    Path mergedAssets = working.resolve("merged_assets");
    Path mergedResources = working.resolve("merged_resources");

    final AndroidResourceProcessor resourceProcessor = new AndroidResourceProcessor(STD_LOGGER);
    final AndroidSdkTools sdkTools = new AndroidSdkTools(options.apiVersion,
        options.aapt, options.annotationJar, options.adb, options.zipAlign,
        options.androidJar, STD_LOGGER);

    try {
      LOGGER.fine(String.format("Setup finished at %sms", timer.elapsed(TimeUnit.MILLISECONDS)));

      // Expand packed resource tars and drop duplicate files before merging.
      final ImmutableList<DirectoryModifier> modifiers = ImmutableList.of(
          new PackedResourceTarExpander(working.resolve("expanded"), working),
          new FileDeDuplicator(Hashing.murmur3_128(), working.resolve("deduplicated"), working));
      final AndroidBuilder builder = sdkTools.createAndroidBuilder();

      final MergedAndroidData mergedData = resourceProcessor.mergeData(
          options.primaryData,
          options.data,
          mergedResources,
          mergedAssets,
          modifiers,
          useAaptCruncher() ? builder.getAaptCruncher() : null,
          true);
      LOGGER.info(String.format("Merging finished at %sms", timer.elapsed(TimeUnit.MILLISECONDS)));

      final Path filteredResources = fileSystem.getPath("resources-filtered");
      final Path densityManifest = fileSystem.getPath("manifest-filtered/AndroidManifest.xml");

      final DensityFilteredAndroidData filteredData = mergedData.filter(
          new DensitySpecificResourceFilter(options.densities, filteredResources, working),
          new DensitySpecificManifestProcessor(options.densities, densityManifest));
      LOGGER.info(String.format("Density filtering finished at %sms",
          timer.elapsed(TimeUnit.MILLISECONDS)));

      resourceProcessor.processResources(
          builder,
          options.packageType,
          options.debug,
          options.packageForR,
          new FlagAaptOptions(),
          options.resourceConfigs,
          options.applicationId,
          options.versionCode,
          options.versionName,
          filteredData,
          options.data,
          working.resolve("manifest"),
          options.generatedSourcePath,
          options.packagePath,
          options.proguardOutput);
      // Fixed log message typo: "appt" -> "aapt".
      LOGGER.fine(String.format("aapt finished at %sms", timer.elapsed(TimeUnit.MILLISECONDS)));

      if (options.srcJarOutput != null) {
        resourceProcessor.createSrcJar(options.generatedSourcePath, options.srcJarOutput);
      }
      if (options.rOutput != null) {
        resourceProcessor.copyRToOutput(options.generatedSourcePath, options.rOutput);
      }
      if (options.symbolsTxtOut != null) {
        resourceProcessor.copyRToOutput(options.generatedSourcePath, options.symbolsTxtOut);
      }
      LOGGER.fine(String.format("Packaging finished at %sms",
          timer.elapsed(TimeUnit.MILLISECONDS)));
    } catch (MergingException e) {
      LOGGER.log(java.util.logging.Level.SEVERE, "Error during merging resources", e);
      System.exit(1);
    } catch (IOException | InterruptedException | LoggedErrorException e) {
      LOGGER.log(java.util.logging.Level.SEVERE, "Error during processing resources", e);
      System.exit(2);
    } catch (Exception e) {
      LOGGER.log(java.util.logging.Level.SEVERE, "Unexpected", e);
      System.exit(3);
    }
    LOGGER.info(String.format("Resources processed in %sms",
        timer.elapsed(TimeUnit.MILLISECONDS)));
    // AOSP code can leave dangling threads.
    System.exit(0);
  }

  /** Decides whether the aapt png cruncher should be run during merging. */
  private static boolean useAaptCruncher() {
    // If the value was set, use that.
    if (options.useAaptCruncher != TriState.AUTO) {
      return options.useAaptCruncher == TriState.YES;
    }
    // By default png cruncher shouldn't be invoked on a library -- the work is just thrown away.
    return options.packageType != VariantConfiguration.Type.LIBRARY;
  }

  /** Adapts the parsed command-line flags to the {@link AaptOptions} interface. */
  private static final class FlagAaptOptions implements AaptOptions {
    @Override
    public boolean getUseAaptPngCruncher() {
      return options.useAaptCruncher != TriState.NO;
    }

    @Override
    public Collection<String> getNoCompress() {
      if (!options.uncompressedExtensions.isEmpty()) {
        return options.uncompressedExtensions;
      }
      return null;
    }

    @Override
    public String getIgnoreAssets() {
      if (!options.assetsToIgnore.isEmpty()) {
        return Joiner.on(":").join(options.assetsToIgnore);
      }
      return null;
    }

    @Override
    public boolean getFailOnMissingConfigEntry() {
      return false;
    }
  }
}
// Copyright 2012 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.collide.client.search;

import com.google.collide.client.util.PathUtil;

import collide.client.filetree.FileTreeModel;
import collide.client.filetree.FileTreeNode;

import com.google.collide.client.workspace.MockOutgoingController;
import com.google.collide.dto.DirInfo;
import com.google.collide.json.shared.JsonArray;
import com.google.common.collect.ImmutableList;
import com.google.gwt.junit.client.GWTTestCase;
import com.google.gwt.regexp.shared.RegExp;

/**
 * Tests to ensure the search file indexer returns correct results.
 *
 * <p>Fixture trees are built via JSNI ({@link #buildSimpleTree()} and
 * {@link #buildComplexTree()}) and wrapped in a {@link FileTreeModel}
 * by {@link #getFileTree(DirInfo)}.
 */
public class TreeWalkFileNameSearchImplTest extends GWTTestCase {

  @Override
  public String getModuleName() {
    return SearchTestUtils.BUILD_MODULE_NAME;
  }

  /** Compiles {@code query} into a case-insensitive GWT regular expression. */
  private RegExp regex(String query) {
    return RegExp.compile(query, "i");
  }

  public void testNoMatches() {
    FileNameSearch indexer = TreeWalkFileNameSearchImpl.create();
    // Setup the file tree model with the simple tree (a list of hello files)
    FileTreeModel model = getFileTree(buildSimpleTree());
    indexer.setFileTreeModel(model);
    // A query that matches no file names returns an empty result set.
    JsonArray<PathUtil> results = indexer.getMatches(regex("nothello"), 5);
    assertEquals(0, results.size());
  }

  public void testMaxResults() {
    FileNameSearch indexer = TreeWalkFileNameSearchImpl.create();
    // Setup the file tree model with the simple tree (a list of hello files)
    FileTreeModel model = getFileTree(buildSimpleTree());
    indexer.setFileTreeModel(model);
    // Verify the result list is capped at the requested maximum;
    // RETURN_ALL_RESULTS lifts the cap (the simple tree has 4 hello files).
    JsonArray<PathUtil> results = indexer.getMatches(regex("hello"), 2);
    assertEquals(2, results.size());

    results = indexer.getMatches(regex("hello"), 3);
    assertEquals(3, results.size());

    results = indexer.getMatches(regex("hello"), FileNameSearch.RETURN_ALL_RESULTS);
    assertEquals(4, results.size());
  }

  public void testCorrectMatchesFound() {
    FileNameSearch indexer = TreeWalkFileNameSearchImpl.create();
    // Setup the file tree model with the simple tree (a list of hello files)
    FileTreeModel model = getFileTree(buildComplexTree());
    indexer.setFileTreeModel(model);
    // Verify correct matches
    JsonArray<PathUtil> results = indexer.getMatches(regex("world"), 2);
    assertEquals(2, results.size());
    assertContainsPaths(ImmutableList.of("/src/world.js", "/src/world.html"), results);

    // Only one file matches "data" even though up to 4 results were requested.
    results = indexer.getMatches(regex("data"), 4);
    assertEquals(1, results.size());
    assertEquals("data.txt", results.get(0).getBaseName());
  }

  public void testSameFileNames() {
    FileNameSearch indexer = TreeWalkFileNameSearchImpl.create();
    // Setup the file tree model with the simple tree (a list of hello files)
    FileTreeModel model = getFileTree(buildComplexTree());
    indexer.setFileTreeModel(model);
    // Verify that two results are returned from two different directories
    JsonArray<PathUtil> results = indexer.getMatches(regex("hello"), 2);
    assertEquals(2, results.size());
    // Returns the proper two files
    assertContainsPaths(ImmutableList.of("/hello.js", "/src/hello.html"), results);
  }

  public void testFindFilesRelativeToPath() {
    FileNameSearch indexer = TreeWalkFileNameSearchImpl.create();
    // Setup the file tree model with the simple tree (a list of hello files)
    FileTreeModel model = getFileTree(buildComplexTree());
    indexer.setFileTreeModel(model);
    // Searching relative to /src excludes the root-level /hello.js, so only
    // one of the two "hello" files is returned.
    JsonArray<PathUtil> results =
        indexer.getMatchesRelativeToPath(new PathUtil("/src"), regex("hello"), 2);
    assertEquals(1, results.size());
    // Returns the proper file under /src
    assertContainsPaths(ImmutableList.of("/src/hello.html"), results);
  }

  public void testNoCrashWithInvalidModel() {
    FileNameSearch indexer = TreeWalkFileNameSearchImpl.create();
    // Check null filetree
    indexer.setFileTreeModel(null);
    JsonArray<PathUtil> results = indexer.getMatches(regex("haha"), 4);
    assertEquals(0, results.size());

    // Crap file tree so we can test no crashing
    FileTreeModel model = new FileTreeModel(new MockOutgoingController());
    indexer.setFileTreeModel(model);
    results = indexer.getMatches(regex("haha"), 4);
    assertEquals(0, results.size());
  }

  /**
   * Verifies that all values in the {@code actual} array are present in the
   * {@code expected}. Also checks that arrays are the same length
   */
  private void assertContainsPaths(ImmutableList<String> expected, JsonArray<PathUtil> actual) {
    assertEquals(expected.size(), actual.size());
    for (int i = 0; i < actual.size(); i++) {
      if (!expected.contains(actual.get(i).getPathString())) {
        fail("Actual contains " + actual.get(i).getPathString()
            + " which is not present in expected");
      }
    }
  }

  /**
   * Creates a file tree model given a directory structure
   */
  private FileTreeModel getFileTree(DirInfo dir) {
    FileTreeNode root = FileTreeNode.transform(dir);
    FileTreeModel model = new FileTreeModel(new MockOutgoingController());
    model.replaceNode(PathUtil.WORKSPACE_ROOT, root, "1");
    return model;
  }

  // Fixture: a flat root directory containing four hello*.js files.
  private final native DirInfo buildSimpleTree() /*-{
    return {
      // Root node is magic
      nodeType : @com.google.collide.dto.TreeNodeInfo::DIR_TYPE,
      id : "1",
      originId : "1",
      name : "root",
      files : [ {
        nodeType : @com.google.collide.dto.TreeNodeInfo::FILE_TYPE,
        id : "5",
        originId : "5",
        name : "hello.js",
        rootId : "2",
        path : "/hello.js",
        size : "1234"
      }, {
        nodeType : @com.google.collide.dto.TreeNodeInfo::FILE_TYPE,
        id : "6",
        originId : "6",
        name : "hello2.js",
        rootId : "2",
        path : "/hello2.js",
        size : "1234"
      }, {
        nodeType : @com.google.collide.dto.TreeNodeInfo::FILE_TYPE,
        id : "7",
        originId : "7",
        name : "hello3.js",
        rootId : "2",
        path : "/hello3.js",
        size : "1234"
      }, {
        nodeType : @com.google.collide.dto.TreeNodeInfo::FILE_TYPE,
        id : "8",
        originId : "8",
        name : "hello4.js",
        rootId : "2",
        path : "/hello4.js",
        size : "1234"
      } ],
      isComplete : true,
      subDirectories : []
    };
  }-*/;

  // Fixture: root with /hello.js plus /src (world.js, hello.html, world.html)
  // and /res (data.txt) subdirectories.
  public final native DirInfo buildComplexTree() /*-{
    return {
      // Root node is magic
      nodeType : @com.google.collide.dto.TreeNodeInfo::DIR_TYPE,
      id : "1",
      originId : "1",
      name : "root",
      files : [ {
        nodeType : @com.google.collide.dto.TreeNodeInfo::FILE_TYPE,
        id : "5",
        originId : "5",
        name : "hello.js",
        rootId : "2",
        path : "/hello.js",
        size : "1234"
      } ],
      isComplete : true,
      subDirectories : [ {
        nodeType : @com.google.collide.dto.TreeNodeInfo::DIR_TYPE,
        id : "2",
        originId : "2",
        name : "src",
        path : "/src",
        files : [ {
          nodeType : @com.google.collide.dto.TreeNodeInfo::FILE_TYPE,
          id : "7",
          originId : "7",
          name : "world.js",
          rootId : "2",
          path : "/src/world.js",
          size : "1234"
        }, {
          nodeType : @com.google.collide.dto.TreeNodeInfo::FILE_TYPE,
          id : "3",
          originId : "3",
          name : "hello.html",
          rootId : "2",
          path : "/src/hello.html",
          size : "1234"
        }, {
          nodeType : @com.google.collide.dto.TreeNodeInfo::FILE_TYPE,
          id : "8",
          originId : "8",
          name : "world.html",
          rootId : "2",
          path : "/src/world.html",
          size : "1234"
        } ],
        isComplete : true,
        subDirectories : []
      }, {
        nodeType : @com.google.collide.dto.TreeNodeInfo::DIR_TYPE,
        id : "4",
        originId : "4",
        name : "res",
        path : "/res",
        files : [ {
          nodeType : @com.google.collide.dto.TreeNodeInfo::FILE_TYPE,
          // NOTE(review): originId "5" differs from id "6" here — confirm intentional.
          id : "6",
          originId : "5",
          name : "data.txt",
          rootId : "4",
          path : "/res/data.txt",
          size : "1234"
        } ],
        isComplete : true,
        subDirectories : []
      } ]
    };
  }-*/;
}
/**
 * This file is part of the JCROM project.
 * Copyright (C) 2008-2015 - All rights reserved.
 * Authors: Olafur Gauti Gudmundsson, Nicolas Dos Santos
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jcrom.modeshape;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

import java.util.ArrayList;
import java.util.List;

import javax.jcr.Node;
import javax.jcr.Session;

import org.jcrom.JcrFile;
import org.jcrom.Jcrom;
import org.jcrom.SessionFactory;
import org.jcrom.SessionFactoryImpl;
import org.jcrom.dao.TreeDAO;
import org.jcrom.entities.DynamicObject;
import org.jcrom.entities.LazyInterface;
import org.jcrom.entities.LazyObject;
import org.jcrom.entities.Tree;
import org.jcrom.entities.TreeNode;
import org.junit.Test;
import org.modeshape.test.ModeShapeSingleUseTest;

/**
 * Exercises JCROM lazy loading, dynamic maps and session-factory based DAO
 * access against a single-use ModeShape repository.
 *
 * @author Olafur Gauti Gudmundsson
 * @author Nicolas Dos Santos
 */
public class TestLazyLoading extends ModeShapeSingleUseTest {

    @Test
    public void testLazyLoading() throws Exception {
        // Map the entity classes, then build a small tree fixture.
        Jcrom jcrom = new Jcrom(true, true);
        jcrom.map(Tree.class).map(LazyObject.class);

        TreeNode homeNode = new TreeNode();
        homeNode.setName("home");
        TreeNode newsNode = new TreeNode();
        newsNode.setName("news");
        TreeNode productsNode = new TreeNode();
        productsNode.setName("products");
        TreeNode templateNode = new TreeNode();
        templateNode.setName("template");

        homeNode.addChild(newsNode);
        homeNode.addChild(productsNode);

        LazyInterface lazyObject1 = new LazyObject();
        lazyObject1.setName("one");
        lazyObject1.setString("a");
        LazyInterface lazyObject2 = new LazyObject();
        lazyObject2.setName("two");
        lazyObject2.setString("b");

        Tree tree = new Tree();
        tree.setName("Tree");
        tree.setPath("/");
        tree.addChild(homeNode);
        tree.setTemplateNode(templateNode);
        tree.setLazyObject(lazyObject1);
        tree.addLazyObject(lazyObject1);
        tree.addLazyObject(lazyObject2);

        // Persist the tree and read it back; lazily-loaded fields must resolve
        // to the same values that were written.
        Node treeRootNode = jcrom.addNode(((Session) session).getRootNode(), tree);
        Tree fromNode = jcrom.fromNode(Tree.class, treeRootNode);
        assertEquals(tree.getChildren().size(), fromNode.getChildren().size());
        assertEquals(lazyObject1.getString(), fromNode.getLazyObject().getString());
        assertEquals(tree.getLazyObjects().size(), fromNode.getLazyObjects().size());
        assertEquals(lazyObject2.getString(), fromNode.getLazyObjects().get(1).getString());
        assertNull(fromNode.getStartNode());

        TreeNode homeFromNode = fromNode.getChildren().get(0);
        assertTrue(homeFromNode.getChildren().size() == homeNode.getChildren().size());
        assertTrue(homeFromNode.getChildren().get(0).getName().equals(newsNode.getName()));

        // add references
        fromNode.addFavourite(newsNode);
        fromNode.setStartNode(productsNode);
        jcrom.updateNode(treeRootNode, fromNode);

        Tree modifiedFromNode = jcrom.fromNode(Tree.class, treeRootNode);
        assertTrue(modifiedFromNode.getFavourites().size() == fromNode.getFavourites().size());
        assertTrue(modifiedFromNode.getStartNode().getName().equals(productsNode.getName()));
        assertTrue(modifiedFromNode.getStartNode().getChildren().size() == productsNode.getChildren().size());

        // Additional test for Issue 94
        // Delete favorites: setting the list to null must clear the stored references.
        modifiedFromNode.setFavourites(null);
        jcrom.updateNode(treeRootNode, modifiedFromNode);
        Tree remodifiedFromNode = jcrom.fromNode(Tree.class, treeRootNode);
        assertTrue(remodifiedFromNode.getFavourites().isEmpty());
    }

    @Test
    public void testDynamicMaps() throws Exception {
        // Build a DynamicObject holding single/multi-valued children and files.
        Jcrom jcrom = new Jcrom(true, true);
        jcrom.map(DynamicObject.class).map(TreeNode.class);

        TreeNode node1 = new TreeNode("node1");
        TreeNode node2 = new TreeNode("node2");
        List<Object> treeNodes = new ArrayList<Object>();
        treeNodes.add(new TreeNode("multiNode1"));
        treeNodes.add(new TreeNode("multiNode2"));

        JcrFile file1 = TestMapping.createFile("file1.jpg");
        JcrFile file2 = TestMapping.createFile("file2.jpg");
        List<JcrFile> files = new ArrayList<JcrFile>();
        files.add(TestMapping.createFile("multifile1.jpg"));
        files.add(TestMapping.createFile("multifile2.jpg"));

        DynamicObject dynamicObj = new DynamicObject();
        dynamicObj.setName("dynamic");
        String myNode1Name = "my_" + node1.getName();
        dynamicObj.putSingleValueChild(myNode1Name, node1);
        String myNode2Name = "my_" + node2.getName();
        dynamicObj.putSingleValueChild(myNode2Name, node2);
        dynamicObj.putMultiValueChild("many", treeNodes);
        dynamicObj.putSingleFile(file1.getName(), file1);
        dynamicObj.putSingleFile(file2.getName(), file2);
        dynamicObj.putMultiFile("manyFiles", files);

        // Persist, read back, and verify every dynamic map round-trips.
        Node newNode = jcrom.addNode(((Session) session).getRootNode(), dynamicObj);
        DynamicObject fromNode = jcrom.fromNode(DynamicObject.class, newNode);
        assertTrue(fromNode.getSingleValueChildren().size() == dynamicObj.getSingleValueChildren().size());
        assertTrue(fromNode.getMultiValueChildren().size() == dynamicObj.getMultiValueChildren().size());

        TreeNode node1FromList = (TreeNode) fromNode.getMultiValueChildren().get("many").get(0);
        assertEquals("multiNode1", node1FromList.getName());
        TreeNode node2FromList = (TreeNode) fromNode.getMultiValueChildren().get("many").get(1);
        assertEquals("multiNode2", node2FromList.getName());

        TreeNode node1FromNode = (TreeNode) fromNode.getSingleValueChildren().get(myNode1Name);
        assertTrue(node1FromNode.getName().equals(node1.getName()));
        TreeNode node2FromNode = (TreeNode) fromNode.getSingleValueChildren().get(myNode2Name);
        assertTrue(node2FromNode.getName().equals(node2.getName()));

        assertTrue(fromNode.getSingleFiles().size() == dynamicObj.getSingleFiles().size());
        assertTrue(fromNode.getMultiFiles().size() == dynamicObj.getMultiFiles().size());
        assertTrue(fromNode.getSingleFiles().get(file1.getName()).getMimeType().equals(file1.getMimeType()));
        assertTrue(fromNode.getSingleFiles().get(file2.getName()).getMimeType().equals(file2.getMimeType()));
        assertTrue(fromNode.getMultiFiles().get("manyFiles").get(0).getName().equals("multifile1.jpg"));
        assertTrue(fromNode.getMultiFiles().get("manyFiles").get(1).getName().equals("multifile2.jpg"));

        // Add single and multi-valued references and verify they round-trip too.
        TreeNode ref1 = new TreeNode("ref1");
        TreeNode ref2 = new TreeNode("ref2");
        jcrom.addNode(((Session) session).getRootNode(), ref1);
        jcrom.addNode(((Session) session).getRootNode(), ref2);
        List<Object> multiRefs = new ArrayList<Object>();
        multiRefs.add(ref1);
        multiRefs.add(ref2);
        fromNode.putSingleReference(ref1.getName(), ref1);
        fromNode.putSingleReference(ref2.getName(), ref2);
        fromNode.putMultiReference("manyRefs", multiRefs);

        jcrom.updateNode(newNode, fromNode);
        session.save();

        DynamicObject updatedNode = jcrom.fromNode(DynamicObject.class, newNode);
        assertTrue(updatedNode.getSingleReferences().size() == fromNode.getSingleReferences().size());
        assertTrue(updatedNode.getMultiReferences().size() == fromNode.getMultiReferences().size());

        TreeNode ref1FromList = (TreeNode) updatedNode.getMultiReferences().get("manyRefs").get(0);
        assertTrue(ref1FromList.getName().equals(ref1.getName()));
        TreeNode ref2FromList = (TreeNode) updatedNode.getMultiReferences().get("manyRefs").get(1);
        assertTrue(ref2FromList.getName().equals(ref2.getName()));

        TreeNode ref1FromNode = (TreeNode) updatedNode.getSingleReferences().get(ref1.getName());
        assertTrue(ref1FromNode.getName().equals(ref1.getName()));
        TreeNode ref2FromNode = (TreeNode) updatedNode.getSingleReferences().get(ref2.getName());
        assertTrue(ref2FromNode.getName().equals(ref2.getName()));
    }

    @Test
    public void testLazyLoadingWithSessionFactory() throws Exception {
        // Same scenario as testLazyLoading, but driven through a TreeDAO backed
        // by a SessionFactory instead of direct Jcrom node calls.
        SessionFactory sessionFactory = new SessionFactoryImpl(repository, null);
        Jcrom jcrom = new Jcrom(true, true);
        jcrom.map(Tree.class).map(LazyObject.class);
        jcrom.setSessionFactory(sessionFactory);

        TreeDAO dao = new TreeDAO(jcrom);

        TreeNode homeNode = new TreeNode();
        homeNode.setName("home");
        TreeNode newsNode = new TreeNode();
        newsNode.setName("news");
        TreeNode productsNode = new TreeNode();
        productsNode.setName("products");
        TreeNode templateNode = new TreeNode();
        templateNode.setName("template");

        homeNode.addChild(newsNode);
        homeNode.addChild(productsNode);

        LazyInterface lazyObject1 = new LazyObject();
        lazyObject1.setName("one");
        lazyObject1.setString("a");
        LazyInterface lazyObject2 = new LazyObject();
        lazyObject2.setName("two");
        lazyObject2.setString("b");

        Tree tree = new Tree();
        tree.setName("Tree");
        tree.setPath("/");
        tree.addChild(homeNode);
        tree.setTemplateNode(templateNode);
        tree.setLazyObject(lazyObject1);
        tree.addLazyObject(lazyObject1);
        tree.addLazyObject(lazyObject2);

        Tree createdTree = dao.create(tree);
        //Node treeRootNode = jcrom.addNode(((Session) session).getRootNode(), tree);

        Tree loadedTree = dao.loadById(createdTree.getUuid());
        //Tree fromNode = jcrom.fromNode(Tree.class, treeRootNode);
        assertEquals(tree.getChildren().size(), loadedTree.getChildren().size());
        assertEquals(lazyObject1.getString(), loadedTree.getLazyObject().getString());
        assertEquals(tree.getLazyObjects().size(), loadedTree.getLazyObjects().size());
        assertEquals(lazyObject2.getString(), loadedTree.getLazyObjects().get(1).getString());
        assertNull(loadedTree.getStartNode());

        TreeNode homeFromNode = loadedTree.getChildren().get(0);
        assertTrue(homeFromNode.getChildren().size() == homeNode.getChildren().size());
        assertTrue(homeFromNode.getChildren().get(0).getName().equals(newsNode.getName()));

        // add references
        loadedTree.addFavourite(newsNode);
        loadedTree.setStartNode(productsNode);
        loadedTree = dao.update(loadedTree);
        //jcrom.updateNode(treeRootNode, fromNode);

        Tree modifiedTree = dao.loadById(createdTree.getUuid());
        //Tree modifiedFromNode = jcrom.fromNode(Tree.class, treeRootNode);
        assertTrue(modifiedTree.getFavourites().size() == loadedTree.getFavourites().size());
        assertTrue(modifiedTree.getStartNode().getName().equals(productsNode.getName()));
        assertTrue(modifiedTree.getStartNode().getChildren().size() == productsNode.getChildren().size());
    }
}
/*
 * This file is released under terms of BSD license
 * See LICENSE file for more information
 * @author Mikhail Zhigun
 */
package clawfc;

import static clawfc.Utils.ASCII_NEWLINE_VALUE;
import static clawfc.Utils.copy;
import static clawfc.Utils.recreateDir;
import static clawfc.Utils.sprintf;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import clawfc.Configuration.FortranCompilerVendor;
import clawfc.depscan.FortranIncludesResolver;
import clawfc.depscan.PreprocessorOutputScanner;
import clawfc.utils.AsciiArrayIOStream;
import clawfc.utils.Subprocess;
import clawfc.utils.SubprocessFailed;

/**
 * <code>Preprocessor</code> encapsulates details of calling C preprocessor.
 *
 * Each worker thread gets its own working directory and filter instances via
 * {@link ThreadLocal}, so the public instance {@link #run(Path, Set)} method is
 * safe to call concurrently from multiple threads.
 */
public class Preprocessor
{
    /**
     * Vendor-specific invocation details of the preprocessor command:
     * flags, the predefined vendor macro, and whether preprocessed output
     * is written to stdout ({@code supportsRedirection}) or to a file.
     */
    public static class PreprocessorInfo
    {
        final String cmd;
        final FortranCompilerVendor vendor;
        final String version;
        final List<String> flags;
        final String getVersionFlag;
        final String macro;
        final boolean supportsRedirection;

        /**
         * Resolves vendor-specific settings and queries the compiler for its
         * version string by running {@code cmd getVersionFlag}.
         *
         * @param cmd    preprocessor (Fortran compiler) executable
         * @param vendor compiler vendor; selects flags/macro below
         * @throws Exception if the version query subprocess fails
         */
        public PreprocessorInfo(String cmd, FortranCompilerVendor vendor) throws Exception
        {
            this.cmd = cmd;
            this.vendor = vendor;
            switch (vendor)
            {
            case CRAY:
            {
                // Cray writes preprocessed output to a file, not stdout
                flags = Collections.unmodifiableList(Arrays.asList("-eP", "-hnoomp"));
                macro = "_CRAYFTN";
                supportsRedirection = false;
                getVersionFlag = "--version";
                break;
            }
            case INTEL:
            {
                flags = Collections.unmodifiableList(Arrays.asList("-preprocess-only", "-E"));
                macro = "__ICC";
                supportsRedirection = true;
                getVersionFlag = "--version";
                break;
            }
            case GNU:
            {
                flags = Collections.unmodifiableList(Arrays.asList("-E", "-cpp"));
                macro = "_GNU";
                supportsRedirection = true;
                getVersionFlag = "--version";
                break;
            }
            case NAG:
            {
                flags = Collections.unmodifiableList(Arrays.asList("-Wp,-w,-P", "-F", "-fpp"));
                macro = "NAGFOR";
                supportsRedirection = true;
                getVersionFlag = "-V";
                break;
            }
            case PORTLAND:
            {
                flags = Collections.unmodifiableList(Arrays.asList("-E", "-cpp"));
                macro = "_PGI";
                supportsRedirection = true;
                getVersionFlag = "--version";
                break;
            }
            default:
                throw new RuntimeException("Unknown compiler type");
            }
            ; // NOTE(review): stray empty statement left by the original author
            this.version = Utils.getCmdOutput(cmd, getVersionFlag);
        }
    }

    final PreprocessorInfo info;
    // Immutable base command line shared by all threads; per-call include
    // dirs and the input file are appended in run().
    final List<String> cmdArgsTemplate;

    /**
     * Per-thread working state: a thread-private working directory (suffixed
     * with the thread id) plus filter/scanner instances, which are presumably
     * not thread-safe — TODO confirm against their implementations.
     */
    class ThreadLocalData
    {
        public final Path workingDir;
        public final FortranIncludesResolver includesResolver;
        public final PreprocessorOutputScanner outputScanner;
        public final AddIgnoreDirectiveFilter addIgnoreFilter;
        public final TrailingBackslashCommentsFilter trailingBSFilter;

        public ThreadLocalData(Path workingDir) throws Exception
        {
            final long threadId = Thread.currentThread().getId();
            // e.g. <base>/pp/<threadId>
            this.workingDir = workingDir.resolve(sprintf("pp/%s", threadId));
            includesResolver = new FortranIncludesResolver();
            outputScanner = new PreprocessorOutputScanner();
            addIgnoreFilter = new AddIgnoreDirectiveFilter();
            trailingBSFilter = new TrailingBackslashCommentsFilter();
        }
    }

    final ThreadLocal<ThreadLocalData> threadLocalData;
    final Path driverTempDir;
    final List<Path> ppIncSearchPath;

    /**
     * Builds a preprocessor facade from driver configuration and options.
     * Command and vendor fall back to configuration defaults when not given
     * in options.
     *
     * @param cfg           driver configuration (supplies defaults)
     * @param opts          user options (compiler cmd/type, macros, include dirs)
     * @param driverTempDir base directory for per-thread temp directories
     * @throws Exception if the compiler version query or argument setup fails
     */
    public Preprocessor(Configuration cfg, Options opts, Path driverTempDir) throws Exception
    {
        String ppCmd = opts.fortranCompilerCmd();
        if (ppCmd == null)
        {
            ppCmd = cfg.defaultFortranCompilerCmd();
        }
        FortranCompilerVendor ppType = opts.fortranCompilerType();
        if (ppType == null)
        {
            ppType = cfg.defaultFortranCompilerVendor();
        }
        info = new PreprocessorInfo(ppCmd, ppType);
        cmdArgsTemplate = Collections
                .unmodifiableList(prepareArgs(info, opts.acceleratorDirectiveLanguage(), opts.predefinedMacros()));
        this.threadLocalData = new ThreadLocal<ThreadLocalData>();
        this.driverTempDir = driverTempDir;
        ppIncSearchPath = opts.preprocessorIncludeDirs();
    }

    /** Thrown when the preprocessor subprocess exits with a non-zero code. */
    public static class Failed extends SubprocessFailed
    {
        public Failed(List<String> args, InputStream stdin, InputStream stderr) throws IOException
        {
            super(args, stdin, stderr, null);
        }
    }

    static final Set<String> FORTRAN_FILE_EXTENSIONS_SET = Collections
            .unmodifiableSet(new HashSet<String>(Arrays.asList(Utils.FORTRAN_FILE_EXTENSIONS)));

    /**
     * Derives the preprocessed-output file name from the input name:
     * {@code name.f90 -> name.pp.f90} for recognized Fortran extensions,
     * otherwise {@code name -> name.pp}.
     */
    public static String outputFilename(Path inputFile)
    {
        String basename = inputFile.getFileName().toString();
        int i = basename.lastIndexOf('.');
        if (i != -1)
        {
            String ext = basename.substring(i + 1);
            if (FORTRAN_FILE_EXTENSIONS_SET.contains(ext))
            {
                return basename.substring(0, i) + ".pp." + ext;
            }
        }
        return basename + ".pp";
    }

    /**
     * Location of the file-based output for vendors that do not write
     * preprocessed source to stdout (currently only Cray: {@code <name>.i}
     * in the working directory).
     */
    static Path internalOutputFilePath(PreprocessorInfo info, Path inFilePath, Path workingDir) throws Exception
    {
        switch (info.vendor)
        {
        case CRAY:
            return workingDir.resolve(inFilePath.getFileName() + ".i");
        default:
            throw new Exception("Not supported");
        }
    }

    /**
     * Assembles the invariant part of the command line: compiler command,
     * CLAW/vendor macros, accelerator-directive macro and user macros.
     *
     * @param accDirLanguage "acc"/"openacc", "omp"/"openmp", "none" or null
     * @throws Exception on an unrecognized accelerator directive language
     */
    public static List<String> prepareArgs(PreprocessorInfo info, String accDirLanguage, List<String> predefinedMacros)
            throws Exception
    {
        List<String> args = new ArrayList<String>();
        args.add(info.cmd);
        args.add("-D_CLAW");
        args.addAll(info.flags);
        args.add("-D" + info.macro);
        if (accDirLanguage != null)
        {
            accDirLanguage = accDirLanguage.toLowerCase().trim();
            switch (accDirLanguage)
            {
            case "acc":
            case "openacc":
            {
                args.add("-D_OPENACC");
                break;
            }
            case "openmp":
            case "omp":
            {
                args.add("-D_OPENMP");
                break;
            }
            case "none":
            {
                break;
            }
            default:
            {
                throw new Exception(sprintf("Unknown accelerator directive language \"%s\"", accDirLanguage));
            }
            }
        }
        for (String macro : predefinedMacros)
        {
            args.add("-D" + macro);
        }
        return args;
    }

    /**
     * Builds {@code -I} arguments: the input file's own directory first, then
     * the configured include directories (which must be absolute).
     */
    public static List<String> prepareIncDirsArgs(Path inFilepath, List<Path> ppIncludeDirs) throws Exception
    {
        List<String> args = new ArrayList<String>();
        final Path inFilePathDir = inFilepath.getParent();
        args.add("-I" + inFilePathDir.toString());
        for (Path dir : ppIncludeDirs)
        {
            if (!dir.isAbsolute())
            {
                throw new Exception("Preprocessor include directories should be given with absolute paths."
                        + sprintf(" \"%s\" does not satisfy the restriction", dir));
            }
            args.add("-I" + dir.toString());
        }
        return args;
    }

    /**
     * Runs the full preprocessing pipeline for one source file:
     * <ol>
     * <li>recreates the working directory (previous contents are discarded);</li>
     * <li>optionally strips trailing-backslash comments into a temp copy;</li>
     * <li>invokes the preprocessor subprocess;</li>
     * <li>ensures the output ends with a newline;</li>
     * <li>scans and removes preprocessor line markers, collecting include paths;</li>
     * <li>applies the "ignore" directive filter;</li>
     * <li>resolves remaining Fortran INCLUDE statements.</li>
     * </ol>
     *
     * @param outIncFilePaths if non-null, cleared and filled with every include
     *                        file path discovered in steps 5 and 7
     * @return buffer with the final preprocessed source
     * @throws Failed    if the subprocess exits with a non-zero code
     * @throws Exception on any other error (e.g. relative input path)
     */
    public static AsciiArrayIOStream run(final Path inputFilePath, Set<Path> outIncFilePaths, Path workingDir,
            PreprocessorInfo info, List<String> cmdArgsTemplate, FortranIncludesResolver includesResolver,
            List<Path> ppIncSearchPath, PreprocessorOutputScanner scanner, AddIgnoreDirectiveFilter addIgnoreFilter,
            TrailingBackslashCommentsFilter trailingBSFilter) throws Exception, Failed
    {
        recreateDir(workingDir);
        if (outIncFilePaths != null)
        {
            outIncFilePaths.clear();
        }
        if (!inputFilePath.isAbsolute())
        {
            throw new Exception("Input source file should be given with absolute path. "
                    + sprintf(" \"%s\" does not satisfy the requirement", inputFilePath));
        }
        final Path intInputFilePath;
        if (trailingBSFilter != null)
        {
            // Write a filtered copy (same file name) into the working dir and
            // preprocess that instead of the original.
            final Path notrailingBSFilePath = workingDir.resolve(inputFilePath.getFileName());
            try (InputStream in = Files.newInputStream(inputFilePath);
                    AsciiArrayIOStream bufNoTrailingBS = new AsciiArrayIOStream();
                    OutputStream outNoTrailingBS = Files.newOutputStream(notrailingBSFilePath))
            {
                trailingBSFilter.run(in, bufNoTrailingBS);
                copy(bufNoTrailingBS.getAsInputStreamUnsafe(), outNoTrailingBS);
            }
            intInputFilePath = notrailingBSFilePath;
        } else
        {
            intInputFilePath = inputFilePath;
        }
        List<String> args = new ArrayList<String>();
        args.addAll(cmdArgsTemplate);
        // Include dirs are derived from the ORIGINAL input location, so
        // relative includes still resolve even when a filtered copy is used.
        args.addAll(prepareIncDirsArgs(inputFilePath, ppIncSearchPath));
        args.add(intInputFilePath.toString());
        // ------------------------------------------
        final AsciiArrayIOStream ppStdout = new AsciiArrayIOStream();
        final AsciiArrayIOStream ppStderr = new AsciiArrayIOStream();
        final int retCode = Subprocess.call(args, workingDir, ppStdout, ppStderr);
        if (retCode == 0)
        {
            AsciiArrayIOStream bufPP;
            if (info.supportsRedirection)
            {
                bufPP = ppStdout;
            } else
            {
                // File-based output (Cray): load it, then delete the temp file.
                Path outFilePath = internalOutputFilePath(info, inputFilePath, workingDir);
                bufPP = new AsciiArrayIOStream(outFilePath);
                Files.delete(outFilePath);
            }
            // Downstream scanners require newline-terminated input.
            final Byte lastChr = bufPP.getChr(bufPP.size() - 1);
            if (lastChr != null && lastChr != ASCII_NEWLINE_VALUE)
            {
                bufPP.write(ASCII_NEWLINE_VALUE);
            }
            AsciiArrayIOStream bufNoMarkers = new AsciiArrayIOStream();
            Set<Path> resIncFilePaths = scanner.run(bufPP.getAsInputStreamUnsafe(), bufNoMarkers);
            bufPP = null; // allow the (potentially large) buffer to be GC'd
            // The input file itself shows up in the marker scan; drop it.
            resIncFilePaths.remove(intInputFilePath);
            AsciiArrayIOStream bufWithIgnore = new AsciiArrayIOStream();
            addIgnoreFilter.run(bufNoMarkers.getAsInputStreamUnsafe(), bufWithIgnore);
            bufNoMarkers = null;
            AsciiArrayIOStream bufNoFtnInc = new AsciiArrayIOStream();
            Set<Path> ftnIncFilePaths = includesResolver.run(inputFilePath, bufWithIgnore, bufNoFtnInc,
                    ppIncSearchPath);
            resIncFilePaths.addAll(ftnIncFilePaths);
            if (outIncFilePaths != null)
            {
                outIncFilePaths.addAll(resIncFilePaths);
            }
            return bufNoFtnInc;
        } else
        {
            try (InputStream pStderrStrm = ppStderr.getAsInputStreamUnsafe())
            {
                throw new Failed(args, null, pStderrStrm);
            }
        }
    }

    /**
     * Thread-safe instance entry point: lazily creates this thread's working
     * state, then delegates to the static pipeline above.
     *
     * @see #run(Path, Set, Path, PreprocessorInfo, List, FortranIncludesResolver,
     *      List, PreprocessorOutputScanner, AddIgnoreDirectiveFilter,
     *      TrailingBackslashCommentsFilter)
     */
    public AsciiArrayIOStream run(Path inputFilePath, Set<Path> outIncFilePaths) throws Failed, Exception
    {
        ThreadLocalData lData = threadLocalData.get();
        if (lData == null)
        {
            lData = new ThreadLocalData(Files.createTempDirectory(driverTempDir, "fpp"));
            threadLocalData.set(lData);
        }
        return run(inputFilePath, outIncFilePaths, lData.workingDir, info, cmdArgsTemplate, lData.includesResolver,
                ppIncSearchPath, lData.outputScanner, lData.addIgnoreFilter, lData.trailingBSFilter);
    }
}
/*
 * Copyright (C) 2006 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.app;

import java.util.ArrayList;

import android.annotation.CallSuper;
import android.content.ComponentCallbacks;
import android.content.ComponentCallbacks2;
import android.content.Context;
import android.content.ContextWrapper;
import android.content.Intent;
import android.content.res.Configuration;
import android.os.Bundle;

/**
 * Base class for maintaining global application state. You can provide your own
 * implementation by creating a subclass and specifying the fully-qualified name
 * of this subclass as the <code>"android:name"</code> attribute in your
 * AndroidManifest.xml's <code>&lt;application&gt;</code> tag. The Application
 * class, or your subclass of the Application class, is instantiated before any
 * other class when the process for your application/package is created.
 *
 * <p class="note"><strong>Note: </strong>There is normally no need to subclass
 * Application. In most situations, static singletons can provide the same
 * functionality in a more modular way. If your singleton needs a global
 * context (for example to register broadcast receivers), include
 * {@link android.content.Context#getApplicationContext() Context.getApplicationContext()}
 * as a {@link android.content.Context} argument when invoking your singleton's
 * <code>getInstance()</code> method.
 * </p>
 */
public class Application extends ContextWrapper implements ComponentCallbacks2 {
    // Callback lists are guarded by their own monitors (see register/collect
    // methods); dispatch iterates over a snapshot array taken under the lock.
    private ArrayList<ComponentCallbacks> mComponentCallbacks =
            new ArrayList<ComponentCallbacks>();
    private ArrayList<ActivityLifecycleCallbacks> mActivityLifecycleCallbacks =
            new ArrayList<ActivityLifecycleCallbacks>();
    // Lazily created; guarded by "this" (see registerOnProvideAssistDataListener).
    private ArrayList<OnProvideAssistDataListener> mAssistCallbacks = null;

    /** @hide */
    public LoadedApk mLoadedApk;

    /**
     * Callback interface for being notified of every Activity lifecycle
     * transition in this process. Register via
     * {@link #registerActivityLifecycleCallbacks}.
     */
    public interface ActivityLifecycleCallbacks {
        void onActivityCreated(Activity activity, Bundle savedInstanceState);
        void onActivityStarted(Activity activity);
        void onActivityResumed(Activity activity);
        void onActivityPaused(Activity activity);
        void onActivityStopped(Activity activity);
        void onActivitySaveInstanceState(Activity activity, Bundle outState);
        void onActivityDestroyed(Activity activity);
    }

    /**
     * Callback interface for use with {@link Application#registerOnProvideAssistDataListener}
     * and {@link Application#unregisterOnProvideAssistDataListener}.
     */
    public interface OnProvideAssistDataListener {
        /**
         * This is called when the user is requesting an assist, to build a full
         * {@link Intent#ACTION_ASSIST} Intent with all of the context of the current
         * application.  You can override this method to place into the bundle anything
         * you would like to appear in the {@link Intent#EXTRA_ASSIST_CONTEXT} part
         * of the assist Intent.
         */
        public void onProvideAssistData(Activity activity, Bundle data);
    }

    // The real base Context is attached later via attach(); see below.
    public Application() {
        super(null);
    }

    /**
     * Called when the application is starting, before any activity, service,
     * or receiver objects (excluding content providers) have been created.
     * Implementations should be as quick as possible (for example using
     * lazy initialization of state) since the time spent in this function
     * directly impacts the performance of starting the first activity,
     * service, or receiver in a process.
     * If you override this method, be sure to call super.onCreate().
     */
    @CallSuper
    public void onCreate() {
    }

    /**
     * This method is for use in emulated process environments.  It will
     * never be called on a production Android device, where processes are
     * removed by simply killing them; no user code (including this callback)
     * is executed when doing so.
     */
    @CallSuper
    public void onTerminate() {
    }

    /** Forwards the configuration change to every registered ComponentCallbacks. */
    @CallSuper
    public void onConfigurationChanged(Configuration newConfig) {
        Object[] callbacks = collectComponentCallbacks();
        if (callbacks != null) {
            for (int i=0; i<callbacks.length; i++) {
                ((ComponentCallbacks)callbacks[i]).onConfigurationChanged(newConfig);
            }
        }
    }

    /** Forwards the low-memory notification to every registered ComponentCallbacks. */
    @CallSuper
    public void onLowMemory() {
        Object[] callbacks = collectComponentCallbacks();
        if (callbacks != null) {
            for (int i=0; i<callbacks.length; i++) {
                ((ComponentCallbacks)callbacks[i]).onLowMemory();
            }
        }
    }

    /**
     * Forwards the trim-memory notification; only callbacks that implement the
     * newer ComponentCallbacks2 interface receive it.
     */
    @CallSuper
    public void onTrimMemory(int level) {
        Object[] callbacks = collectComponentCallbacks();
        if (callbacks != null) {
            for (int i=0; i<callbacks.length; i++) {
                Object c = callbacks[i];
                if (c instanceof ComponentCallbacks2) {
                    ((ComponentCallbacks2)c).onTrimMemory(level);
                }
            }
        }
    }

    /** Adds a ComponentCallbacks to receive configuration/memory events. */
    public void registerComponentCallbacks(ComponentCallbacks callback) {
        synchronized (mComponentCallbacks) {
            mComponentCallbacks.add(callback);
        }
    }

    /** Removes a previously registered ComponentCallbacks. */
    public void unregisterComponentCallbacks(ComponentCallbacks callback) {
        synchronized (mComponentCallbacks) {
            mComponentCallbacks.remove(callback);
        }
    }

    /** Adds an ActivityLifecycleCallbacks to receive all lifecycle events. */
    public void registerActivityLifecycleCallbacks(ActivityLifecycleCallbacks callback) {
        synchronized (mActivityLifecycleCallbacks) {
            mActivityLifecycleCallbacks.add(callback);
        }
    }

    /** Removes a previously registered ActivityLifecycleCallbacks. */
    public void unregisterActivityLifecycleCallbacks(ActivityLifecycleCallbacks callback) {
        synchronized (mActivityLifecycleCallbacks) {
            mActivityLifecycleCallbacks.remove(callback);
        }
    }

    /** Adds a listener invoked when assist data is being collected. */
    public void registerOnProvideAssistDataListener(OnProvideAssistDataListener callback) {
        synchronized (this) {
            if (mAssistCallbacks == null) {
                mAssistCallbacks = new ArrayList<OnProvideAssistDataListener>();
            }
            mAssistCallbacks.add(callback);
        }
    }

    /** Removes a previously registered assist-data listener. */
    public void unregisterOnProvideAssistDataListener(OnProvideAssistDataListener callback) {
        synchronized (this) {
            if (mAssistCallbacks != null) {
                mAssistCallbacks.remove(callback);
            }
        }
    }

    // ------------------ Internal API ------------------

    /**
     * @hide
     */
    /* package */ final void attach(Context context) {
        // Supplies the base Context the no-arg constructor deferred.
        attachBaseContext(context);
        mLoadedApk = ContextImpl.getImpl(context).mPackageInfo;
    }

    // dispatchActivity* methods below are called by the framework (presumably
    // from Activity/Instrumentation — not visible here) to fan each lifecycle
    // event out to registered ActivityLifecycleCallbacks.

    /* package */ void dispatchActivityCreated(Activity activity, Bundle savedInstanceState) {
        Object[] callbacks = collectActivityLifecycleCallbacks();
        if (callbacks != null) {
            for (int i=0; i<callbacks.length; i++) {
                ((ActivityLifecycleCallbacks)callbacks[i]).onActivityCreated(activity,
                        savedInstanceState);
            }
        }
    }

    /* package */ void dispatchActivityStarted(Activity activity) {
        Object[] callbacks = collectActivityLifecycleCallbacks();
        if (callbacks != null) {
            for (int i=0; i<callbacks.length; i++) {
                ((ActivityLifecycleCallbacks)callbacks[i]).onActivityStarted(activity);
            }
        }
    }

    /* package */ void dispatchActivityResumed(Activity activity) {
        Object[] callbacks = collectActivityLifecycleCallbacks();
        if (callbacks != null) {
            for (int i=0; i<callbacks.length; i++) {
                ((ActivityLifecycleCallbacks)callbacks[i]).onActivityResumed(activity);
            }
        }
    }

    /* package */ void dispatchActivityPaused(Activity activity) {
        Object[] callbacks = collectActivityLifecycleCallbacks();
        if (callbacks != null) {
            for (int i=0; i<callbacks.length; i++) {
                ((ActivityLifecycleCallbacks)callbacks[i]).onActivityPaused(activity);
            }
        }
    }

    /* package */ void dispatchActivityStopped(Activity activity) {
        Object[] callbacks = collectActivityLifecycleCallbacks();
        if (callbacks != null) {
            for (int i=0; i<callbacks.length; i++) {
                ((ActivityLifecycleCallbacks)callbacks[i]).onActivityStopped(activity);
            }
        }
    }

    /* package */ void dispatchActivitySaveInstanceState(Activity activity, Bundle outState) {
        Object[] callbacks = collectActivityLifecycleCallbacks();
        if (callbacks != null) {
            for (int i=0; i<callbacks.length; i++) {
                ((ActivityLifecycleCallbacks)callbacks[i]).onActivitySaveInstanceState(activity,
                        outState);
            }
        }
    }

    /* package */ void dispatchActivityDestroyed(Activity activity) {
        Object[] callbacks = collectActivityLifecycleCallbacks();
        if (callbacks != null) {
            for (int i=0; i<callbacks.length; i++) {
                ((ActivityLifecycleCallbacks)callbacks[i]).onActivityDestroyed(activity);
            }
        }
    }

    /**
     * Snapshots the component-callback list under its lock; returns null when
     * empty so dispatchers can skip iteration entirely.
     */
    private Object[] collectComponentCallbacks() {
        Object[] callbacks = null;
        synchronized (mComponentCallbacks) {
            if (mComponentCallbacks.size() > 0) {
                callbacks = mComponentCallbacks.toArray();
            }
        }
        return callbacks;
    }

    /** Same snapshot pattern as collectComponentCallbacks, for lifecycle callbacks. */
    private Object[] collectActivityLifecycleCallbacks() {
        Object[] callbacks = null;
        synchronized (mActivityLifecycleCallbacks) {
            if (mActivityLifecycleCallbacks.size() > 0) {
                callbacks = mActivityLifecycleCallbacks.toArray();
            }
        }
        return callbacks;
    }

    /* package */ void dispatchOnProvideAssistData(Activity activity, Bundle data) {
        Object[] callbacks;
        synchronized (this) {
            if (mAssistCallbacks == null) {
                return;
            }
            callbacks = mAssistCallbacks.toArray();
        }
        if (callbacks != null) {
            for (int i=0; i<callbacks.length; i++) {
                ((OnProvideAssistDataListener)callbacks[i]).onProvideAssistData(activity, data);
            }
        }
    }
}
/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */

/*
 * HachageFrame.java
 *
 * Created on 20 juin 2011, 15:48:54
 */
package hachagewallouf;

import javax.swing.UIManager;
import javax.swing.UIManager.LookAndFeelInfo;

/**
 * Main (and only) window of a toy substitution-cipher tool: typing in the
 * top text area live-"encrypts" into the bottom one and vice versa.
 *
 * @author a807087
 */
public class HachageFrame extends javax.swing.JFrame {

    /** Creates new form HachageFrame */
    public HachageFrame() {
        // Prefer the Nimbus look-and-feel; fall back to the Windows LAF.
        // Any LAF failure is deliberately ignored (cosmetic only).
        try {
            boolean find = false;
            for (LookAndFeelInfo info : UIManager.getInstalledLookAndFeels()) {
                if ("Nimbus".equals(info.getName())) {
                    UIManager.setLookAndFeel(info.getClassName());
                    find = true;
                    break;
                }
            }
            if (!find) {
                UIManager.setLookAndFeel("com.sun.java.swing.plaf.windows.WindowsLookAndFeel");
            }
        } catch (Exception e) {
        }
        initComponents();
        this.setTitle("Wallouf crypto v1.01");
        this.setVisible(true);
        // NOTE(review): proportional divider locations are only honored once
        // the split pane has been laid out; called here after setVisible.
        this.jSplitPane1.setDividerLocation(0.5);
        this.setLocationRelativeTo(null); // center on screen
    }

    /** This method is called from within the constructor to
     * initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is
     * always regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {

        jSplitPane1 = new javax.swing.JSplitPane();
        jPanel1 = new javax.swing.JPanel();
        jLabel1 = new javax.swing.JLabel();
        jScrollPane1 = new javax.swing.JScrollPane();
        jTextAreaUnCrypt = new javax.swing.JTextArea();
        jPanel2 = new javax.swing.JPanel();
        jLabel2 = new javax.swing.JLabel();
        jScrollPane2 = new javax.swing.JScrollPane();
        jTextAreaCrypt = new javax.swing.JTextArea();
        jMenuBar1 = new javax.swing.JMenuBar();
        jMenu1 = new javax.swing.JMenu();
        jMenuItemExit = new javax.swing.JMenuItem();
        jMenuTool = new javax.swing.JMenu();
        jMenuItemReset = new javax.swing.JMenuItem();

        setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);

        jSplitPane1.setOrientation(javax.swing.JSplitPane.VERTICAL_SPLIT);

        jLabel1.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
        jLabel1.setText("Uncrypt text area -> Tape the uncrypted test here and copy the result on crypt text area");

        jTextAreaUnCrypt.setColumns(20);
        jTextAreaUnCrypt.setRows(5);
        jTextAreaUnCrypt.addKeyListener(new java.awt.event.KeyAdapter() {
            public void keyReleased(java.awt.event.KeyEvent evt) {
                jTextAreaUnCryptKeyReleased(evt);
            }
        });
        jScrollPane1.setViewportView(jTextAreaUnCrypt);

        javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1);
        jPanel1.setLayout(jPanel1Layout);
        jPanel1Layout.setHorizontalGroup(
            jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addComponent(jLabel1, javax.swing.GroupLayout.DEFAULT_SIZE, 606, Short.MAX_VALUE)
            .addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 606, Short.MAX_VALUE)
        );
        jPanel1Layout.setVerticalGroup(
            jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jPanel1Layout.createSequentialGroup()
                .addComponent(jLabel1)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 80, Short.MAX_VALUE))
        );

        jSplitPane1.setTopComponent(jPanel1);

        jLabel2.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
        jLabel2.setText("Crypt text area -> Tape the Crypted test here and copy the result on Uncrypt text area");

        jTextAreaCrypt.setColumns(20);
        jTextAreaCrypt.setRows(5);
        jTextAreaCrypt.addKeyListener(new java.awt.event.KeyAdapter() {
            public void keyReleased(java.awt.event.KeyEvent evt) {
                jTextAreaCryptKeyReleased(evt);
            }
        });
        jScrollPane2.setViewportView(jTextAreaCrypt);

        javax.swing.GroupLayout jPanel2Layout = new javax.swing.GroupLayout(jPanel2);
        jPanel2.setLayout(jPanel2Layout);
        jPanel2Layout.setHorizontalGroup(
            jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addComponent(jLabel2, javax.swing.GroupLayout.DEFAULT_SIZE, 606, Short.MAX_VALUE)
            .addComponent(jScrollPane2, javax.swing.GroupLayout.DEFAULT_SIZE, 606, Short.MAX_VALUE)
        );
        jPanel2Layout.setVerticalGroup(
            jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jPanel2Layout.createSequentialGroup()
                .addComponent(jLabel2)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(jScrollPane2, javax.swing.GroupLayout.DEFAULT_SIZE, 462, Short.MAX_VALUE))
        );

        jSplitPane1.setRightComponent(jPanel2);

        jMenu1.setText("File");

        jMenuItemExit.setText("Exit");
        jMenuItemExit.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jMenuItemExitActionPerformed(evt);
            }
        });
        jMenu1.add(jMenuItemExit);

        jMenuBar1.add(jMenu1);

        jMenuTool.setText("Tools");

        jMenuItemReset.setText("Reset text area");
        jMenuItemReset.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jMenuItemResetActionPerformed(evt);
            }
        });
        jMenuTool.add(jMenuItemReset);

        jMenuBar1.add(jMenuTool);

        setJMenuBar(jMenuBar1);

        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
        getContentPane().setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addComponent(jSplitPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 608, Short.MAX_VALUE)
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addComponent(jSplitPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 589, Short.MAX_VALUE)
        );

        pack();
    }// </editor-fold>//GEN-END:initComponents

    /**
     * Encrypts the plaintext area into the ciphertext area on every key
     * release. Scheme (as implemented): uppercase A-Z are shifted +/-13
     * depending on the parity of c/13 (effectively ROT13); lowercase a-z are
     * shifted +/-6 depending on the parity of c/6 — note this can produce
     * non-letter characters; the decrypt handler below reverses those ranges
     * explicitly. Everything else passes through unchanged.
     */
    private void jTextAreaUnCryptKeyReleased(java.awt.event.KeyEvent evt) {//GEN-FIRST:event_jTextAreaUnCryptKeyReleased
        String uncrypt = jTextAreaUnCrypt.getText();
        String crypt = "";
        if (!uncrypt.isEmpty()) {
            for (int i = 0; i < uncrypt.length(); i++) {
                char c = uncrypt.charAt(i);
                //Majuscules
                if ((int) c > 64 && (int) c < 91) {
                    // A-Z: parity of c/13 selects the ROT13 direction
                    if (((int) (c / 13) % 2) != 0) {
                        crypt += (char) (c + 13);
                    } else {
                        crypt += (char) (c - 13);
                    }
                } else if ((int) c > 96 && (int) c < 123) {
                    // a-z: +/-6 shift by parity of c/6
                    if (((int) (c / 6) % 2) != 0) {
                        crypt += (char) (c + 6);
                    } else {
                        crypt += (char) (c - 6);
                    }
                } else {
                    crypt += (char) (c); // non-letters unchanged
                }
            }
            jTextAreaCrypt.setText(crypt);
        }
    }//GEN-LAST:event_jTextAreaUnCryptKeyReleased

    /**
     * Decrypts the ciphertext area back into the plaintext area. Uppercase
     * uses the same self-inverse ROT13 rule; the explicit range checks undo
     * the lowercase +/-6 shift (including shifted characters that fell
     * outside a-z). NOTE(review): the ranges look hand-derived — verify they
     * exactly invert the encrypt handler for all of a-z.
     */
    private void jTextAreaCryptKeyReleased(java.awt.event.KeyEvent evt) {//GEN-FIRST:event_jTextAreaCryptKeyReleased
        String uncrypt = jTextAreaCrypt.getText();
        String crypt = "";
        if (!uncrypt.isEmpty()) {
            for (int i = 0; i < uncrypt.length(); i++) {
                char c = uncrypt.charAt(i);
                //Majuscules
                if ((int) c > 64 && (int) c < 91) {
                    if (((int) (c / 13) % 2) != 0) {
                        crypt += (char) (c + 13);
                    } else {
                        crypt += (char) (c - 13);
                    }
                } else if ((int) c > 90 && (int) c < 96 || (int) c > 101 && (int) c < 108
                        || (int) c > 113 && (int) c < 117) {
                    // characters that were shifted down by 6 during encryption
                    crypt += (char) (c + 6);
                } else if ((int) c > 107 && (int) c < 114 || (int) c > 119 && (int) c < 126) {
                    // characters that were shifted up by 6 during encryption
                    crypt += (char) (c - 6);
                } else {
                    crypt += (char) (c);
                }
            }
            jTextAreaUnCrypt.setText(crypt);
        }
    }//GEN-LAST:event_jTextAreaCryptKeyReleased

    /** File &gt; Exit: terminates the JVM. */
    private void jMenuItemExitActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jMenuItemExitActionPerformed
        System.exit(0);
    }//GEN-LAST:event_jMenuItemExitActionPerformed

    /** Tools &gt; Reset text area: clears both text areas. */
    private void jMenuItemResetActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jMenuItemResetActionPerformed
        jTextAreaCrypt.setText("");
        jTextAreaUnCrypt.setText("");
    }//GEN-LAST:event_jMenuItemResetActionPerformed

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JLabel jLabel1;
    private javax.swing.JLabel jLabel2;
    private javax.swing.JMenu jMenu1;
    private javax.swing.JMenuBar jMenuBar1;
    private javax.swing.JMenuItem jMenuItemExit;
    private javax.swing.JMenuItem jMenuItemReset;
    private javax.swing.JMenu jMenuTool;
    private javax.swing.JPanel jPanel1;
    private javax.swing.JPanel jPanel2;
    private javax.swing.JScrollPane jScrollPane1;
    private javax.swing.JScrollPane jScrollPane2;
    private javax.swing.JSplitPane jSplitPane1;
    private javax.swing.JTextArea jTextAreaCrypt;
    private javax.swing.JTextArea jTextAreaUnCrypt;
    // End of variables declaration//GEN-END:variables
}
package com.GameName.Console.Base;

import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Component;
import java.awt.Font;
import java.awt.Toolkit;
import java.awt.datatransfer.Clipboard;
import java.awt.datatransfer.StringSelection;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;

import javax.swing.Box;
import javax.swing.Icon;
import javax.swing.JButton;
import javax.swing.JComboBox;
import javax.swing.JFileChooser;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTextField;
import javax.swing.JTextPane;
import javax.swing.ScrollPaneConstants;
import javax.swing.text.BadLocationException;
import javax.swing.text.DefaultStyledDocument;

/**
 * Console tab that lets the user pick a directory, choose one of its files
 * from a combo box, view its contents in a read-only text pane, and copy the
 * contents to the system clipboard. (The search feature is currently disabled.)
 *
 * Fixes over the previous revision:
 * <ul>
 * <li>{@code files} is now cleared on every Load — previously only the combo
 *     box was cleared, so after a second Load the combo-box indices pointed at
 *     stale entries of {@code files} and the wrong file was displayed;</li>
 * <li>the file reader is opened in try-with-resources, so it is closed even
 *     when reading or the document update throws;</li>
 * <li>file contents are accumulated in a {@link StringBuilder} instead of
 *     O(n^2) string concatenation;</li>
 * <li>{@code File.listFiles()} returning {@code null} (unreadable directory)
 *     is handled instead of throwing a NullPointerException.</li>
 * </ul>
 */
public class BasicFileViewer extends ConsoleTab implements ActionListener {
    private static final long serialVersionUID = -9071158131667577707L;

    /** Directory pre-selected in the file chooser; set via setDefaultLocation(). */
    private File defaultLocation;

    private JButton copyButton;
    private JButton loadButton;

    /** Backing files for filesComboBox; index i corresponds to combo item i. */
    private ArrayList<File> files;
    private JComboBox<String> filesComboBox;

    private DefaultStyledDocument document;
    private JTextPane fileTextPane;

    private JButton findButton;
    private JTextField searchBar;

    public BasicFileViewer(String name, Icon icon) {
        super(name, icon);
    }

    /**
     * Builds the tab UI: a top bar (file combo box + Copy/Load buttons), a
     * scrollable read-only text pane in the center, and a search bar at the
     * bottom. Also wires all action listeners to this tab.
     */
    public void addComponents() {
        setLayout(new BorderLayout(0, 0));

        // Narrow side margins.
        Component horizontalStrut = Box.createHorizontalStrut(10);
        add(horizontalStrut, BorderLayout.WEST);
        Component horizontalStrut_1 = Box.createHorizontalStrut(10);
        add(horizontalStrut_1, BorderLayout.EAST);

        // Top utility bar: combo box on the left, Copy/Load on the right.
        JPanel utilPanel = new JPanel();
        utilPanel.setBackground(new Color(225, 225, 225));
        add(utilPanel, BorderLayout.NORTH);
        utilPanel.setLayout(new BorderLayout(0, 0));

        JPanel utilFilesComboBoxPanel = new JPanel();
        utilFilesComboBoxPanel.setBackground(new Color(225, 225, 225));
        utilPanel.add(utilFilesComboBoxPanel, BorderLayout.WEST);

        Component horizontalStrut_2 = Box.createHorizontalStrut(1);
        utilFilesComboBoxPanel.add(horizontalStrut_2);

        files = new ArrayList<>();
        filesComboBox = new JComboBox<>();
        filesComboBox.setBackground(new Color(225, 225, 225));
        utilFilesComboBoxPanel.add(filesComboBox);

        JPanel utilCopyLoadPanel = new JPanel();
        utilCopyLoadPanel.setBackground(new Color(225, 225, 225));
        utilPanel.add(utilCopyLoadPanel, BorderLayout.EAST);

        copyButton = new JButton("Copy");
        copyButton.setBackground(new Color(225, 225, 225));
        utilCopyLoadPanel.add(copyButton);

        loadButton = new JButton("Load");
        loadButton.setBackground(new Color(225, 225, 225));
        utilCopyLoadPanel.add(loadButton);

        Component horizontalStrut_3 = Box.createHorizontalStrut(1);
        utilCopyLoadPanel.add(horizontalStrut_3);

        Component horizontalGlue = Box.createHorizontalGlue();
        utilPanel.add(horizontalGlue, BorderLayout.CENTER);

        // Center: read-only styled text pane inside a scroll pane.
        JScrollPane fileTextPaneScrollPane = new JScrollPane();
        fileTextPaneScrollPane.setBackground(new Color(225, 225, 225));
        fileTextPaneScrollPane.setVerticalScrollBarPolicy(ScrollPaneConstants.VERTICAL_SCROLLBAR_ALWAYS);
        add(fileTextPaneScrollPane, BorderLayout.CENTER);

        document = new DefaultStyledDocument();
        fileTextPane = new JTextPane(document);
        fileTextPane.setEditable(false);
        fileTextPane.setFont(new Font("Tahoma", Font.PLAIN, 12));
        fileTextPaneScrollPane.setViewportView(fileTextPane);

        // Bottom: search label, search field and Find button.
        JPanel searchPanel = new JPanel();
        searchPanel.setBackground(new Color(225, 225, 225));
        add(searchPanel, BorderLayout.SOUTH);
        searchPanel.setLayout(new BorderLayout(0, 0));

        Component verticalStrut = Box.createVerticalStrut(5);
        searchPanel.add(verticalStrut, BorderLayout.NORTH);
        Component verticalStrut_1 = Box.createVerticalStrut(5);
        searchPanel.add(verticalStrut_1, BorderLayout.SOUTH);

        JPanel searchLabelPanel = new JPanel();
        searchLabelPanel.setBackground(new Color(225, 225, 225));
        searchPanel.add(searchLabelPanel, BorderLayout.WEST);

        Component horizontalStrut_4 = Box.createHorizontalStrut(5);
        searchLabelPanel.add(horizontalStrut_4);

        JLabel searchLabel = new JLabel("Search: ");
        searchLabel.setFont(new Font("Tahoma", Font.BOLD, 14));
        searchLabelPanel.add(searchLabel);

        JPanel findButtonPanel = new JPanel();
        findButtonPanel.setBackground(new Color(225, 225, 225));
        searchPanel.add(findButtonPanel, BorderLayout.EAST);

        findButton = new JButton("Find");
        findButton.setBackground(new Color(225, 225, 225));
        findButtonPanel.add(findButton);

        Component horizontalStrut_5 = Box.createHorizontalStrut(5);
        findButtonPanel.add(horizontalStrut_5);

        JPanel searchBarPanel = new JPanel();
        searchBarPanel.setBackground(new Color(225, 225, 225));
        searchPanel.add(searchBarPanel, BorderLayout.CENTER);
        searchBarPanel.setLayout(new BorderLayout(0, 0));

        Component horizontalStrut_6 = Box.createHorizontalStrut(1);
        searchBarPanel.add(horizontalStrut_6, BorderLayout.WEST);
        Component verticalStrut_2 = Box.createVerticalStrut(5);
        searchBarPanel.add(verticalStrut_2, BorderLayout.NORTH);

        searchBar = new JTextField();
        searchBar.setFont(new Font("Tahoma", Font.PLAIN, 12));
        searchBar.setColumns(10);
        searchBarPanel.add(searchBar);

        Component verticalStrut_3 = Box.createVerticalStrut(5);
        searchBarPanel.add(verticalStrut_3, BorderLayout.SOUTH);
        Component horizontalStrut_7 = Box.createHorizontalStrut(1);
        searchBarPanel.add(horizontalStrut_7, BorderLayout.EAST);

        findButton.addActionListener(this);
        searchBar.addActionListener(this);
        copyButton.addActionListener(this);
        loadButton.addActionListener(this);
        filesComboBox.addActionListener(this);
    }

    // Guards against combo-box ActionEvents fired while we repopulate it.
    private boolean loading;

    /**
     * Handles all tab actions: Find (disabled), Load (pick a directory and
     * list its regular files), combo-box selection (display the chosen file),
     * and Copy (document text to system clipboard).
     */
    public void actionPerformed(ActionEvent event) {
        if (event.getSource() == findButton || event.getSource() == searchBar) {
            // Search was removed because it never worked reliably; keep the
            // explicit notice so the buttons are not silently dead.
            JOptionPane.showMessageDialog(null, "Searching is Broken.", "Error", JOptionPane.ERROR_MESSAGE);
        } else if (event.getSource() == loadButton) {
            loadDirectory();
        } else if (event.getSource() == filesComboBox && !loading) {
            displaySelectedFile();
        } else if (event.getSource() == copyButton) {
            copyToClipboard();
        }
    }

    /** Lets the user pick a directory and repopulates the file list from it. */
    private void loadDirectory() {
        JFileChooser fileChooser = new JFileChooser();
        fileChooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY);
        fileChooser.setSelectedFile(defaultLocation);

        if (fileChooser.showOpenDialog(this) != JFileChooser.APPROVE_OPTION) {
            return;
        }
        File directory = fileChooser.getSelectedFile();
        // listFiles() is null for unreadable/non-directory paths.
        File[] entries = directory.listFiles();
        if (entries == null) {
            JOptionPane.showMessageDialog(null, "Could not list directory: " + directory,
                    "Error", JOptionPane.ERROR_MESSAGE);
            return;
        }

        // Suppress combo-box selection events while repopulating; clear BOTH
        // the combo box and the backing list so their indices stay in sync.
        loading = true;
        filesComboBox.removeAllItems();
        files.clear();
        for (File file : entries) {
            if (file.isDirectory()) {
                continue;
            }
            filesComboBox.addItem(file.getName());
            files.add(file);
        }
        loading = false;
    }

    /** Reads the currently selected file into the document. */
    private void displaySelectedFile() {
        int index = filesComboBox.getSelectedIndex();
        if (index == -1) {
            return;
        }
        StringBuilder content = new StringBuilder();
        // try-with-resources guarantees the reader is closed on any exit path.
        try (BufferedReader reader = new BufferedReader(new FileReader(files.get(index)))) {
            String line;
            while ((line = reader.readLine()) != null) {
                content.append(line).append('\n');
            }
            document.remove(0, document.getLength());
            document.insertString(0, content.toString(), null);
        } catch (IOException | BadLocationException e) {
            e.printStackTrace();
        }
    }

    /** Copies the entire document text to the system clipboard. */
    private void copyToClipboard() {
        try {
            StringSelection stringSelection = new StringSelection(document.getText(0, document.getLength()));
            Clipboard clipboard = Toolkit.getDefaultToolkit().getSystemClipboard();
            clipboard.setContents(stringSelection, null);
        } catch (BadLocationException e) {
            e.printStackTrace();
        }
    }

    /** Sets the directory pre-selected the next time Load is pressed. */
    public void setDefaultLocation(File defaultLocation) {
        this.defaultLocation = defaultLocation;
    }
}
/*
 * Copyright (C) 2014 Archie L. Cobbs. All rights reserved.
 *
 * $Id$
 */

package org.jsimpledb.change;

/**
 * Adapter class for the {@link ChangeSwitch} interface.
 *
 * <p>
 * Every {@code case...()} method funnels into one of the internal roll-up methods
 * ({@link #caseListFieldChange caseListFieldChange()}, {@link #caseMapFieldChange caseMapFieldChange()},
 * {@link #caseSetFieldChange caseSetFieldChange()}, {@link #caseFieldChange caseFieldChange()},
 * {@link #caseChange caseChange()}), so subclasses can override at whatever level of
 * granularity they need; the final fallback {@link #caseChange caseChange()} returns null.
 * </p>
 *
 * @param <R> method return type
 */
public class ChangeAdapter<R> implements ChangeSwitch<R> {

    /**
     * Handle an {@link ObjectCreate} event.
     *
     * <p>
     * The implementation in {@link ChangeAdapter} delegates to {@link #caseChange caseChange()}.
     * </p>
     *
     * @param change visiting change
     * @return visitor return value
     */
    @Override
    public <T> R caseObjectCreate(ObjectCreate<T> change) {
        return this.caseChange(change);
    }

    /**
     * Handle an {@link ObjectDelete} event.
     *
     * <p>
     * The implementation in {@link ChangeAdapter} delegates to {@link #caseChange caseChange()}.
     * </p>
     *
     * @param change visiting change
     * @return visitor return value
     */
    @Override
    public <T> R caseObjectDelete(ObjectDelete<T> change) {
        return this.caseChange(change);
    }

    /**
     * Handle a {@link ListFieldAdd} event.
     *
     * <p>
     * The implementation in {@link ChangeAdapter} delegates to {@link #caseListFieldChange caseListFieldChange()}.
     * </p>
     *
     * @param change visiting change
     * @return visitor return value
     */
    @Override
    public <T, E> R caseListFieldAdd(ListFieldAdd<T, E> change) {
        return this.caseListFieldChange(change);
    }

    /**
     * Handle a {@link ListFieldClear} event.
     *
     * <p>
     * The implementation in {@link ChangeAdapter} delegates to {@link #caseListFieldChange caseListFieldChange()}.
     * </p>
     *
     * @param change visiting change
     * @return visitor return value
     */
    @Override
    public <T> R caseListFieldClear(ListFieldClear<T> change) {
        return this.caseListFieldChange(change);
    }

    /**
     * Handle a {@link ListFieldRemove} event.
     *
     * <p>
     * The implementation in {@link ChangeAdapter} delegates to {@link #caseListFieldChange caseListFieldChange()}.
     * </p>
     *
     * @param change visiting change
     * @return visitor return value
     */
    @Override
    public <T, E> R caseListFieldRemove(ListFieldRemove<T, E> change) {
        return this.caseListFieldChange(change);
    }

    /**
     * Handle a {@link ListFieldReplace} event.
     *
     * <p>
     * The implementation in {@link ChangeAdapter} delegates to {@link #caseListFieldChange caseListFieldChange()}.
     * </p>
     *
     * @param change visiting change
     * @return visitor return value
     */
    @Override
    public <T, E> R caseListFieldReplace(ListFieldReplace<T, E> change) {
        return this.caseListFieldChange(change);
    }

    /**
     * Handle a {@link MapFieldAdd} event.
     *
     * <p>
     * The implementation in {@link ChangeAdapter} delegates to {@link #caseMapFieldChange caseMapFieldChange()}.
     * </p>
     *
     * @param change visiting change
     * @return visitor return value
     */
    @Override
    public <T, K, V> R caseMapFieldAdd(MapFieldAdd<T, K, V> change) {
        return this.caseMapFieldChange(change);
    }

    /**
     * Handle a {@link MapFieldClear} event.
     *
     * <p>
     * The implementation in {@link ChangeAdapter} delegates to {@link #caseMapFieldChange caseMapFieldChange()}.
     * </p>
     *
     * @param change visiting change
     * @return visitor return value
     */
    @Override
    public <T> R caseMapFieldClear(MapFieldClear<T> change) {
        return this.caseMapFieldChange(change);
    }

    /**
     * Handle a {@link MapFieldRemove} event.
     *
     * <p>
     * The implementation in {@link ChangeAdapter} delegates to {@link #caseMapFieldChange caseMapFieldChange()}.
     * </p>
     *
     * @param change visiting change
     * @return visitor return value
     */
    @Override
    public <T, K, V> R caseMapFieldRemove(MapFieldRemove<T, K, V> change) {
        return this.caseMapFieldChange(change);
    }

    /**
     * Handle a {@link MapFieldReplace} event.
     *
     * <p>
     * The implementation in {@link ChangeAdapter} delegates to {@link #caseMapFieldChange caseMapFieldChange()}.
     * </p>
     *
     * @param change visiting change
     * @return visitor return value
     */
    @Override
    public <T, K, V> R caseMapFieldReplace(MapFieldReplace<T, K, V> change) {
        return this.caseMapFieldChange(change);
    }

    /**
     * Handle a {@link SetFieldAdd} event.
     *
     * <p>
     * The implementation in {@link ChangeAdapter} delegates to {@link #caseSetFieldChange caseSetFieldChange()}.
     * </p>
     *
     * @param change visiting change
     * @return visitor return value
     */
    @Override
    public <T, E> R caseSetFieldAdd(SetFieldAdd<T, E> change) {
        return this.caseSetFieldChange(change);
    }

    /**
     * Handle a {@link SetFieldClear} event.
     *
     * <p>
     * The implementation in {@link ChangeAdapter} delegates to {@link #caseSetFieldChange caseSetFieldChange()}.
     * </p>
     *
     * @param change visiting change
     * @return visitor return value
     */
    @Override
    public <T> R caseSetFieldClear(SetFieldClear<T> change) {
        return this.caseSetFieldChange(change);
    }

    /**
     * Handle a {@link SetFieldRemove} event.
     *
     * <p>
     * The implementation in {@link ChangeAdapter} delegates to {@link #caseSetFieldChange caseSetFieldChange()}.
     * </p>
     *
     * @param change visiting change
     * @return visitor return value
     */
    @Override
    public <T, E> R caseSetFieldRemove(SetFieldRemove<T, E> change) {
        return this.caseSetFieldChange(change);
    }

    /**
     * Handle a {@link SimpleFieldChange} event.
     *
     * <p>
     * The implementation in {@link ChangeAdapter} delegates to {@link #caseFieldChange caseFieldChange()}.
     * </p>
     *
     * @param change visiting change
     * @return visitor return value
     */
    @Override
    public <T, V> R caseSimpleFieldChange(SimpleFieldChange<T, V> change) {
        return this.caseFieldChange(change);
    }

// Roll-Up Methods

    /**
     * Internal roll-up method.
     *
     * <p>
     * The implementation in {@link ChangeAdapter} delegates to {@link #caseFieldChange caseFieldChange()}.
     * </p>
     *
     * @param change visiting change
     * @param <T> changed object type
     * @return visitor return value
     */
    protected <T> R caseListFieldChange(ListFieldChange<T> change) {
        return this.caseFieldChange(change);
    }

    /**
     * Internal roll-up method.
     *
     * <p>
     * The implementation in {@link ChangeAdapter} delegates to {@link #caseFieldChange caseFieldChange()}.
     * </p>
     *
     * @param change visiting change
     * @param <T> changed object type
     * @return visitor return value
     */
    protected <T> R caseMapFieldChange(MapFieldChange<T> change) {
        return this.caseFieldChange(change);
    }

    /**
     * Internal roll-up method.
     *
     * <p>
     * The implementation in {@link ChangeAdapter} delegates to {@link #caseFieldChange caseFieldChange()}.
     * </p>
     *
     * @param change visiting change
     * @param <T> changed object type
     * @return visitor return value
     */
    protected <T> R caseSetFieldChange(SetFieldChange<T> change) {
        return this.caseFieldChange(change);
    }

    /**
     * Internal roll-up method.
     *
     * <p>
     * The implementation in {@link ChangeAdapter} delegates to {@link #caseChange caseChange()}.
     * </p>
     *
     * @param change visiting change
     * @param <T> changed object type
     * @return visitor return value
     */
    protected <T> R caseFieldChange(FieldChange<T> change) {
        return this.caseChange(change);
    }

    /**
     * Internal roll-up method.
     *
     * <p>
     * The implementation in {@link ChangeAdapter} returns null.
     * </p>
     *
     * @param change visiting change
     * @param <T> changed object type
     * @return visitor return value
     */
    protected <T> R caseChange(Change<T> change) {
        return null;
    }
}
// Copyright 2015 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.apple;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.devtools.build.lib.analysis.config.BuildOptions;
import com.google.devtools.build.lib.analysis.config.ConfigurationFragmentFactory;
import com.google.devtools.build.lib.analysis.config.CoreOptions;
import com.google.devtools.build.lib.analysis.config.Fragment;
import com.google.devtools.build.lib.analysis.config.FragmentOptions;
import com.google.devtools.build.lib.analysis.skylark.annotations.SkylarkConfigurationField;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.concurrent.ThreadSafety.Immutable;
import com.google.devtools.build.lib.rules.apple.AppleCommandLineOptions.AppleBitcodeMode;
import com.google.devtools.build.lib.rules.apple.ApplePlatform.PlatformType;
import com.google.devtools.build.lib.skylarkbuildapi.apple.AppleConfigurationApi;
import java.util.ArrayList;
import java.util.List;
import javax.annotation.Nullable;

/** A configuration containing flags required for Apple platforms and tools. */
@Immutable
public class AppleConfiguration extends Fragment implements AppleConfigurationApi<PlatformType> {
  /**
   * Environment variable name for the xcode version. The value of this environment variable should
   * be set to the version (for example, "7.2") of xcode to use when invoking part of the apple
   * toolkit in action execution.
   **/
  public static final String XCODE_VERSION_ENV_NAME = "XCODE_VERSION_OVERRIDE";
  /**
   * Environment variable name for the apple SDK version. If unset, uses the system default of the
   * host for the platform in the value of {@link #APPLE_SDK_PLATFORM_ENV_NAME}.
   **/
  public static final String APPLE_SDK_VERSION_ENV_NAME = "APPLE_SDK_VERSION_OVERRIDE";
  /**
   * Environment variable name for the apple SDK platform. This should be set for all actions that
   * require an apple SDK. The valid values consist of {@link ApplePlatform} names.
   */
  public static final String APPLE_SDK_PLATFORM_ENV_NAME = "APPLE_SDK_PLATFORM";

  /** Prefix for iOS cpu values. */
  public static final String IOS_CPU_PREFIX = "ios_";

  /** Default cpu for iOS builds. */
  @VisibleForTesting
  static final String DEFAULT_IOS_CPU = "x86_64";

  // All fields are derived once from AppleCommandLineOptions in the constructor and are
  // immutable thereafter (see @Immutable on the class).
  private final String iosCpu;
  private final String appleSplitCpu;
  private final PlatformType applePlatformType;
  private final ConfigurationDistinguisher configurationDistinguisher;
  private final ImmutableList<String> iosMultiCpus;
  private final ImmutableList<String> watchosCpus;
  private final ImmutableList<String> tvosCpus;
  private final ImmutableList<String> macosCpus;
  private final AppleBitcodeMode bitcodeMode;
  private final Label xcodeConfigLabel;
  private final AppleCommandLineOptions options;
  @Nullable private final Label defaultProvisioningProfileLabel;
  private final boolean mandatoryMinimumVersion;
  private final boolean objcProviderFromLinked;

  private AppleConfiguration(AppleCommandLineOptions options, String iosCpu) {
    this.options = options;
    this.iosCpu = iosCpu;
    this.appleSplitCpu = Preconditions.checkNotNull(options.appleSplitCpu, "appleSplitCpu");
    this.applePlatformType =
        Preconditions.checkNotNull(options.applePlatformType, "applePlatformType");
    this.configurationDistinguisher = options.configurationDistinguisher;
    this.iosMultiCpus = ImmutableList.copyOf(
        Preconditions.checkNotNull(options.iosMultiCpus, "iosMultiCpus"));
    // For watchos/tvos/macos, a null or empty cpu list falls back to the platform default.
    this.watchosCpus = (options.watchosCpus == null || options.watchosCpus.isEmpty())
        ? ImmutableList.of(AppleCommandLineOptions.DEFAULT_WATCHOS_CPU)
        : ImmutableList.copyOf(options.watchosCpus);
    this.tvosCpus = (options.tvosCpus == null || options.tvosCpus.isEmpty())
        ? ImmutableList.of(AppleCommandLineOptions.DEFAULT_TVOS_CPU)
        : ImmutableList.copyOf(options.tvosCpus);
    this.macosCpus = (options.macosCpus == null || options.macosCpus.isEmpty())
        ? ImmutableList.of(AppleCommandLineOptions.DEFAULT_MACOS_CPU)
        : ImmutableList.copyOf(options.macosCpus);
    this.bitcodeMode = options.appleBitcodeMode;
    this.xcodeConfigLabel =
        Preconditions.checkNotNull(options.xcodeVersionConfig, "xcodeConfigLabel");
    this.defaultProvisioningProfileLabel = options.defaultProvisioningProfile;
    this.mandatoryMinimumVersion = options.mandatoryMinimumVersion;
    this.objcProviderFromLinked = options.objcProviderFromLinked;
  }

  /** Determines cpu value from apple-specific toolchain identifier. */
  public static String iosCpuFromCpu(String cpu) {
    if (cpu.startsWith(IOS_CPU_PREFIX)) {
      return cpu.substring(IOS_CPU_PREFIX.length());
    } else {
      return DEFAULT_IOS_CPU;
    }
  }

  /** Returns the command line options this configuration was created from. */
  public AppleCommandLineOptions getOptions() {
    return options;
  }

  /**
   * Returns a map of environment variables (derived from configuration) that should be propagated
   * for actions pertaining to building applications for apple platforms. These environment
   * variables are needed to use apple toolkits. Keys are variable names and values are their
   * corresponding values.
   */
  public static ImmutableMap<String, String> appleTargetPlatformEnv(
      ApplePlatform platform, DottedVersion sdkVersion) {
    ImmutableMap.Builder<String, String> builder = ImmutableMap.builder();

    builder
        .put(AppleConfiguration.APPLE_SDK_VERSION_ENV_NAME,
            sdkVersion.toStringWithMinimumComponents(2))
        .put(AppleConfiguration.APPLE_SDK_PLATFORM_ENV_NAME,
            platform.getNameInPlist());

    return builder.build();
  }

  /**
   * Returns a map of environment variables that should be propagated for actions that require a
   * version of xcode to be explicitly declared. Keys are variable names and values are their
   * corresponding values.
   */
  public static ImmutableMap<String, String> getXcodeVersionEnv(DottedVersion xcodeVersion) {
    if (xcodeVersion != null) {
      return ImmutableMap.of(AppleConfiguration.XCODE_VERSION_ENV_NAME, xcodeVersion.toString());
    } else {
      return ImmutableMap.of();
    }
  }

  /**
   * Returns the value of {@code ios_cpu} for this configuration. This is not necessarily the
   * platform or cpu for all actions spawned in this configuration; it is appropriate for
   * identifying the target cpu of iOS compile and link actions within this configuration.
   */
  @Override
  public String getIosCpu() {
    return iosCpu;
  }

  /**
   * Gets the single "effective" architecture for this configuration's {@link PlatformType} (for
   * example, "i386" or "arm64"). Prefer this over {@link #getMultiArchitectures(PlatformType)} only
   * if in the context of rule logic which is only concerned with a single architecture (such as in
   * {@code objc_library}, which registers single-architecture compile actions).
   *
   * <p>Single effective architecture is determined using the following rules:
   *
   * <ol>
   * <li>If {@code --apple_split_cpu} is set (done via prior configuration transition), then that is
   * the effective architecture.
   * <li>If the multi cpus flag (e.g. {@code --ios_multi_cpus}) is set and non-empty, then the first
   * such architecture is returned.
   * <li>In the case of iOS, use {@code --ios_cpu} for backwards compatibility.
   * <li>Use the default.
   * </ol>
   */
  @Override
  public String getSingleArchitecture() {
    if (!Strings.isNullOrEmpty(appleSplitCpu)) {
      return appleSplitCpu;
    }
    switch (applePlatformType) {
      case IOS:
        if (!getIosMultiCpus().isEmpty()) {
          return getIosMultiCpus().get(0);
        } else {
          return getIosCpu();
        }
      case WATCHOS:
        return watchosCpus.get(0);
      case TVOS:
        return tvosCpus.get(0);
      case MACOS:
        return macosCpus.get(0);
      default:
        throw new IllegalArgumentException("Unhandled platform type " + applePlatformType);
    }
  }

  /**
   * Gets the "effective" architecture(s) for the given {@link PlatformType}. For example,
   * "i386" or "arm64". At least one architecture is always returned. Prefer this over
   * {@link #getSingleArchitecture} in rule logic which may support multiple architectures, such
   * as bundling rules.
   *
   * <p>Effective architecture(s) is determined using the following rules:
   * <ol>
   * <li>If {@code --apple_split_cpu} is set (done via prior configuration transition), then
   * that is the effective architecture.</li>
   * <li>If the multi-cpu flag (for example, {@code --ios_multi_cpus}) is non-empty, then, return
   * all architectures from that flag.</li>
   * <li>In the case of iOS, use {@code --ios_cpu} for backwards compatibility.</li>
   * <li>Use the default.</li></ol>
   *
   * @throws IllegalArgumentException if {@code --apple_platform_type} is set (via prior
   *     configuration transition) yet does not match {@code platformType}
   */
  public List<String> getMultiArchitectures(PlatformType platformType) {
    if (!Strings.isNullOrEmpty(appleSplitCpu)) {
      if (applePlatformType != platformType) {
        throw new IllegalArgumentException(
            String.format("Expected post-split-transition platform type %s to match input %s ",
                applePlatformType, platformType));
      }
      return ImmutableList.of(appleSplitCpu);
    }
    switch (platformType) {
      case IOS:
        if (getIosMultiCpus().isEmpty()) {
          return ImmutableList.of(getIosCpu());
        } else {
          return getIosMultiCpus();
        }
      case WATCHOS:
        return watchosCpus;
      case TVOS:
        return tvosCpus;
      case MACOS:
        return macosCpus;
      default:
        throw new IllegalArgumentException("Unhandled platform type " + platformType);
    }
  }

  /**
   * Gets the single "effective" platform for this configuration's {@link PlatformType} and
   * architecture. Prefer this over {@link #getMultiArchPlatform(PlatformType)} only in cases if in
   * the context of rule logic which is only concerned with a single architecture (such as in {@code
   * objc_library}, which registers single-architecture compile actions).
   */
  @Override
  public ApplePlatform getSingleArchPlatform() {
    return ApplePlatform.forTarget(applePlatformType, getSingleArchitecture());
  }

  /** Returns whether the single effective cpu string maps to a known {@link ApplePlatform}. */
  private boolean hasValidSingleArchPlatform() {
    return ApplePlatform.isApplePlatform(
        ApplePlatform.cpuStringForTarget(applePlatformType, getSingleArchitecture()));
  }

  /**
   * Gets the current configuration {@link ApplePlatform} for the given {@link PlatformType}.
   * ApplePlatform is determined via a combination between the given platform type and the
   * "effective" architectures of this configuration, as returned by {@link #getMultiArchitectures};
   * if any of the supported architectures are of device type, this will return a device platform.
   * Otherwise, this will return a simulator platform.
   */
  // TODO(bazel-team): This should support returning multiple platforms.
  @Override
  public ApplePlatform getMultiArchPlatform(PlatformType platformType) {
    List<String> architectures = getMultiArchitectures(platformType);
    switch (platformType) {
      case IOS:
        for (String arch : architectures) {
          if (ApplePlatform.forTarget(PlatformType.IOS, arch) == ApplePlatform.IOS_DEVICE) {
            return ApplePlatform.IOS_DEVICE;
          }
        }
        return ApplePlatform.IOS_SIMULATOR;
      case WATCHOS:
        for (String arch : architectures) {
          if (ApplePlatform.forTarget(PlatformType.WATCHOS, arch) == ApplePlatform.WATCHOS_DEVICE) {
            return ApplePlatform.WATCHOS_DEVICE;
          }
        }
        return ApplePlatform.WATCHOS_SIMULATOR;
      case TVOS:
        for (String arch : architectures) {
          if (ApplePlatform.forTarget(PlatformType.TVOS, arch) == ApplePlatform.TVOS_DEVICE) {
            return ApplePlatform.TVOS_DEVICE;
          }
        }
        return ApplePlatform.TVOS_SIMULATOR;
      case MACOS:
        return ApplePlatform.MACOS;
      default:
        throw new IllegalArgumentException("Unsupported platform type " + platformType);
    }
  }

  /**
   * Returns the {@link ApplePlatform} represented by {@code ios_cpu} (see {@link #getIosCpu}. (For
   * example, {@code i386} maps to {@link ApplePlatform#IOS_SIMULATOR}.) Note that this is not
   * necessarily the effective platform for all ios actions in the current context: This is
   * typically the correct platform for implicitly-ios compile and link actions in the current
   * context. For effective platform for bundling actions, see {@link
   * #getMultiArchPlatform(PlatformType)}.
   */
  // TODO(b/28754442): Deprecate for more general Starlark-exposed platform retrieval.
  @Override
  public ApplePlatform getIosCpuPlatform() {
    return ApplePlatform.forTarget(PlatformType.IOS, iosCpu);
  }

  /**
   * Returns the architecture for which we keep dependencies that should be present only once (in a
   * single architecture).
   *
   * <p>When building with multiple architectures there are some dependencies we want to avoid
   * duplicating: they would show up more than once in the same location in the final application
   * bundle which is illegal. Instead we pick one architecture for which to keep all dependencies
   * and discard any others.
   */
  public String getDependencySingleArchitecture() {
    if (!getIosMultiCpus().isEmpty()) {
      return getIosMultiCpus().get(0);
    }
    return getIosCpu();
  }

  /**
   * List of all CPUs that this invocation is being built for. Different from {@link #getIosCpu()}
   * which is the specific CPU <b>this target</b> is being built for.
   */
  public ImmutableList<String> getIosMultiCpus() {
    return iosMultiCpus;
  }

  /**
   * Returns the label of the default provisioning profile to use when bundling/signing an ios
   * application. Returns null if the target platform is not an iOS device (for example, if
   * iOS simulator is being targeted).
   */
  @Nullable
  public Label getDefaultProvisioningProfileLabel() {
    return defaultProvisioningProfileLabel;
  }

  /**
   * Returns the bitcode mode to use for compilation steps. This should only be invoked in
   * single-architecture contexts.
   *
   * <p>Users can control bitcode mode using the {@code apple_bitcode} build flag, but bitcode
   * will be disabled for all simulator architectures regardless of this flag.
   *
   * @see AppleBitcodeMode
   */
  @Override
  public AppleBitcodeMode getBitcodeMode() {
    if (hasValidSingleArchPlatform() && getSingleArchPlatform().isDevice()) {
      return bitcodeMode;
    } else {
      return AppleBitcodeMode.NONE;
    }
  }

  /**
   * Returns the label of the xcode_config rule to use for resolving the host system xcode version.
   */
  @SkylarkConfigurationField(
      name = "xcode_config_label",
      doc = "Returns the target denoted by the value of the --xcode_version_config flag",
      defaultLabel = AppleCommandLineOptions.DEFAULT_XCODE_VERSION_CONFIG_LABEL,
      defaultInToolRepository = true
  )
  public Label getXcodeConfigLabel() {
    return xcodeConfigLabel;
  }

  @Nullable
  @Override
  public String getOutputDirectoryName() {
    List<String> components = new ArrayList<>();
    if (!appleSplitCpu.isEmpty()) {
      components.add(applePlatformType.toString().toLowerCase());
      components.add(appleSplitCpu);
      if (options.getMinimumOsVersion() != null) {
        components.add("min" + options.getMinimumOsVersion());
      }
    }
    if (configurationDistinguisher != ConfigurationDistinguisher.UNKNOWN) {
      components.add(configurationDistinguisher.getFileSystemName());
    }
    if (components.isEmpty()) {
      return null;
    }
    // Components are joined with '-' to form a single output-directory segment.
    return Joiner.on('-').join(components);
  }

  /** Returns true if the minimum_os_version attribute should be mandatory on rules with linking. */
  public boolean isMandatoryMinimumVersion() {
    return mandatoryMinimumVersion;
  }

  /**
   * Returns true if rules which manage link actions should propagate {@link ObjcProvider} at the
   * top level.
   **/
  public boolean shouldLinkingRulesPropagateObjc() {
    return objcProviderFromLinked;
  }

  // Equality is delegated entirely to the underlying command-line options; iosCpu is derived
  // from options in create() and so does not participate separately.
  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (!(obj instanceof AppleConfiguration)) {
      return false;
    }
    AppleConfiguration that = (AppleConfiguration) obj;
    return this.options.equals(that.options);
  }

  @Override
  public int hashCode() {
    return options.hashCode();
  }

  @VisibleForTesting
  static AppleConfiguration create(AppleCommandLineOptions appleOptions, String cpu) {
    return new AppleConfiguration(appleOptions, iosCpuFromCpu(cpu));
  }

  /**
   * Loads {@link AppleConfiguration} from build options.
   */
  public static class Loader implements ConfigurationFragmentFactory {
    @Override
    public AppleConfiguration create(BuildOptions buildOptions) {
      AppleCommandLineOptions appleOptions = buildOptions.get(AppleCommandLineOptions.class);
      String cpu = buildOptions.get(CoreOptions.class).cpu;
      return AppleConfiguration.create(appleOptions, cpu);
    }

    @Override
    public Class<? extends Fragment> creates() {
      return AppleConfiguration.class;
    }

    @Override
    public ImmutableSet<Class<? extends FragmentOptions>> requiredOptions() {
      return ImmutableSet.<Class<? extends FragmentOptions>>of(AppleCommandLineOptions.class);
    }
  }

  /**
   * Value used to avoid multiple configurations from conflicting. No two instances of this
   * transition may exist with the same value in a single Bazel invocation.
   */
  public enum ConfigurationDistinguisher {
    UNKNOWN("unknown"),
    /** Distinguisher for {@code apple_binary} rule with "ios" platform_type. */
    APPLEBIN_IOS("applebin_ios"),
    /** Distinguisher for {@code apple_binary} rule with "watchos" platform_type. */
    APPLEBIN_WATCHOS("applebin_watchos"),
    /** Distinguisher for {@code apple_binary} rule with "tvos" platform_type. */
    APPLEBIN_TVOS("applebin_tvos"),
    /** Distinguisher for {@code apple_binary} rule with "macos" platform_type. */
    APPLEBIN_MACOS("applebin_macos"),
    /**
     * Distinguisher for the apple crosstool configuration. We use "apl" for output directory
     * names instead of "apple_crosstool" to avoid oversized path names, which can be problematic
     * on OSX.
     */
    APPLE_CROSSTOOL("apl");

    // Short name used as a segment of the output directory path.
    private final String fileSystemName;

    private ConfigurationDistinguisher(String fileSystemName) {
      this.fileSystemName = fileSystemName;
    }

    /**
     * Returns the distinct string that should be used in creating output directories for a
     * configuration with this distinguisher.
     */
    public String getFileSystemName() {
      return fileSystemName;
    }
  }
}
/*
 * Copyright 2019 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.kie.server.controller.api.model.spec;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;

import org.kie.server.api.model.KieServerMode;
import org.kie.server.controller.api.model.runtime.ServerInstanceKey;

/**
 * Full description of a server template: its container specifications, per-capability
 * configuration, known server instances, declared capabilities and server mode.
 *
 * <p>Collection-returning getters ({@link #getConfigs()}, {@link #getContainersSpec()},
 * {@link #getServerInstanceKeys()}) hand out defensive copies because some marshalling
 * frameworks (e.g. Errai) do not handle unmodifiable collections well; mutate the template
 * through the add/delete methods instead. Fields may be left null by unmarshalling, so the
 * getters also lazily re-initialize them.</p>
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlRootElement(name = "server-template-details")
public class ServerTemplate extends ServerTemplateKey {

    @XmlElement(name = "container-specs")
    private Collection<ContainerSpec> containersSpec = new ArrayList<ContainerSpec>();

    @XmlElement(name = "server-config")
    private Map<Capability, ServerConfig> configs = new HashMap<Capability, ServerConfig>();

    @XmlElement(name = "server-instances")
    private Collection<ServerInstanceKey> serverInstances = new ArrayList<ServerInstanceKey>();

    @XmlElement(name = "capabilities")
    private List<String> capabilities = new ArrayList<String>();

    @XmlElement(name = "mode")
    private KieServerMode mode;

    /** No-arg constructor required by JAXB/Errai marshalling. */
    public ServerTemplate() {
    }

    /**
     * Creates a template identified by {@code id} and {@code name} with no containers,
     * configs, capabilities or instances.
     */
    public ServerTemplate(final String id,
                          final String name) {
        super(id, name);
    }

    /**
     * Creates a fully described template without any known server instances.
     *
     * @param id             template identifier
     * @param name           human-readable template name
     * @param capabilities   declared capability names; copied into this template
     * @param configs        per-capability server configuration; copied into this template
     * @param containersSpec container specifications; copied into this template
     */
    public ServerTemplate(final String id,
                          final String name,
                          final Collection<String> capabilities,
                          final Map<Capability, ServerConfig> configs,
                          final Collection<ContainerSpec> containersSpec) {
        super(id, name);
        this.capabilities.addAll(capabilities);
        this.configs.putAll(configs);
        this.containersSpec.addAll(containersSpec);
    }

    /**
     * Creates a fully described template including its known server instances.
     *
     * @param serverInstanceKeys keys of the server instances currently attached to this template
     */
    // Fixed parameter-name typo ("serverIntanceKeys"); Java callers are unaffected by the rename.
    public ServerTemplate(final String id,
                          final String name,
                          final Collection<String> capabilities,
                          final Map<Capability, ServerConfig> configs,
                          final Collection<ContainerSpec> containersSpec,
                          final Collection<ServerInstanceKey> serverInstanceKeys) {
        this(id, name, capabilities, configs, containersSpec);
        this.serverInstances.addAll(serverInstanceKeys);
    }

    /**
     * Returns a defensive copy of the per-capability configuration map.
     * Never returns null; lazily re-initializes the field if unmarshalling left it null.
     */
    public Map<Capability, ServerConfig> getConfigs() {
        if (configs == null) {
            configs = new HashMap<Capability, ServerConfig>();
        }
        return new HashMap<Capability, ServerConfig>(configs);
    }

    /**
     * Returns a defensive copy of the container specifications.
     * Never returns null; lazily re-initializes the field if unmarshalling left it null.
     */
    public Collection<ContainerSpec> getContainersSpec() {
        if (containersSpec == null) {
            containersSpec = new ArrayList<ContainerSpec>();
        }
        //Errai doesn't play nice with unmod collection
        return new ArrayList<ContainerSpec>(containersSpec);
    }

    /**
     * Returns a defensive copy of the known server instance keys.
     * Never returns null; lazily re-initializes the field if unmarshalling left it null.
     */
    public Collection<ServerInstanceKey> getServerInstanceKeys() {
        if (serverInstances == null) {
            serverInstances = new ArrayList<ServerInstanceKey>();
        }
        return new ArrayList<ServerInstanceKey>(serverInstances);
    }

    /** Returns true if this template holds a container spec with the given id. */
    public boolean hasContainerSpec(String containerSpecId) {
        for (ContainerSpec spec : getContainersSpec()) {
            if (containerSpecId.equals(spec.getId())) {
                return true;
            }
        }
        return false;
    }

    /** Returns the container spec with the given id, or null if none matches. */
    public ContainerSpec getContainerSpec(String containerSpecId) {
        for (ContainerSpec spec : getContainersSpec()) {
            if (containerSpecId.equals(spec.getId())) {
                return spec;
            }
        }
        return null;
    }

    /**
     * Adds a container spec to this template.
     * NOTE(review): unlike {@link #addServerInstance}, no duplicate check is performed here —
     * presumably callers guarantee uniqueness by id; confirm before adding one.
     */
    public void addContainerSpec(ContainerSpec containerSpec) {
        if (containersSpec == null) {
            containersSpec = new ArrayList<ContainerSpec>();
        }
        containersSpec.add(containerSpec);
    }

    /** Removes every container spec whose id equals {@code containerSpecId}. */
    public void deleteContainerSpec(String containerSpecId) {
        if (containersSpec == null) {
            return;
        }
        Iterator<ContainerSpec> iterator = containersSpec.iterator();
        while (iterator.hasNext()) {
            ContainerSpec spec = iterator.next();
            if (containerSpecId.equals(spec.getId())) {
                iterator.remove();
            }
        }
    }

    /** Returns true if a known server instance has the given URL. */
    public boolean hasServerInstance(String serverInstanceUrl) {
        for (ServerInstanceKey spec : getServerInstanceKeys()) {
            if (serverInstanceUrl.equals(spec.getUrl())) {
                return true;
            }
        }
        return false;
    }

    /** Returns true if a known server instance has the given instance id. */
    public boolean hasServerInstanceId(String serverInstanceId) {
        for (ServerInstanceKey instance : getServerInstanceKeys()) {
            if (instance.getServerInstanceId().equals(serverInstanceId)) {
                return true;
            }
        }
        return false;
    }

    /** Returns the server instance key with the given instance id, or null if none matches. */
    public ServerInstanceKey getServerInstance(String serverInstanceId) {
        for (ServerInstanceKey instance : getServerInstanceKeys()) {
            if (instance.getServerInstanceId().equals(serverInstanceId)) {
                return instance;
            }
        }
        return null;
    }

    /** Adds a server instance key unless an equal one is already present. */
    public void addServerInstance(ServerInstanceKey serverInstance) {
        if (serverInstances == null) {
            serverInstances = new ArrayList<ServerInstanceKey>();
        }
        if (!serverInstances.contains(serverInstance)) {
            serverInstances.add(serverInstance);
        }
    }

    /** Removes every server instance key whose instance id equals {@code serverInstanceId}. */
    public void deleteServerInstance(String serverInstanceId) {
        if (serverInstances == null) {
            return;
        }
        Iterator<ServerInstanceKey> iterator = serverInstances.iterator();
        while (iterator.hasNext()) {
            ServerInstanceKey serverInstanceKey = iterator.next();
            if (serverInstanceId.equals(serverInstanceKey.getServerInstanceId())) {
                iterator.remove();
            }
        }
    }

    /** Replaces the container specifications (the given collection is stored as-is). */
    public void setContainersSpec(Collection<ContainerSpec> containersSpec) {
        this.containersSpec = containersSpec;
    }

    /** Replaces the per-capability configuration map (the given map is stored as-is). */
    public void setConfigs(Map<Capability, ServerConfig> configs) {
        this.configs = configs;
    }

    /**
     * Returns the declared capability names.
     * NOTE(review): unlike the other getters, this exposes the internal list directly (no copy);
     * callers can mutate template state through it — confirm whether that is relied upon.
     */
    public List<String> getCapabilities() {
        return capabilities;
    }

    /** Replaces the declared capability names (the given list is stored as-is). */
    public void setCapabilities(List<String> capabilities) {
        this.capabilities = capabilities;
    }

    /** Returns the server mode, or null if not set. */
    public KieServerMode getMode() {
        return mode;
    }

    /** Sets the server mode. */
    public void setMode(KieServerMode mode) {
        this.mode = mode;
    }

    /** Returns true if this template and the given key share the same template id. */
    public boolean hasMatchingId(ServerTemplateKey serverTemplateKey) {
        return getId().equals(serverTemplateKey.getId());
    }

    // NOTE(review): equality intentionally (?) considers only the key (via super) and the
    // capabilities list — containers, configs, instances and mode are excluded. Confirm callers
    // depend on this before widening it; hashCode below is kept consistent with equals.
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof ServerTemplate)) {
            return false;
        }
        if (!super.equals(o)) {
            return false;
        }

        ServerTemplate that = (ServerTemplate) o;

        if (capabilities != null ? !capabilities.equals(that.capabilities) : that.capabilities != null) {
            return false;
        }

        return true;
    }

    @Override
    public int hashCode() {
        int result = super.hashCode();
        result = 31 * result + (capabilities != null ? capabilities.hashCode() : 0);
        return result;
    }
}
/** * <copyright> * </copyright> * * $Id$ */ package net.opengis.gml.presentation; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.EventObject; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import org.eclipse.core.resources.IFile; import org.eclipse.core.resources.IMarker; import org.eclipse.core.resources.IResource; import org.eclipse.core.resources.IResourceChangeEvent; import org.eclipse.core.resources.IResourceChangeListener; import org.eclipse.core.resources.IResourceDelta; import org.eclipse.core.resources.IResourceDeltaVisitor; import org.eclipse.core.resources.ResourcesPlugin; import org.eclipse.core.runtime.CoreException; import org.eclipse.core.runtime.IPath; import org.eclipse.core.runtime.IProgressMonitor; import org.eclipse.core.runtime.NullProgressMonitor; import org.eclipse.jface.action.IMenuListener; import org.eclipse.jface.action.IMenuManager; import org.eclipse.jface.action.IStatusLineManager; import org.eclipse.jface.action.IToolBarManager; import org.eclipse.jface.action.MenuManager; import org.eclipse.jface.action.Separator; import org.eclipse.jface.dialogs.MessageDialog; import org.eclipse.jface.dialogs.ProgressMonitorDialog; import org.eclipse.jface.viewers.ColumnWeightData; import org.eclipse.jface.viewers.ISelection; import org.eclipse.jface.viewers.ISelectionChangedListener; import org.eclipse.jface.viewers.ISelectionProvider; import org.eclipse.jface.viewers.IStructuredSelection; import org.eclipse.jface.viewers.ListViewer; import org.eclipse.jface.viewers.SelectionChangedEvent; import org.eclipse.jface.viewers.StructuredSelection; import org.eclipse.jface.viewers.StructuredViewer; import org.eclipse.jface.viewers.TableLayout; import org.eclipse.jface.viewers.TableViewer; import org.eclipse.jface.viewers.TreeViewer; import 
org.eclipse.jface.viewers.Viewer; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.CTabFolder; import org.eclipse.swt.dnd.DND; import org.eclipse.swt.dnd.Transfer; import org.eclipse.swt.events.ControlAdapter; import org.eclipse.swt.events.ControlEvent; import org.eclipse.swt.graphics.Point; import org.eclipse.swt.layout.FillLayout; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Menu; import org.eclipse.swt.widgets.Table; import org.eclipse.swt.widgets.TableColumn; import org.eclipse.swt.widgets.Tree; import org.eclipse.swt.widgets.TreeColumn; import org.eclipse.ui.IActionBars; import org.eclipse.ui.IEditorInput; import org.eclipse.ui.IEditorPart; import org.eclipse.ui.IEditorSite; import org.eclipse.ui.IPartListener; import org.eclipse.ui.IWorkbenchPart; import org.eclipse.ui.PartInitException; import org.eclipse.ui.dialogs.SaveAsDialog; import org.eclipse.ui.ide.IGotoMarker; import org.eclipse.ui.part.FileEditorInput; import org.eclipse.ui.part.MultiPageEditorPart; import org.eclipse.ui.views.contentoutline.ContentOutline; import org.eclipse.ui.views.contentoutline.ContentOutlinePage; import org.eclipse.ui.views.contentoutline.IContentOutlinePage; import org.eclipse.ui.views.properties.IPropertySheetPage; import org.eclipse.ui.views.properties.PropertySheet; import org.eclipse.ui.views.properties.PropertySheetPage; import org.eclipse.emf.common.command.BasicCommandStack; import org.eclipse.emf.common.command.Command; import org.eclipse.emf.common.command.CommandStack; import org.eclipse.emf.common.command.CommandStackListener; import org.eclipse.emf.common.notify.AdapterFactory; import org.eclipse.emf.common.notify.Notification; import org.eclipse.emf.common.ui.MarkerHelper; import org.eclipse.emf.common.ui.ViewerPane; import org.eclipse.emf.common.ui.editor.ProblemEditorPart; import org.eclipse.emf.common.ui.viewer.IViewerProvider; import org.eclipse.emf.common.util.BasicDiagnostic; import 
org.eclipse.emf.common.util.Diagnostic; import org.eclipse.emf.common.util.URI; import org.eclipse.emf.ecore.EObject; import org.eclipse.emf.ecore.EValidator; import org.eclipse.emf.ecore.resource.Resource; import org.eclipse.emf.ecore.resource.ResourceSet; import org.eclipse.emf.ecore.util.EContentAdapter; import org.eclipse.emf.ecore.util.EcoreUtil; import org.eclipse.emf.edit.domain.AdapterFactoryEditingDomain; import org.eclipse.emf.edit.domain.EditingDomain; import org.eclipse.emf.edit.domain.IEditingDomainProvider; import org.eclipse.emf.edit.provider.AdapterFactoryItemDelegator; import org.eclipse.emf.edit.provider.ComposedAdapterFactory; import org.eclipse.emf.edit.provider.ReflectiveItemProviderAdapterFactory; import org.eclipse.emf.edit.provider.resource.ResourceItemProviderAdapterFactory; import org.eclipse.emf.edit.ui.action.EditingDomainActionBarContributor; import org.eclipse.emf.edit.ui.celleditor.AdapterFactoryTreeEditor; import org.eclipse.emf.edit.ui.dnd.EditingDomainViewerDropAdapter; import org.eclipse.emf.edit.ui.dnd.LocalTransfer; import org.eclipse.emf.edit.ui.dnd.ViewerDragAdapter; import org.eclipse.emf.edit.ui.provider.AdapterFactoryContentProvider; import org.eclipse.emf.edit.ui.provider.AdapterFactoryLabelProvider; import org.eclipse.emf.edit.ui.provider.UnwrappingSelectionProvider; import org.eclipse.emf.edit.ui.util.EditUIMarkerHelper; import org.eclipse.emf.edit.ui.util.EditUIUtil; import org.eclipse.emf.edit.ui.view.ExtendedPropertySheetPage; import net.opengis.gml.provider.GmlItemProviderAdapterFactory; import net.opengis.citygml.appearance.provider.AppearanceItemProviderAdapterFactory; import net.opengis.citygml.building.presentation.CityGMLEditorPlugin; import net.opengis.citygml.building.provider.BuildingItemProviderAdapterFactory; import net.opengis.citygml.cityfurniture.provider.CityfurnitureItemProviderAdapterFactory; import net.opengis.citygml.cityobjectgroup.provider.CityobjectgroupItemProviderAdapterFactory; import 
net.opengis.citygml.generics.provider.GenericsItemProviderAdapterFactory;
import net.opengis.citygml.landuse.provider.LanduseItemProviderAdapterFactory;
import net.opengis.citygml.provider.CitygmlItemProviderAdapterFactory;
import net.opengis.citygml.relief.provider.ReliefItemProviderAdapterFactory;
import net.opengis.citygml.texturedsurface.provider.TexturedsurfaceItemProviderAdapterFactory;
import net.opengis.citygml.transportation.provider.TransportationItemProviderAdapterFactory;
import net.opengis.citygml.vegetation.provider.VegetationItemProviderAdapterFactory;
import net.opengis.citygml.waterbody.provider.WaterbodyItemProviderAdapterFactory;
import org.eclipse.ui.actions.WorkspaceModifyOperation;
import org.oasis.xAL.provider.XALItemProviderAdapterFactory;
import org.w3._1999.xlink.provider.XlinkItemProviderAdapterFactory;
import org.w3._2001.smil20.language.provider.LanguageItemProviderAdapterFactory;
import org.w3._2001.smil20.provider.Smil20ItemProviderAdapterFactory;
import org.w3.xml._1998.namespace.provider.NamespaceItemProviderAdapterFactory;

/**
 * This is an example of a Gml model editor.
 * EMF-generated multi-page editor: it hosts several synchronized viewers
 * (tree, list, table, tree-with-columns) over one editing domain.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public class GmlEditor
    extends MultiPageEditorPart
    implements IEditingDomainProvider, ISelectionProvider, IMenuListener, IViewerProvider, IGotoMarker {
    /**
     * This keeps track of the editing domain that is used to track all changes to the model.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected AdapterFactoryEditingDomain editingDomain;

    /**
     * This is the one adapter factory used for providing views of the model.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected ComposedAdapterFactory adapterFactory;

    /**
     * This is the content outline page.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected IContentOutlinePage contentOutlinePage;

    /**
     * This is a kludge...
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected IStatusLineManager contentOutlineStatusLineManager;

/**
 * This is the content outline page's viewer.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected TreeViewer contentOutlineViewer;

/**
 * This is the property sheet page.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected PropertySheetPage propertySheetPage;

/**
 * This is the viewer that shadows the selection in the content outline.
 * The parent relation must be correctly defined for this to work.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected TreeViewer selectionViewer;

/**
 * This inverts the role of parent and child in the content provider and shows parents as a tree.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected TreeViewer parentViewer;

/**
 * This shows how a tree view works.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected TreeViewer treeViewer;

/**
 * This shows how a list view works.
 * A list viewer doesn't support icons.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected ListViewer listViewer;

/**
 * This shows how a table view works.
 * A table can be used as a list with icons.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected TableViewer tableViewer;

/**
 * This shows how a tree view with columns works.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected TreeViewer treeViewerWithColumns;

/**
 * This keeps track of the active viewer pane, in the book.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected ViewerPane currentViewerPane;

/**
 * This keeps track of the active content viewer, which may be either one of the viewers in the pages or the content outline viewer.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected Viewer currentViewer;

/**
 * This listens to whichever viewer is active.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected ISelectionChangedListener selectionChangedListener;

/**
 * This keeps track of all the {@link org.eclipse.jface.viewers.ISelectionChangedListener}s that are listening to this editor.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected Collection<ISelectionChangedListener> selectionChangedListeners = new ArrayList<ISelectionChangedListener>();

/**
 * This keeps track of the selection of the editor as a whole.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected ISelection editorSelection = StructuredSelection.EMPTY;

/**
 * The MarkerHelper is responsible for creating workspace resource markers presented
 * in Eclipse's Problems View.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected MarkerHelper markerHelper = new EditUIMarkerHelper();

/**
 * This listens for when the outline becomes active.
 * When this editor (or its outline / property sheet) is activated it
 * re-registers itself with the action-bar contributor and refreshes state.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected IPartListener partListener =
    new IPartListener() {
        public void partActivated(IWorkbenchPart p) {
            if (p instanceof ContentOutline) {
                if (((ContentOutline)p).getCurrentPage() == contentOutlinePage) {
                    getActionBarContributor().setActiveEditor(GmlEditor.this);

                    setCurrentViewer(contentOutlineViewer);
                }
            }
            else if (p instanceof PropertySheet) {
                if (((PropertySheet)p).getCurrentPage() == propertySheetPage) {
                    getActionBarContributor().setActiveEditor(GmlEditor.this);
                    handleActivate();
                }
            }
            else if (p == GmlEditor.this) {
                handleActivate();
            }
        }
        public void partBroughtToTop(IWorkbenchPart p) {
            // Ignore.
        }
        public void partClosed(IWorkbenchPart p) {
            // Ignore.
        }
        public void partDeactivated(IWorkbenchPart p) {
            // Ignore.
        }
        public void partOpened(IWorkbenchPart p) {
            // Ignore.
        }
    };

/**
 * Resources that have been removed since last activation.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected Collection<Resource> removedResources = new ArrayList<Resource>();

/**
 * Resources that have been changed since last activation.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected Collection<Resource> changedResources = new ArrayList<Resource>();

/**
 * Resources that have been saved.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected Collection<Resource> savedResources = new ArrayList<Resource>();

/**
 * Map to store the diagnostic associated with a resource.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected Map<Resource, Diagnostic> resourceToDiagnosticMap = new LinkedHashMap<Resource, Diagnostic>();

/**
 * Controls whether the problem indication should be updated.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected boolean updateProblemIndication = true;

/**
 * Adapter used to update the problem indication when resources are demand loaded.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected EContentAdapter problemIndicationAdapter =
    new EContentAdapter() {
        @Override
        public void notifyChanged(Notification notification) {
            // Only react to resource-level load/error/warning changes; other
            // notifications are forwarded to the default content-adapter walk.
            if (notification.getNotifier() instanceof Resource) {
                switch (notification.getFeatureID(Resource.class)) {
                    case Resource.RESOURCE__IS_LOADED:
                    case Resource.RESOURCE__ERRORS:
                    case Resource.RESOURCE__WARNINGS: {
                        Resource resource = (Resource)notification.getNotifier();
                        Diagnostic diagnostic = analyzeResourceProblems(resource, null);
                        if (diagnostic.getSeverity() != Diagnostic.OK) {
                            resourceToDiagnosticMap.put(resource, diagnostic);
                        }
                        else {
                            resourceToDiagnosticMap.remove(resource);
                        }

                        // UI refresh must happen on the display thread.
                        if (updateProblemIndication) {
                            getSite().getShell().getDisplay().asyncExec
                                (new Runnable() {
                                     public void run() {
                                         updateProblemIndication();
                                     }
                                 });
                        }
                        break;
                    }
                }
            }
            else {
                super.notifyChanged(notification);
            }
        }

        @Override
        protected void setTarget(Resource target) {
            basicSetTarget(target);
        }

        @Override
        protected void unsetTarget(Resource target) {
            basicUnsetTarget(target);
        }
    };

/**
 * This listens for workspace changes.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected IResourceChangeListener resourceChangeListener =
    new IResourceChangeListener() {
        public void resourceChanged(IResourceChangeEvent event) {
            IResourceDelta delta = event.getDelta();
            try {
                class ResourceDeltaVisitor implements IResourceDeltaVisitor {
                    protected ResourceSet resourceSet = editingDomain.getResourceSet();
                    protected Collection<Resource> changedResources = new ArrayList<Resource>();
                    protected Collection<Resource> removedResources = new ArrayList<Resource>();

                    public boolean visit(IResourceDelta delta) {
                        if (delta.getResource().getType() == IResource.FILE) {
                            // Marker-only changes are ignored; a save from this editor
                            // removes the resource from savedResources instead of
                            // flagging it as externally changed.
                            if (delta.getKind() == IResourceDelta.REMOVED ||
                                  delta.getKind() == IResourceDelta.CHANGED && delta.getFlags() != IResourceDelta.MARKERS) {
                                Resource resource = resourceSet.getResource(URI.createPlatformResourceURI(delta.getFullPath().toString(), true), false);
                                if (resource != null) {
                                    if (delta.getKind() == IResourceDelta.REMOVED) {
                                        removedResources.add(resource);
                                    }
                                    else if (!savedResources.remove(resource)) {
                                        changedResources.add(resource);
                                    }
                                }
                            }
                        }

                        return true;
                    }

                    public Collection<Resource> getChangedResources() {
                        return changedResources;
                    }

                    public Collection<Resource> getRemovedResources() {
                        return removedResources;
                    }
                }

                final ResourceDeltaVisitor visitor = new ResourceDeltaVisitor();
                delta.accept(visitor);

                if (!visitor.getRemovedResources().isEmpty()) {
                    getSite().getShell().getDisplay().asyncExec
                        (new Runnable() {
                             public void run() {
                                 removedResources.addAll(visitor.getRemovedResources());
                                 // Close immediately only when there is nothing to lose.
                                 if (!isDirty()) {
                                     getSite().getPage().closeEditor(GmlEditor.this, false);
                                 }
                             }
                         });
                }

                if (!visitor.getChangedResources().isEmpty()) {
                    getSite().getShell().getDisplay().asyncExec
                        (new Runnable() {
                             public void run() {
                                 changedResources.addAll(visitor.getChangedResources());
                                 if (getSite().getPage().getActiveEditor() == GmlEditor.this) {
                                     handleActivate();
                                 }
                             }
                         });
                }
            }
            catch (CoreException exception) {
                CityGMLEditorPlugin.INSTANCE.log(exception);
            }
        }
    };
/**
 * Handles activation of the editor or its associated views.
 * Re-reads the read-only state and reconciles resources removed or
 * changed in the workspace while the editor was inactive.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected void handleActivate() {
    // Recompute the read only state.
    //
    if (editingDomain.getResourceToReadOnlyMap() != null) {
        editingDomain.getResourceToReadOnlyMap().clear();

        // Refresh any actions that may become enabled or disabled.
        //
        setSelection(getSelection());
    }

    if (!removedResources.isEmpty()) {
        if (handleDirtyConflict()) {
            getSite().getPage().closeEditor(GmlEditor.this, false);
        }
        else {
            removedResources.clear();
            changedResources.clear();
            savedResources.clear();
        }
    }
    else if (!changedResources.isEmpty()) {
        changedResources.removeAll(savedResources);
        handleChangedResources();
        changedResources.clear();
        savedResources.clear();
    }
}

/**
 * Handles what to do with changed resources on activation:
 * reloads them (discarding unsaved edits if the user agreed via
 * {@link #handleDirtyConflict()}) and refreshes the problem indication.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected void handleChangedResources() {
    if (!changedResources.isEmpty() && (!isDirty() || handleDirtyConflict())) {
        if (isDirty()) {
            changedResources.addAll(editingDomain.getResourceSet().getResources());
        }
        editingDomain.getCommandStack().flush();

        // Suppress per-resource problem updates while bulk-reloading.
        updateProblemIndication = false;
        for (Resource resource : changedResources) {
            if (resource.isLoaded()) {
                resource.unload();
                try {
                    resource.load(Collections.EMPTY_MAP);
                }
                catch (IOException exception) {
                    if (!resourceToDiagnosticMap.containsKey(resource)) {
                        resourceToDiagnosticMap.put(resource, analyzeResourceProblems(resource, exception));
                    }
                }
            }
        }

        if (AdapterFactoryEditingDomain.isStale(editorSelection)) {
            setSelection(StructuredSelection.EMPTY);
        }

        updateProblemIndication = true;
        updateProblemIndication();
    }
}

/**
 * Updates the problems indication with the information described in the specified diagnostic.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected void updateProblemIndication() {
    if (updateProblemIndication) {
        BasicDiagnostic diagnostic =
            new BasicDiagnostic
                (Diagnostic.OK,
                 "de.hub.citygml.emf.ecore.editor",
                 0,
                 null,
                 new Object [] { editingDomain.getResourceSet() });
        for (Diagnostic childDiagnostic : resourceToDiagnosticMap.values()) {
            if (childDiagnostic.getSeverity() != Diagnostic.OK) {
                diagnostic.add(childDiagnostic);
            }
        }

        // Reuse an existing ProblemEditorPart page if one is already the last
        // page; otherwise create one only when there is something to report.
        int lastEditorPage = getPageCount() - 1;
        if (lastEditorPage >= 0 && getEditor(lastEditorPage) instanceof ProblemEditorPart) {
            ((ProblemEditorPart)getEditor(lastEditorPage)).setDiagnostic(diagnostic);
            if (diagnostic.getSeverity() != Diagnostic.OK) {
                setActivePage(lastEditorPage);
            }
        }
        else if (diagnostic.getSeverity() != Diagnostic.OK) {
            ProblemEditorPart problemEditorPart = new ProblemEditorPart();
            problemEditorPart.setDiagnostic(diagnostic);
            problemEditorPart.setMarkerHelper(markerHelper);
            try {
                addPage(++lastEditorPage, problemEditorPart, getEditorInput());
                setPageText(lastEditorPage, problemEditorPart.getPartName());
                setActivePage(lastEditorPage);
                showTabs();
            }
            catch (PartInitException exception) {
                CityGMLEditorPlugin.INSTANCE.log(exception);
            }
        }

        // Mirror the diagnostics as workspace problem markers.
        if (markerHelper.hasMarkers(editingDomain.getResourceSet())) {
            markerHelper.deleteMarkers(editingDomain.getResourceSet());
            if (diagnostic.getSeverity() != Diagnostic.OK) {
                try {
                    markerHelper.createMarkers(diagnostic);
                }
                catch (CoreException exception) {
                    CityGMLEditorPlugin.INSTANCE.log(exception);
                }
            }
        }
    }
}

/**
 * Shows a dialog that asks if conflicting changes should be discarded.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected boolean handleDirtyConflict() {
    return
        MessageDialog.openQuestion
            (getSite().getShell(),
             getString("_UI_FileConflict_label"),
             getString("_WARN_FileConflict"));
}

/**
 * This creates a model editor.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public GmlEditor() {
    super();
    initializeEditingDomain();
}

/**
 * This sets up the editing domain for the model editor.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected void initializeEditingDomain() {
    // Create an adapter factory that yields item providers.
    // NOTE(review): registration order determines adapter lookup precedence;
    // the reflective factory is deliberately last as the fallback.
    //
    adapterFactory = new ComposedAdapterFactory(ComposedAdapterFactory.Descriptor.Registry.INSTANCE);

    adapterFactory.addAdapterFactory(new ResourceItemProviderAdapterFactory());
    adapterFactory.addAdapterFactory(new BuildingItemProviderAdapterFactory());
    adapterFactory.addAdapterFactory(new CitygmlItemProviderAdapterFactory());
    adapterFactory.addAdapterFactory(new GmlItemProviderAdapterFactory());
    adapterFactory.addAdapterFactory(new XlinkItemProviderAdapterFactory());
    adapterFactory.addAdapterFactory(new XALItemProviderAdapterFactory());
    adapterFactory.addAdapterFactory(new TexturedsurfaceItemProviderAdapterFactory());
    adapterFactory.addAdapterFactory(new AppearanceItemProviderAdapterFactory());
    adapterFactory.addAdapterFactory(new TransportationItemProviderAdapterFactory());
    adapterFactory.addAdapterFactory(new ReliefItemProviderAdapterFactory());
    adapterFactory.addAdapterFactory(new CityfurnitureItemProviderAdapterFactory());
    adapterFactory.addAdapterFactory(new CityobjectgroupItemProviderAdapterFactory());
    adapterFactory.addAdapterFactory(new LanduseItemProviderAdapterFactory());
    adapterFactory.addAdapterFactory(new VegetationItemProviderAdapterFactory());
    adapterFactory.addAdapterFactory(new WaterbodyItemProviderAdapterFactory());
    adapterFactory.addAdapterFactory(new GenericsItemProviderAdapterFactory());
    adapterFactory.addAdapterFactory(new Smil20ItemProviderAdapterFactory());
    adapterFactory.addAdapterFactory(new LanguageItemProviderAdapterFactory());
    adapterFactory.addAdapterFactory(new NamespaceItemProviderAdapterFactory());
    adapterFactory.addAdapterFactory(new ReflectiveItemProviderAdapterFactory());

    // Create the command stack that will notify this editor as commands are executed.
    //
    BasicCommandStack commandStack = new BasicCommandStack();

    // Add a listener to set the most recent command's affected objects to be the selection of the viewer with focus.
    //
    commandStack.addCommandStackListener
        (new CommandStackListener() {
             public void commandStackChanged(final EventObject event) {
                 getContainer().getDisplay().asyncExec
                     (new Runnable() {
                          public void run() {
                              firePropertyChange(IEditorPart.PROP_DIRTY);

                              // Try to select the affected objects.
                              //
                              Command mostRecentCommand = ((CommandStack)event.getSource()).getMostRecentCommand();
                              if (mostRecentCommand != null) {
                                  setSelectionToViewer(mostRecentCommand.getAffectedObjects());
                              }
                              if (propertySheetPage != null && !propertySheetPage.getControl().isDisposed()) {
                                  propertySheetPage.refresh();
                              }
                          }
                      });
             }
         });

    // Create the editing domain with a special command stack.
    //
    editingDomain = new AdapterFactoryEditingDomain(adapterFactory, commandStack, new HashMap<Resource, Boolean>());
}

/**
 * This is here for the listener to be able to call it.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
protected void firePropertyChange(int action) {
    super.firePropertyChange(action);
}

/**
 * This sets the selection into whichever viewer is active.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public void setSelectionToViewer(Collection<?> collection) {
    final Collection<?> theSelection = collection;
    // Make sure it's okay.
    //
    if (theSelection != null && !theSelection.isEmpty()) {
        Runnable runnable =
            new Runnable() {
                public void run() {
                    // Try to select the items in the current content viewer of the editor.
                    //
                    if (currentViewer != null) {
                        currentViewer.setSelection(new StructuredSelection(theSelection.toArray()), true);
                    }
                }
            };
        getSite().getShell().getDisplay().asyncExec(runnable);
    }
}

/**
 * This returns the editing domain as required by the {@link IEditingDomainProvider} interface.
 * This is important for implementing the static methods of {@link AdapterFactoryEditingDomain}
 * and for supporting {@link org.eclipse.emf.edit.ui.action.CommandAction}.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public EditingDomain getEditingDomain() {
    return editingDomain;
}

/**
 * Content provider that inverts the normal parent/child direction:
 * the "children" of an object are its single parent, so a viewer using
 * it renders ancestry as a tree (used by the parent page).
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public class ReverseAdapterFactoryContentProvider extends AdapterFactoryContentProvider {
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public ReverseAdapterFactoryContentProvider(AdapterFactory adapterFactory) {
        super(adapterFactory);
    }

    /**
     * Returns the object's parent as the sole element (or nothing at a root).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object [] getElements(Object object) {
        Object parent = super.getParent(object);
        return (parent == null ? Collections.EMPTY_SET : Collections.singleton(parent)).toArray();
    }

    /**
     * Same as {@link #getElements(Object)}: the only "child" is the parent.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object [] getChildren(Object object) {
        Object parent = super.getParent(object);
        return (parent == null ? Collections.EMPTY_SET : Collections.singleton(parent)).toArray();
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean hasChildren(Object object) {
        Object parent = super.getParent(object);
        return parent != null;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object getParent(Object object) {
        return null;
    }
}

/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public void setCurrentViewerPane(ViewerPane viewerPane) {
    if (currentViewerPane != viewerPane) {
        if (currentViewerPane != null) {
            currentViewerPane.showFocus(false);
        }
        currentViewerPane = viewerPane;
    }
    // NOTE(review): dereferences currentViewerPane unconditionally — assumes
    // callers never pass null when no pane was previously set; confirm.
    setCurrentViewer(currentViewerPane.getViewer());
}

/**
 * This makes sure that one content viewer, either for the current page or the outline view, if it has focus,
 * is the current one.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public void setCurrentViewer(Viewer viewer) {
    // If it is changing...
    //
    if (currentViewer != viewer) {
        if (selectionChangedListener == null) {
            // Create the listener on demand.
            //
            selectionChangedListener =
                new ISelectionChangedListener() {
                    // This just notifies those things that are affected by the section.
                    //
                    public void selectionChanged(SelectionChangedEvent selectionChangedEvent) {
                        setSelection(selectionChangedEvent.getSelection());
                    }
                };
        }

        // Stop listening to the old one.
        //
        if (currentViewer != null) {
            currentViewer.removeSelectionChangedListener(selectionChangedListener);
        }

        // Start listening to the new one.
        //
        if (viewer != null) {
            viewer.addSelectionChangedListener(selectionChangedListener);
        }

        // Remember it.
        //
        currentViewer = viewer;

        // Set the editors selection based on the current viewer's selection.
        //
        setSelection(currentViewer == null ? StructuredSelection.EMPTY : currentViewer.getSelection());
    }
}

/**
 * This returns the viewer as required by the {@link IViewerProvider} interface.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public Viewer getViewer() {
    return currentViewer;
}

/**
 * This creates a context menu for the viewer and adds a listener as well registering the menu for extension.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected void createContextMenuFor(StructuredViewer viewer) {
    MenuManager contextMenu = new MenuManager("#PopUp");
    contextMenu.add(new Separator("additions"));
    contextMenu.setRemoveAllWhenShown(true);
    contextMenu.addMenuListener(this);
    Menu menu = contextMenu.createContextMenu(viewer.getControl());
    viewer.getControl().setMenu(menu);
    // Registering makes the menu extensible via the plugin.xml "popupMenus" mechanism.
    getSite().registerContextMenu(contextMenu, new UnwrappingSelectionProvider(viewer));

    // Wire up model-aware drag and drop for the viewer.
    int dndOperations = DND.DROP_COPY | DND.DROP_MOVE | DND.DROP_LINK;
    Transfer[] transfers = new Transfer[] { LocalTransfer.getInstance() };
    viewer.addDragSupport(dndOperations, transfers, new ViewerDragAdapter(viewer));
    viewer.addDropSupport(dndOperations, transfers, new EditingDomainViewerDropAdapter(editingDomain, viewer));
}

/**
 * This is the method called to load a resource into the editing domain's resource set based on the editor's input.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public void createModel() {
    URI resourceURI = EditUIUtil.getURI(getEditorInput());
    Exception exception = null;
    Resource resource = null;
    try {
        // Load the resource through the editing domain.
        //
        resource = editingDomain.getResourceSet().getResource(resourceURI, true);
    }
    catch (Exception e) {
        // Keep the (partially loaded) resource so its problems can be reported.
        exception = e;
        resource = editingDomain.getResourceSet().getResource(resourceURI, false);
    }

    Diagnostic diagnostic = analyzeResourceProblems(resource, exception);
    if (diagnostic.getSeverity() != Diagnostic.OK) {
        resourceToDiagnosticMap.put(resource, analyzeResourceProblems(resource, exception));
    }
    editingDomain.getResourceSet().eAdapters().add(problemIndicationAdapter);
}

/**
 * Returns a diagnostic describing the errors and warnings listed in the resource
 * and the specified exception (if any).
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public Diagnostic analyzeResourceProblems(Resource resource, Exception exception) {
    if (!resource.getErrors().isEmpty() || !resource.getWarnings().isEmpty()) {
        BasicDiagnostic basicDiagnostic =
            new BasicDiagnostic
                (Diagnostic.ERROR,
                 "de.hub.citygml.emf.ecore.editor",
                 0,
                 getString("_UI_CreateModelError_message", resource.getURI()),
                 new Object [] { exception == null ? (Object)resource : exception });
        basicDiagnostic.merge(EcoreUtil.computeDiagnostic(resource, true));
        return basicDiagnostic;
    }
    else if (exception != null) {
        return new BasicDiagnostic
            (Diagnostic.ERROR,
             "de.hub.citygml.emf.ecore.editor",
             0,
             getString("_UI_CreateModelError_message", resource.getURI()),
             new Object[] { exception });
    }
    else {
        return Diagnostic.OK_INSTANCE;
    }
}

/**
 * This is the method used by the framework to install your own controls.
 * Builds one page per viewer flavor (selection tree, parent tree, list,
 * tree, table, tree-with-columns) over the shared editing domain.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public void createPages() {
    // Creates the model from the editor input
    //
    createModel();

    // Only creates the other pages if there is something that can be edited
    //
    if (!getEditingDomain().getResourceSet().getResources().isEmpty()) {
        // Create a page for the selection tree view.
        //
        {
            ViewerPane viewerPane =
                new ViewerPane(getSite().getPage(), GmlEditor.this) {
                    @Override
                    public Viewer createViewer(Composite composite) {
                        Tree tree = new Tree(composite, SWT.MULTI);
                        TreeViewer newTreeViewer = new TreeViewer(tree);
                        return newTreeViewer;
                    }
                    @Override
                    public void requestActivation() {
                        super.requestActivation();
                        setCurrentViewerPane(this);
                    }
                };
            viewerPane.createControl(getContainer());

            selectionViewer = (TreeViewer)viewerPane.getViewer();
            selectionViewer.setContentProvider(new AdapterFactoryContentProvider(adapterFactory));

            selectionViewer.setLabelProvider(new AdapterFactoryLabelProvider(adapterFactory));
            selectionViewer.setInput(editingDomain.getResourceSet());
            selectionViewer.setSelection(new StructuredSelection(editingDomain.getResourceSet().getResources().get(0)), true);
            viewerPane.setTitle(editingDomain.getResourceSet());

            new AdapterFactoryTreeEditor(selectionViewer.getTree(), adapterFactory);

            createContextMenuFor(selectionViewer);
            int pageIndex = addPage(viewerPane.getControl());
            setPageText(pageIndex, getString("_UI_SelectionPage_label"));
        }

        // Create a page for the parent tree view.
        //
        {
            ViewerPane viewerPane =
                new ViewerPane(getSite().getPage(), GmlEditor.this) {
                    @Override
                    public Viewer createViewer(Composite composite) {
                        Tree tree = new Tree(composite, SWT.MULTI);
                        TreeViewer newTreeViewer = new TreeViewer(tree);
                        return newTreeViewer;
                    }
                    @Override
                    public void requestActivation() {
                        super.requestActivation();
                        setCurrentViewerPane(this);
                    }
                };
            viewerPane.createControl(getContainer());

            parentViewer = (TreeViewer)viewerPane.getViewer();
            parentViewer.setAutoExpandLevel(30);
            // Reverse provider: shows the selected object's ancestry as the tree.
            parentViewer.setContentProvider(new ReverseAdapterFactoryContentProvider(adapterFactory));
            parentViewer.setLabelProvider(new AdapterFactoryLabelProvider(adapterFactory));

            createContextMenuFor(parentViewer);
            int pageIndex = addPage(viewerPane.getControl());
            setPageText(pageIndex, getString("_UI_ParentPage_label"));
        }

        // This is the page for the list viewer
        //
        {
            ViewerPane viewerPane =
                new ViewerPane(getSite().getPage(), GmlEditor.this) {
                    @Override
                    public Viewer createViewer(Composite composite) {
                        return new ListViewer(composite);
                    }
                    @Override
                    public void requestActivation() {
                        super.requestActivation();
                        setCurrentViewerPane(this);
                    }
                };
            viewerPane.createControl(getContainer());

            listViewer = (ListViewer)viewerPane.getViewer();
            listViewer.setContentProvider(new AdapterFactoryContentProvider(adapterFactory));
            listViewer.setLabelProvider(new AdapterFactoryLabelProvider(adapterFactory));

            createContextMenuFor(listViewer);
            int pageIndex = addPage(viewerPane.getControl());
            setPageText(pageIndex, getString("_UI_ListPage_label"));
        }

        // This is the page for the tree viewer
        //
        {
            ViewerPane viewerPane =
                new ViewerPane(getSite().getPage(), GmlEditor.this) {
                    @Override
                    public Viewer createViewer(Composite composite) {
                        return new TreeViewer(composite);
                    }
                    @Override
                    public void requestActivation() {
                        super.requestActivation();
                        setCurrentViewerPane(this);
                    }
                };
            viewerPane.createControl(getContainer());

            treeViewer = (TreeViewer)viewerPane.getViewer();
            treeViewer.setContentProvider(new AdapterFactoryContentProvider(adapterFactory));
            treeViewer.setLabelProvider(new AdapterFactoryLabelProvider(adapterFactory));

            new AdapterFactoryTreeEditor(treeViewer.getTree(), adapterFactory);

            createContextMenuFor(treeViewer);
            int pageIndex = addPage(viewerPane.getControl());
            setPageText(pageIndex, getString("_UI_TreePage_label"));
        }

        // This is the page for the table viewer.
        //
        {
            ViewerPane viewerPane =
                new ViewerPane(getSite().getPage(), GmlEditor.this) {
                    @Override
                    public Viewer createViewer(Composite composite) {
                        return new TableViewer(composite);
                    }
                    @Override
                    public void requestActivation() {
                        super.requestActivation();
                        setCurrentViewerPane(this);
                    }
                };
            viewerPane.createControl(getContainer());

            tableViewer = (TableViewer)viewerPane.getViewer();

            Table table = tableViewer.getTable();
            TableLayout layout = new TableLayout();
            table.setLayout(layout);
            table.setHeaderVisible(true);
            table.setLinesVisible(true);

            TableColumn objectColumn = new TableColumn(table, SWT.NONE);
            layout.addColumnData(new ColumnWeightData(3, 100, true));
            objectColumn.setText(getString("_UI_ObjectColumn_label"));
            objectColumn.setResizable(true);

            TableColumn selfColumn = new TableColumn(table, SWT.NONE);
            layout.addColumnData(new ColumnWeightData(2, 100, true));
            selfColumn.setText(getString("_UI_SelfColumn_label"));
            selfColumn.setResizable(true);

            tableViewer.setColumnProperties(new String [] {"a", "b"});
            tableViewer.setContentProvider(new AdapterFactoryContentProvider(adapterFactory));
            tableViewer.setLabelProvider(new AdapterFactoryLabelProvider(adapterFactory));

            createContextMenuFor(tableViewer);
            int pageIndex = addPage(viewerPane.getControl());
            setPageText(pageIndex, getString("_UI_TablePage_label"));
        }

        // This is the page for the table tree viewer.
        //
        {
            ViewerPane viewerPane =
                new ViewerPane(getSite().getPage(), GmlEditor.this) {
                    @Override
                    public Viewer createViewer(Composite composite) {
                        return new TreeViewer(composite);
                    }
                    @Override
                    public void requestActivation() {
                        super.requestActivation();
                        setCurrentViewerPane(this);
                    }
                };
            viewerPane.createControl(getContainer());

            treeViewerWithColumns = (TreeViewer)viewerPane.getViewer();

            Tree tree = treeViewerWithColumns.getTree();
            // NOTE(review): FillLayout is a Layout, not layout *data*; this
            // generated call looks ineffective — confirm before relying on it.
            tree.setLayoutData(new FillLayout());
            tree.setHeaderVisible(true);
            tree.setLinesVisible(true);

            TreeColumn objectColumn = new TreeColumn(tree, SWT.NONE);
            objectColumn.setText(getString("_UI_ObjectColumn_label"));
            objectColumn.setResizable(true);
            objectColumn.setWidth(250);

            TreeColumn selfColumn = new TreeColumn(tree, SWT.NONE);
            selfColumn.setText(getString("_UI_SelfColumn_label"));
            selfColumn.setResizable(true);
            selfColumn.setWidth(200);

            treeViewerWithColumns.setColumnProperties(new String [] {"a", "b"});
            treeViewerWithColumns.setContentProvider(new AdapterFactoryContentProvider(adapterFactory));
            treeViewerWithColumns.setLabelProvider(new AdapterFactoryLabelProvider(adapterFactory));

            createContextMenuFor(treeViewerWithColumns);
            int pageIndex = addPage(viewerPane.getControl());
            setPageText(pageIndex, getString("_UI_TreeWithColumnsPage_label"));
        }

        getSite().getShell().getDisplay().asyncExec
            (new Runnable() {
                 public void run() {
                     setActivePage(0);
                 }
             });
    }

    // Ensures that this editor will only display the page's tab
    // area if there are more than one page
    //
    getContainer().addControlListener
        (new ControlAdapter() {
            boolean guard = false; // re-entrancy guard: hideTabs() resizes, which fires controlResized again
            @Override
            public void controlResized(ControlEvent event) {
                if (!guard) {
                    guard = true;
                    hideTabs();
                    guard = false;
                }
            }
         });

    getSite().getShell().getDisplay().asyncExec
        (new Runnable() {
             public void run() {
                 updateProblemIndication();
             }
         });
}

/**
 * If there is just one page in the multi-page editor part,
 * this hides the single tab at the bottom.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected void hideTabs() {
		if (getPageCount() <= 1) {
			setPageText(0, "");
			if (getContainer() instanceof CTabFolder) {
				// Collapse the tab strip and reclaim its vertical space.
				((CTabFolder)getContainer()).setTabHeight(1);
				Point point = getContainer().getSize();
				getContainer().setSize(point.x, point.y + 6);
			}
		}
	}

	/**
	 * If there is more than one page in the multi-page editor part,
	 * this shows the tabs at the bottom.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected void showTabs() {
		if (getPageCount() > 1) {
			setPageText(0, getString("_UI_SelectionPage_label"));
			if (getContainer() instanceof CTabFolder) {
				// Restore the default tab height and give back the space taken in hideTabs().
				((CTabFolder)getContainer()).setTabHeight(SWT.DEFAULT);
				Point point = getContainer().getSize();
				getContainer().setSize(point.x, point.y - 6);
			}
		}
	}

	/**
	 * This is used to track the active viewer.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected void pageChange(int pageIndex) {
		super.pageChange(pageIndex);

		// Re-apply the outline's selection so the newly shown page reflects it.
		if (contentOutlinePage != null) {
			handleContentOutlineSelection(contentOutlinePage.getSelection());
		}
	}

	/**
	 * This is how the framework determines which interfaces we implement.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@SuppressWarnings("rawtypes")
	@Override
	public Object getAdapter(Class key) {
		if (key.equals(IContentOutlinePage.class)) {
			return showOutlineView() ? getContentOutlinePage() : null;
		}
		else if (key.equals(IPropertySheetPage.class)) {
			return getPropertySheetPage();
		}
		else if (key.equals(IGotoMarker.class)) {
			return this;
		}
		else {
			return super.getAdapter(key);
		}
	}

	/**
	 * This accesses a cached version of the content outliner.
	 * The page is created lazily on first request and reused afterwards.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public IContentOutlinePage getContentOutlinePage() {
		if (contentOutlinePage == null) {
			// The content outline is just a tree.
			//
			class MyContentOutlinePage extends ContentOutlinePage {
				@Override
				public void createControl(Composite parent) {
					super.createControl(parent);
					contentOutlineViewer = getTreeViewer();
					contentOutlineViewer.addSelectionChangedListener(this);

					// Set up the tree viewer.
					//
					contentOutlineViewer.setContentProvider(new AdapterFactoryContentProvider(adapterFactory));
					contentOutlineViewer.setLabelProvider(new AdapterFactoryLabelProvider(adapterFactory));
					contentOutlineViewer.setInput(editingDomain.getResourceSet());

					// Make sure our popups work.
					//
					createContextMenuFor(contentOutlineViewer);

					if (!editingDomain.getResourceSet().getResources().isEmpty()) {
						// Select the root object in the view.
						//
						contentOutlineViewer.setSelection(new StructuredSelection(editingDomain.getResourceSet().getResources().get(0)), true);
					}
				}

				@Override
				public void makeContributions(IMenuManager menuManager, IToolBarManager toolBarManager, IStatusLineManager statusLineManager) {
					super.makeContributions(menuManager, toolBarManager, statusLineManager);
					// Remember the outline's status line so setStatusLineManager() can target it.
					contentOutlineStatusLineManager = statusLineManager;
				}

				@Override
				public void setActionBars(IActionBars actionBars) {
					super.setActionBars(actionBars);
					getActionBarContributor().shareGlobalActions(this, actionBars);
				}
			}

			contentOutlinePage = new MyContentOutlinePage();

			// Listen to selection so that we can handle it in a special way.
			//
			contentOutlinePage.addSelectionChangedListener
				(new ISelectionChangedListener() {
					 // This ensures that we handle selections correctly.
					 //
					 public void selectionChanged(SelectionChangedEvent event) {
						 handleContentOutlineSelection(event.getSelection());
					 }
				 });
		}

		return contentOutlinePage;
	}

	/**
	 * This accesses a cached version of the property sheet.
	 * The page is created lazily on first request and reused afterwards.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public IPropertySheetPage getPropertySheetPage() {
		if (propertySheetPage == null) {
			propertySheetPage =
				new ExtendedPropertySheetPage(editingDomain) {
					@Override
					public void setSelectionToViewer(List<?> selection) {
						GmlEditor.this.setSelectionToViewer(selection);
						GmlEditor.this.setFocus();
					}

					@Override
					public void setActionBars(IActionBars actionBars) {
						super.setActionBars(actionBars);
						getActionBarContributor().shareGlobalActions(this, actionBars);
					}
				};
			propertySheetPage.setPropertySourceProvider(new AdapterFactoryContentProvider(adapterFactory));
		}

		return propertySheetPage;
	}

	/**
	 * This deals with how we want selection in the outliner to affect the other views.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void handleContentOutlineSelection(ISelection selection) {
		if (currentViewerPane != null && !selection.isEmpty() && selection instanceof IStructuredSelection) {
			Iterator<?> selectedElements = ((IStructuredSelection)selection).iterator();
			if (selectedElements.hasNext()) {
				// Get the first selected element.
				//
				Object selectedElement = selectedElements.next();

				// If it's the selection viewer, then we want it to select the same selection as this selection.
				//
				if (currentViewerPane.getViewer() == selectionViewer) {
					ArrayList<Object> selectionList = new ArrayList<Object>();
					selectionList.add(selectedElement);
					while (selectedElements.hasNext()) {
						selectionList.add(selectedElements.next());
					}

					// Set the selection to the widget.
					//
					selectionViewer.setSelection(new StructuredSelection(selectionList));
				}
				else {
					// Set the input to the widget.
					//
					if (currentViewerPane.getViewer().getInput() != selectedElement) {
						currentViewerPane.getViewer().setInput(selectedElement);
						currentViewerPane.setTitle(selectedElement);
					}
				}
			}
		}
	}

	/**
	 * This is for implementing {@link IEditorPart} and simply tests the command stack.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public boolean isDirty() {
		return ((BasicCommandStack)editingDomain.getCommandStack()).isSaveNeeded();
	}

	/**
	 * This is for implementing {@link IEditorPart} and simply saves the model file.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void doSave(IProgressMonitor progressMonitor) {
		// Save only resources that have actually changed.
		//
		final Map<Object, Object> saveOptions = new HashMap<Object, Object>();
		saveOptions.put(Resource.OPTION_SAVE_ONLY_IF_CHANGED, Resource.OPTION_SAVE_ONLY_IF_CHANGED_MEMORY_BUFFER);

		// Do the work within an operation because this is a long running activity that modifies the workbench.
		//
		WorkspaceModifyOperation operation =
			new WorkspaceModifyOperation() {
				// This is the method that gets invoked when the operation runs.
				//
				@Override
				public void execute(IProgressMonitor monitor) {
					// Save the resources to the file system.
					//
					boolean first = true;
					for (Resource resource : editingDomain.getResourceSet().getResources()) {
						// The first (primary) resource is always saved; others only if non-empty or already persisted.
						if ((first || !resource.getContents().isEmpty() || isPersisted(resource)) && !editingDomain.isReadOnly(resource)) {
							try {
								long timeStamp = resource.getTimeStamp();
								resource.save(saveOptions);
								if (resource.getTimeStamp() != timeStamp) {
									savedResources.add(resource);
								}
							}
							catch (Exception exception) {
								// Record the problem instead of aborting; it will surface via the problem indication.
								resourceToDiagnosticMap.put(resource, analyzeResourceProblems(resource, exception));
							}
							first = false;
						}
					}
				}
			};

		// Suppress problem-indication updates while the save runs.
		updateProblemIndication = false;
		try {
			// This runs the options, and shows progress.
			//
			new ProgressMonitorDialog(getSite().getShell()).run(true, false, operation);

			// Refresh the necessary state.
			//
			((BasicCommandStack)editingDomain.getCommandStack()).saveIsDone();
			firePropertyChange(IEditorPart.PROP_DIRTY);
		}
		catch (Exception exception) {
			// Something went wrong that shouldn't.
			//
			CityGMLEditorPlugin.INSTANCE.log(exception);
		}
		updateProblemIndication = true;
		updateProblemIndication();
	}

	/**
	 * This returns whether something has been persisted to the URI of the specified resource.
	 * The implementation uses the URI converter from the editor's resource set to try to open an input stream.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected boolean isPersisted(Resource resource) {
		boolean result = false;
		try {
			InputStream stream = editingDomain.getResourceSet().getURIConverter().createInputStream(resource.getURI());
			if (stream != null) {
				result = true;
				stream.close();
			}
		}
		catch (IOException e) {
			// Ignore: failure to open the stream simply means nothing is persisted at the URI.
		}
		return result;
	}

	/**
	 * Save As is supported by this editor, so this always returns true.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public boolean isSaveAsAllowed() {
		return true;
	}

	/**
	 * This also changes the editor's input.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void doSaveAs() {
		SaveAsDialog saveAsDialog = new SaveAsDialog(getSite().getShell());
		saveAsDialog.open();
		IPath path = saveAsDialog.getResult();
		if (path != null) {
			IFile file = ResourcesPlugin.getWorkspace().getRoot().getFile(path);
			if (file != null) {
				doSaveAs(URI.createPlatformResourceURI(file.getFullPath().toString(), true), new FileEditorInput(file));
			}
		}
	}

	/**
	 * Retargets the primary resource at the given URI, updates the editor input,
	 * and performs a save.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected void doSaveAs(URI uri, IEditorInput editorInput) {
		(editingDomain.getResourceSet().getResources().get(0)).setURI(uri);
		setInputWithNotify(editorInput);
		setPartName(editorInput.getName());
		IProgressMonitor progressMonitor =
			getActionBars().getStatusLineManager() != null ?
				getActionBars().getStatusLineManager().getProgressMonitor() :
				new NullProgressMonitor();
		doSave(progressMonitor);
	}

	/**
	 * Navigates the editor selection to the object referenced by a validation marker.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void gotoMarker(IMarker marker) {
		try {
			if (marker.getType().equals(EValidator.MARKER)) {
				String uriAttribute = marker.getAttribute(EValidator.URI_ATTRIBUTE, null);
				if (uriAttribute != null) {
					URI uri = URI.createURI(uriAttribute);
					EObject eObject = editingDomain.getResourceSet().getEObject(uri, true);
					if (eObject != null) {
						setSelectionToViewer(Collections.singleton(editingDomain.getWrapper(eObject)));
					}
				}
			}
		}
		catch (CoreException exception) {
			CityGMLEditorPlugin.INSTANCE.log(exception);
		}
	}

	/**
	 * This is called during startup.
	 * Registers this editor as the selection provider and starts listening for
	 * workspace resource changes and part lifecycle events.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void init(IEditorSite site, IEditorInput editorInput) {
		setSite(site);
		setInputWithNotify(editorInput);
		setPartName(editorInput.getName());
		site.setSelectionProvider(this);
		site.getPage().addPartListener(partListener);
		ResourcesPlugin.getWorkspace().addResourceChangeListener(resourceChangeListener, IResourceChangeEvent.POST_CHANGE);
	}

	/**
	 * Directs focus to the active viewer pane, falling back to the current page control.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void setFocus() {
		if (currentViewerPane != null) {
			currentViewerPane.setFocus();
		}
		else {
			getControl(getActivePage()).setFocus();
		}
	}

	/**
	 * This implements {@link org.eclipse.jface.viewers.ISelectionProvider}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void addSelectionChangedListener(ISelectionChangedListener listener) {
		selectionChangedListeners.add(listener);
	}

	/**
	 * This implements {@link org.eclipse.jface.viewers.ISelectionProvider}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void removeSelectionChangedListener(ISelectionChangedListener listener) {
		selectionChangedListeners.remove(listener);
	}

	/**
	 * This implements {@link org.eclipse.jface.viewers.ISelectionProvider} to return this editor's overall selection.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public ISelection getSelection() {
		return editorSelection;
	}

	/**
	 * This implements {@link org.eclipse.jface.viewers.ISelectionProvider} to set this editor's overall selection.
	 * Calling this method will notify the listeners.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setSelection(ISelection selection) {
		editorSelection = selection;

		for (ISelectionChangedListener listener : selectionChangedListeners) {
			listener.selectionChanged(new SelectionChangedEvent(this, selection));
		}
		setStatusLineManager(selection);
	}

	/**
	 * Updates the status line (the outline's, when the outline viewer is current)
	 * with a summary of the given selection.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setStatusLineManager(ISelection selection) {
		IStatusLineManager statusLineManager =
			currentViewer != null && currentViewer == contentOutlineViewer ?
				contentOutlineStatusLineManager :
				getActionBars().getStatusLineManager();

		if (statusLineManager != null) {
			if (selection instanceof IStructuredSelection) {
				Collection<?> collection = ((IStructuredSelection)selection).toList();
				switch (collection.size()) {
					case 0: {
						statusLineManager.setMessage(getString("_UI_NoObjectSelected"));
						break;
					}
					case 1: {
						String text = new AdapterFactoryItemDelegator(adapterFactory).getText(collection.iterator().next());
						statusLineManager.setMessage(getString("_UI_SingleObjectSelected", text));
						break;
					}
					default: {
						statusLineManager.setMessage(getString("_UI_MultiObjectSelected", Integer.toString(collection.size())));
						break;
					}
				}
			}
			else {
				statusLineManager.setMessage("");
			}
		}
	}

	/**
	 * This looks up a string in the plugin's plugin.properties file.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	private static String getString(String key) {
		return CityGMLEditorPlugin.INSTANCE.getString(key);
	}

	/**
	 * This looks up a string in plugin.properties, making a substitution.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	private static String getString(String key, Object s1) {
		return CityGMLEditorPlugin.INSTANCE.getString(key, new Object [] { s1 });
	}

	/**
	 * This implements {@link org.eclipse.jface.action.IMenuListener} to help fill the context menus with contributions from the Edit menu.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void menuAboutToShow(IMenuManager menuManager) {
		((IMenuListener)getEditorSite().getActionBarContributor()).menuAboutToShow(menuManager);
	}

	/**
	 * Returns this editor's action bar contributor.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EditingDomainActionBarContributor getActionBarContributor() {
		return (EditingDomainActionBarContributor)getEditorSite().getActionBarContributor();
	}

	/**
	 * Returns the action bars via the action bar contributor.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public IActionBars getActionBars() {
		return getActionBarContributor().getActionBars();
	}

	/**
	 * Returns the adapter factory used by all the viewers in this editor.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public AdapterFactory getAdapterFactory() {
		return adapterFactory;
	}

	/**
	 * Unregisters listeners and disposes the pages and factories owned by this editor.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void dispose() {
		updateProblemIndication = false;

		ResourcesPlugin.getWorkspace().removeResourceChangeListener(resourceChangeListener);

		getSite().getPage().removePartListener(partListener);

		adapterFactory.dispose();

		if (getActionBarContributor().getActiveEditor() == this) {
			getActionBarContributor().setActiveEditor(null);
		}

		if (propertySheetPage != null) {
			propertySheetPage.dispose();
		}

		if (contentOutlinePage != null) {
			contentOutlinePage.dispose();
		}

		super.dispose();
	}

	/**
	 * Returns whether the outline view should be presented to the user.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected boolean showOutlineView() {
		return true;
	}
}
/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.android.exoplayer2.source;

import static com.google.android.exoplayer2.util.Assertions.checkNotNull;

import android.net.Uri;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.Timeline;
import com.google.android.exoplayer2.util.Assertions;

/** A {@link Timeline} consisting of a single period and static window. */
public final class SinglePeriodTimeline extends Timeline {

  // Shared UID for the single period; also used by getIndexOfPeriod to recognize it.
  private static final Object UID = new Object();
  // Placeholder media item used by the deprecated tag-based constructors.
  private static final MediaItem MEDIA_ITEM =
      new MediaItem.Builder().setMediaId("SinglePeriodTimeline").setUri(Uri.EMPTY).build();

  private final long presentationStartTimeMs;
  private final long windowStartTimeMs;
  private final long elapsedRealtimeEpochOffsetMs;
  private final long periodDurationUs;
  private final long windowDurationUs;
  private final long windowPositionInPeriodUs;
  private final long windowDefaultStartPositionUs;
  private final boolean isSeekable;
  private final boolean isDynamic;
  @Nullable private final Object manifest;
  @Nullable private final MediaItem mediaItem;
  @Nullable private final MediaItem.LiveConfiguration liveConfiguration;

  /**
   * @deprecated Use {@link #SinglePeriodTimeline(long, boolean, boolean, boolean, Object,
   *     MediaItem)} instead.
   */
  // Provide backwards compatibility.
  @SuppressWarnings("deprecation")
  @Deprecated
  public SinglePeriodTimeline(
      long durationUs,
      boolean isSeekable,
      boolean isDynamic,
      boolean isLive,
      @Nullable Object manifest,
      @Nullable Object tag) {
    this(
        durationUs,
        durationUs,
        /* windowPositionInPeriodUs= */ 0,
        /* windowDefaultStartPositionUs= */ 0,
        isSeekable,
        isDynamic,
        isLive,
        manifest,
        tag);
  }

  /**
   * Creates a timeline containing a single period and a window that spans it.
   *
   * @param durationUs The duration of the period, in microseconds.
   * @param isSeekable Whether seeking is supported within the period.
   * @param isDynamic Whether the window may change when the timeline is updated.
   * @param useLiveConfiguration Whether the window is live and {@link MediaItem#liveConfiguration}
   *     is used to configure live playback behaviour.
   * @param manifest The manifest. May be {@code null}.
   * @param mediaItem A media item used for {@link Window#mediaItem}.
   */
  public SinglePeriodTimeline(
      long durationUs,
      boolean isSeekable,
      boolean isDynamic,
      boolean useLiveConfiguration,
      @Nullable Object manifest,
      MediaItem mediaItem) {
    this(
        durationUs,
        durationUs,
        /* windowPositionInPeriodUs= */ 0,
        /* windowDefaultStartPositionUs= */ 0,
        isSeekable,
        isDynamic,
        useLiveConfiguration,
        manifest,
        mediaItem);
  }

  /**
   * @deprecated Use {@link #SinglePeriodTimeline(long, long, long, long, boolean, boolean, boolean,
   *     Object, MediaItem)} instead.
   */
  // Provide backwards compatibility.
  @SuppressWarnings("deprecation")
  @Deprecated
  public SinglePeriodTimeline(
      long periodDurationUs,
      long windowDurationUs,
      long windowPositionInPeriodUs,
      long windowDefaultStartPositionUs,
      boolean isSeekable,
      boolean isDynamic,
      boolean isLive,
      @Nullable Object manifest,
      @Nullable Object tag) {
    this(
        /* presentationStartTimeMs= */ C.TIME_UNSET,
        /* windowStartTimeMs= */ C.TIME_UNSET,
        /* elapsedRealtimeEpochOffsetMs= */ C.TIME_UNSET,
        periodDurationUs,
        windowDurationUs,
        windowPositionInPeriodUs,
        windowDefaultStartPositionUs,
        isSeekable,
        isDynamic,
        isLive,
        manifest,
        tag);
  }

  /**
   * Creates a timeline with one period, and a window of known duration starting at a specified
   * position in the period.
   *
   * @param periodDurationUs The duration of the period in microseconds.
   * @param windowDurationUs The duration of the window in microseconds.
   * @param windowPositionInPeriodUs The position of the start of the window in the period, in
   *     microseconds.
   * @param windowDefaultStartPositionUs The default position relative to the start of the window at
   *     which to begin playback, in microseconds.
   * @param isSeekable Whether seeking is supported within the window.
   * @param isDynamic Whether the window may change when the timeline is updated.
   * @param useLiveConfiguration Whether the window is live and {@link MediaItem#liveConfiguration}
   *     is used to configure live playback behaviour.
   * @param manifest The manifest. May be {@code null}.
   * @param mediaItem A media item used for {@link Timeline.Window#mediaItem}.
   */
  public SinglePeriodTimeline(
      long periodDurationUs,
      long windowDurationUs,
      long windowPositionInPeriodUs,
      long windowDefaultStartPositionUs,
      boolean isSeekable,
      boolean isDynamic,
      boolean useLiveConfiguration,
      @Nullable Object manifest,
      MediaItem mediaItem) {
    this(
        /* presentationStartTimeMs= */ C.TIME_UNSET,
        /* windowStartTimeMs= */ C.TIME_UNSET,
        /* elapsedRealtimeEpochOffsetMs= */ C.TIME_UNSET,
        periodDurationUs,
        windowDurationUs,
        windowPositionInPeriodUs,
        windowDefaultStartPositionUs,
        isSeekable,
        isDynamic,
        manifest,
        mediaItem,
        useLiveConfiguration ? mediaItem.liveConfiguration : null);
  }

  /**
   * @deprecated Use {@link #SinglePeriodTimeline(long, long, long, long, long, long, long, boolean,
   *     boolean, Object, MediaItem, MediaItem.LiveConfiguration)} instead.
   */
  @Deprecated
  public SinglePeriodTimeline(
      long presentationStartTimeMs,
      long windowStartTimeMs,
      long elapsedRealtimeEpochOffsetMs,
      long periodDurationUs,
      long windowDurationUs,
      long windowPositionInPeriodUs,
      long windowDefaultStartPositionUs,
      boolean isSeekable,
      boolean isDynamic,
      boolean isLive,
      @Nullable Object manifest,
      @Nullable Object tag) {
    this(
        presentationStartTimeMs,
        windowStartTimeMs,
        elapsedRealtimeEpochOffsetMs,
        periodDurationUs,
        windowDurationUs,
        windowPositionInPeriodUs,
        windowDefaultStartPositionUs,
        isSeekable,
        isDynamic,
        manifest,
        // Wrap the legacy tag in the placeholder media item.
        MEDIA_ITEM.buildUpon().setTag(tag).build(),
        isLive ? MEDIA_ITEM.liveConfiguration : null);
  }

  /**
   * Creates a timeline with one period, and a window of known duration starting at a specified
   * position in the period.
   *
   * @param presentationStartTimeMs The start time of the presentation in milliseconds since the
   *     epoch, or {@link C#TIME_UNSET} if unknown or not applicable.
   * @param windowStartTimeMs The window's start time in milliseconds since the epoch, or {@link
   *     C#TIME_UNSET} if unknown or not applicable.
   * @param elapsedRealtimeEpochOffsetMs The offset between {@link
   *     android.os.SystemClock#elapsedRealtime()} and the time since the Unix epoch according to
   *     the clock of the media origin server, or {@link C#TIME_UNSET} if unknown or not applicable.
   * @param periodDurationUs The duration of the period in microseconds.
   * @param windowDurationUs The duration of the window in microseconds.
   * @param windowPositionInPeriodUs The position of the start of the window in the period, in
   *     microseconds.
   * @param windowDefaultStartPositionUs The default position relative to the start of the window at
   *     which to begin playback, in microseconds.
   * @param isSeekable Whether seeking is supported within the window.
   * @param isDynamic Whether the window may change when the timeline is updated.
   * @param manifest The manifest. May be {@code null}.
   * @param mediaItem A media item used for {@link Timeline.Window#mediaItem}.
   * @param liveConfiguration The configuration for live playback behaviour, or {@code null} if the
   *     window is not live.
   */
  public SinglePeriodTimeline(
      long presentationStartTimeMs,
      long windowStartTimeMs,
      long elapsedRealtimeEpochOffsetMs,
      long periodDurationUs,
      long windowDurationUs,
      long windowPositionInPeriodUs,
      long windowDefaultStartPositionUs,
      boolean isSeekable,
      boolean isDynamic,
      @Nullable Object manifest,
      MediaItem mediaItem,
      @Nullable MediaItem.LiveConfiguration liveConfiguration) {
    this.presentationStartTimeMs = presentationStartTimeMs;
    this.windowStartTimeMs = windowStartTimeMs;
    this.elapsedRealtimeEpochOffsetMs = elapsedRealtimeEpochOffsetMs;
    this.periodDurationUs = periodDurationUs;
    this.windowDurationUs = windowDurationUs;
    this.windowPositionInPeriodUs = windowPositionInPeriodUs;
    this.windowDefaultStartPositionUs = windowDefaultStartPositionUs;
    this.isSeekable = isSeekable;
    this.isDynamic = isDynamic;
    this.manifest = manifest;
    this.mediaItem = checkNotNull(mediaItem);
    this.liveConfiguration = liveConfiguration;
  }

  @Override
  public int getWindowCount() {
    return 1;
  }

  @Override
  public Window getWindow(int windowIndex, Window window, long defaultPositionProjectionUs) {
    Assertions.checkIndex(windowIndex, 0, 1);
    long windowDefaultStartPositionUs = this.windowDefaultStartPositionUs;
    if (isDynamic && defaultPositionProjectionUs != 0) {
      if (windowDurationUs == C.TIME_UNSET) {
        // Don't allow projection into a window that has an unknown duration.
        windowDefaultStartPositionUs = C.TIME_UNSET;
      } else {
        windowDefaultStartPositionUs += defaultPositionProjectionUs;
        if (windowDefaultStartPositionUs > windowDurationUs) {
          // The projection takes us beyond the end of the window.
          windowDefaultStartPositionUs = C.TIME_UNSET;
        }
      }
    }
    return window.set(
        Window.SINGLE_WINDOW_UID,
        mediaItem,
        manifest,
        presentationStartTimeMs,
        windowStartTimeMs,
        elapsedRealtimeEpochOffsetMs,
        isSeekable,
        isDynamic,
        liveConfiguration,
        windowDefaultStartPositionUs,
        windowDurationUs,
        /* firstPeriodIndex= */ 0,
        /* lastPeriodIndex= */ 0,
        windowPositionInPeriodUs);
  }

  @Override
  public int getPeriodCount() {
    return 1;
  }

  @Override
  public Period getPeriod(int periodIndex, Period period, boolean setIds) {
    Assertions.checkIndex(periodIndex, 0, 1);
    // Only expose the UID when the caller asked for ids.
    @Nullable Object uid = setIds ? UID : null;
    return period.set(/* id= */ null, uid, 0, periodDurationUs, -windowPositionInPeriodUs);
  }

  @Override
  public int getIndexOfPeriod(Object uid) {
    return UID.equals(uid) ? 0 : C.INDEX_UNSET;
  }

  @Override
  public Object getUidOfPeriod(int periodIndex) {
    Assertions.checkIndex(periodIndex, 0, 1);
    return UID;
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ro.nextreports.server.web.report;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.apache.wicket.Component;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.ajax.form.AjaxFormComponentUpdatingBehavior;
import org.apache.wicket.datetime.markup.html.form.DateTextField;
import org.apache.wicket.extensions.markup.html.form.palette.Palette;
import org.apache.wicket.extensions.markup.html.form.palette.component.Recorder;
import org.apache.wicket.extensions.yui.calendar.DateField;
import org.apache.wicket.extensions.yui.calendar.DatePicker;
import org.apache.wicket.extensions.yui.calendar.DateTimeField;
import org.apache.wicket.markup.head.IHeaderResponse;
import org.apache.wicket.markup.html.WebMarkupContainer;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.form.CheckBox;
import org.apache.wicket.markup.html.form.DropDownChoice;
import org.apache.wicket.markup.html.form.IChoiceRenderer;
import org.apache.wicket.markup.html.form.TextField;
import org.apache.wicket.markup.html.list.ListItem;
import org.apache.wicket.markup.html.list.ListView;
import org.apache.wicket.markup.html.panel.EmptyPanel;
import org.apache.wicket.markup.html.panel.Panel;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.LoadableDetachableModel;
import org.apache.wicket.model.Model;
import org.apache.wicket.model.PropertyModel;
import org.apache.wicket.spring.injection.annot.SpringBean;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import ro.nextreports.engine.Report;
import ro.nextreports.engine.i18n.I18nLanguage;
import ro.nextreports.engine.queryexec.IdName;
import ro.nextreports.engine.queryexec.QueryParameter;
import ro.nextreports.engine.util.DateUtil;
import ro.nextreports.engine.util.ParameterUtil;
import ro.nextreports.engine.util.StringUtil;
import ro.nextreports.server.domain.DataSource;
import ro.nextreports.server.domain.ReportRuntimeParameterModel;
import ro.nextreports.server.report.next.NextRuntimeParameterModel;
import ro.nextreports.server.service.DataSourceService;
import ro.nextreports.server.service.ReportService;
import ro.nextreports.server.service.StorageService;
import ro.nextreports.server.util.ServerUtil;
import ro.nextreports.server.web.common.misc.ExtendedPalette;

/**
 * Abstract Wicket panel that renders one input component per visible (not hidden)
 * query parameter of a report, backed by a {@link ParameterRuntimeModel}.
 *
 * User: mihai.panaitescu
 * Date: 02-Feb-2010
 * Time: 13:17:03
 */
public abstract class ParameterRuntimePanel extends Panel {

    private static final long serialVersionUID = 1L;

    // Name of the special hidden parameter that is auto-filled with the current user.
    public static final String USER_PARAM = "__USER__";

    @SpringBean
    protected StorageService storageService;

    @SpringBean
    protected ReportService reportService;

    @SpringBean
    protected DataSourceService dataSourceService;

    // Backing model holding the runtime value of every parameter, keyed by name.
    protected ParameterRuntimeModel runtimeModel;
    // Visible (not hidden) parameters, in the order they are rendered.
    protected List<QueryParameter> paramList;
    // Visible parameters keyed by parameter name.
    private Map<String, QueryParameter> paramMap;
    // Maps each parameter to the Wicket component that edits it.
    private Map<QueryParameter, Component> paramComponentsMap;
    // Whether the report should be executed immediately after parameter entry.
    protected boolean runNow;
    // First error encountered while building the parameter models, if any.
    private String errorMessage;

    // In Next, for source parameters default values are not the entire objects (id, name), but only the ids
    // so we will have to look at selection if the default values can be found in the list of the parameter values
    // and only if we found them we add them to the model
    // all dependent parameters that must be initialized after completing default values
    private transient List<QueryParameter> depParameters = new ArrayList<QueryParameter>();
    // because dependent parameter values were not completed, also the default dependent values were not completed
    // and must be kept for further initialization
    private transient Map<QueryParameter, List<Serializable>> depDefValues = new HashMap<QueryParameter, List<Serializable>>();

    private static final Logger LOG = LoggerFactory.getLogger(ParameterRuntimePanel.class);

    public ParameterRuntimePanel(String id) {
        this(id, true);
    }

    // NOTE: this constructor does not call init(); subclasses are expected to do so.
    public ParameterRuntimePanel(String id, boolean runNow) {
        super(id);
        this.runNow = runNow;
    }

    public ParameterRuntimePanel(String id, ParameterRuntimeModel runtimeModel) {
        super(id);
        this.runNow = true;
        init(runtimeModel);
    }

    /** Adds subclass-specific components (e.g. action buttons) after the parameter items. */
    public abstract void addWicketComponents();

    /** Returns the report whose parameters are being edited. */
    public abstract Report getNextReport();

    /** Returns the i18n language used to localize parameter display. */
    public abstract I18nLanguage getLocaleLanguage();

    /** Returns the data source used to resolve source-parameter value lists. */
    public abstract DataSource getDataSource();

    /** Initializes the panel from all used, not hidden parameters of the report. */
    protected void init(ParameterRuntimeModel runtimeModel) {
        this.runtimeModel = runtimeModel;
        paramMap = ParameterUtil.getUsedNotHiddenParametersMap(getNextReport());
        paramList = new ArrayList<QueryParameter>(paramMap.values());
        paramComponentsMap = new HashMap<QueryParameter, Component>();
        addComponents();
        setOutputMarkupId(true);
    }

    /**
     * Initializes the panel; when {@code fromGlobalModel} is true, only the parameters
     * already present in the supplied runtime model are kept (common parameters).
     */
    protected void init(ParameterRuntimeModel runtimeModel, boolean fromGlobalModel) {
        this.runtimeModel = runtimeModel;
        paramMap = ParameterUtil.getUsedNotHiddenParametersMap(getNextReport());
        // global settings : we may have less parameters (only common parameters)
        if (fromGlobalModel) {
            List<String> keys = new ArrayList<String>();
            for (Iterator it=runtimeModel.getParameters().keySet().iterator(); it.hasNext();) {
                keys.add((String)it.next());
            }
            // Drop report parameters that are not part of the global model.
            for (Iterator it = paramMap.keySet().iterator(); it.hasNext();) {
                if
(!keys.contains((String)it.next())) { it.remove(); } } } paramList = new ArrayList<QueryParameter>(paramMap.values()); paramComponentsMap = new HashMap<QueryParameter, Component>(); addComponents(); setOutputMarkupId(true); } private void addComponents() { // initialize model for some hidden hard-coded parameters // there is possible that a report can contain only hidden hard-coded parameters! for (QueryParameter parameter : ParameterUtil.getUsedHiddenParametersMap(getNextReport()).values()) { if ((USER_PARAM.equals(parameter.getName()))) { runtimeModel.getParameters().put(parameter.getName(), createRuntimeModel(parameter)); } } // initialize model for all not hidden parameters for (QueryParameter parameter : paramList) { if (!runtimeModel.isEdit()) { runtimeModel.getParameters().put(parameter.getName(), createRuntimeModel(parameter)); } } if (!runtimeModel.isEdit() && (errorMessage == null)) { // if some parameters initialized have default values, their dependent parameters // have to be initialized too for (QueryParameter qp : depParameters) { populateDependentParameters(qp, null, true); } } ListView<QueryParameter> listView = new ListView<QueryParameter>("params", new ArrayList<QueryParameter>(paramMap.values())) { private static final long serialVersionUID = 1L; @Override protected void populateItem(ListItem<QueryParameter> item) { createItem(item); } }; listView.setReuseItems(true); add(listView); addWicketComponents(); if (errorMessage != null) { error(errorMessage); } } @SuppressWarnings("unchecked") protected void createItem(ListItem<QueryParameter> item) { Component currentComponent = null; WebMarkupContainer paletteContainer = new WebMarkupContainer("paletteContainer"); final QueryParameter parameter = item.getModelObject(); final IModel generalModel = new PropertyModel(runtimeModel.getParameters(), parameter.getName() + ".rawValue"); IModel listModel = new PropertyModel(runtimeModel.getParameters(), parameter.getName() + ".valueList"); if 
(runtimeModel.isEdit()) { populateDependentParameters(parameter, null, true); } final TextField textField = new TextField("txtValue", generalModel); textField.setVisible(false); try { textField.setType(Class.forName(parameter.getValueClassName())); } catch (ClassNotFoundException e) { e.printStackTrace(); LOG.error(e.getMessage(), e); error(e.getMessage()); } final DateTimeField txtTime = new DateTimeField("txtTime", generalModel) { private static final long serialVersionUID = 1L; @Override public IModel<String> getLabel() { return new Model<String>(getParameterName(parameter)); } @Override protected boolean use12HourFormat() { return false; } @Override protected DateTextField newDateTextField(String id, PropertyModel dateFieldModel) { DateTextField f = super.newDateTextField(id, dateFieldModel); // Important must create a new ajaxUpdate behavior (otherwise an error will rise) // DateTextField uses newDateTextField method in constructor (DateField uses it in onBeforeRenderer method) // that's why for DateField is ok to use the same ajax which is added when component is made visible // for DateTextField ajax behavior is added even if the component is not visible! 
f.add(createAjax(parameter, generalModel, f)); return f; } protected DatePicker newDatePicker() { return new DatePicker() { private static final long serialVersionUID = 1L; @Override protected void configure(final Map<String, Object> widgetProperties, final IHeaderResponse response, final Map<String, Object> initVariables) { super.configure(widgetProperties, response, initVariables); } @Override protected boolean enableMonthYearSelection() { return true; } }; } }; // add ajax update behavior on hours and minutes textfields txtTime.get("hours").add(createAjax(parameter, generalModel, txtTime.get("hours"), "hours")); txtTime.get("minutes").add(createAjax(parameter, generalModel, txtTime.get("minutes"), "minutes")); txtTime.setVisible(false); final DateField txtDate = new DateField("txtDate", generalModel) { private static final long serialVersionUID = 1L; @Override public IModel<String> getLabel() { return new Model<String>(getParameterName(parameter)); } @Override protected DateTextField newDateTextField(java.lang.String id, PropertyModel dateFieldModel) { DateTextField f = super.newDateTextField(id, dateFieldModel); f.add(createAjax(parameter, generalModel, f)); return f; } protected DatePicker newDatePicker() { return new DatePicker() { private static final long serialVersionUID = 1L; @Override protected void configure(final Map<String, Object> widgetProperties, final IHeaderResponse response, final Map<String, Object> initVariables) { super.configure(widgetProperties, response, initVariables); } @Override protected boolean enableMonthYearSelection() { return true; } }; } }; txtDate.setVisible(false); final CheckBox chkBox = new CheckBox("chkBox", generalModel); chkBox.setVisible(false); DropDownChoice downChoice = new DropDownChoice("cmbValue", generalModel, new ArrayList<String>()); downChoice.setVisible(false); if (parameter.getSelection().equalsIgnoreCase(QueryParameter.SINGLE_SELECTION)) { if (parameter.getSource() != null && 
parameter.getSource().trim().length() > 0) { // combo downChoice = new DropDownChoice("cmbValue", generalModel, new LoadableDetachableModel() { @Override protected Object load() { // for combo default value can be a simple value or selected for a source (is not an IdName so we make it) // a default value from a manual source is an IdName Object obj = generalModel.getObject(); if ((obj != null) && !(obj instanceof IdName) ) { IdName in = new IdName(); in.setId((Serializable)obj); in.setName((Serializable)obj); generalModel.setObject(in); } return runtimeModel.getParameters().get(parameter.getName()).getValues(); } }); if (!parameter.isHidden()) { if (parameter.isMandatory()) { downChoice.setRequired(true); } downChoice.setLabel(new Model<String>(getParameterName(parameter))); downChoice.setVisible(true); } // if parameter is not mandatory, even if we selected something by default, we should allow user to select null values if (!parameter.isMandatory()) { downChoice.setNullValid(true); } currentComponent = downChoice; } else { // not combo if (parameter.getValueClassName().contains("Date")) { if (!parameter.isHidden()) { if (generalModel.getObject() == null) { generalModel.setObject(DateUtil.floor(new Date())); } if (parameter.isMandatory()) { txtDate.setRequired(true); } txtDate.setVisible(true); } currentComponent = txtDate; } else if(parameter.getValueClassName().contains("Timestamp") || parameter.getValueClassName().contains("Time") ) { if (!parameter.isHidden()) { if (generalModel.getObject() == null) { generalModel.setObject(DateUtil.floor(new Date())); } if (parameter.isMandatory()) { txtTime.setRequired(true); } txtTime.setVisible(true); } currentComponent = txtTime; } else if (parameter.getValueClassName().contains("Boolean")) { if (!parameter.isHidden()) { if (parameter.isMandatory()) { chkBox.setRequired(true); } chkBox.setLabel(new Model<String>(getParameterName(parameter))); chkBox.setVisible(true); } currentComponent = chkBox; } else { if 
(!parameter.isHidden()) { if (parameter.isMandatory()) { textField.setRequired(true); } textField.setLabel(new Model<String>(getParameterName(parameter))); textField.setVisible(true); } currentComponent = textField; } } paletteContainer.add(new EmptyPanel("palette")); } else { if (parameter.getSource() != null && parameter.getSource().trim().length() > 0) { if (!parameter.isHidden()) { Palette palette = createPalette(parameter, listModel, createAjax(parameter)); paletteContainer.add(palette.setOutputMarkupId(true)); currentComponent = palette; } else { paletteContainer.add(new EmptyPanel("palette")); } } else { ManualListPanel list = new ManualListPanel(parameter, listModel, 10, createAjax(parameter)); paletteContainer.add(list.setOutputMarkupId(true)); currentComponent = list; } } paramComponentsMap.put(parameter, currentComponent); // if this parameter has dependent parameters // we must update values for those using an ajax update // for Palette this is done in its class // for DateField and DateTimeField is done on the inner DateTextField if (ParameterUtil.getChildDependentParameters(getNextReport(), parameter).size() > 0) { boolean ajaxAlreadyAdded = (currentComponent instanceof Palette) || (currentComponent instanceof DateField) || (currentComponent instanceof DateTimeField); if (!ajaxAlreadyAdded) { currentComponent.add(createAjax(parameter)); } } String name = getDisplayableParameterName(parameter); Label lbl = new Label("name", name); lbl.setEscapeModelStrings(false); lbl.setOutputMarkupId(true); item.add(lbl); txtTime.setOutputMarkupId(true); item.add(txtTime); txtDate.setOutputMarkupId(true); item.add(txtDate); downChoice.setOutputMarkupId(true); item.add(downChoice); paletteContainer.setOutputMarkupId(true); item.add(paletteContainer); textField.setOutputMarkupId(true); item.add(textField); chkBox.setOutputMarkupId(true); item.add(chkBox); } private NextRuntimeParameterModel createRuntimeModel(QueryParameter parameter) { boolean isMultipleSelection = 
parameter.getSelection().equalsIgnoreCase(QueryParameter.MULTIPLE_SELECTION); NextRuntimeParameterModel runtimeModel = new NextRuntimeParameterModel(parameter.getName(), getParameterName(parameter), isMultipleSelection); runtimeModel.setMandatory(parameter.isMandatory()); List<IdName> values = new ArrayList<IdName>(); // set in the model only the values for parameters which are not dependent if ((errorMessage == null) && (parameter.getSource() != null) && (parameter.getSource().trim().length() > 0) && !parameter.isDependent()) { try { values = dataSourceService.getParameterValues(getDataSource(), parameter); } catch (Exception e) { e.printStackTrace(); errorMessage = "Get parameter values for : " + parameter.getName() + " > " + e.getMessage(); LOG.error(errorMessage, e); error(e.getMessage()); } } runtimeModel.setParameterValues(values); if (errorMessage == null) { initDefaultValues(runtimeModel, parameter, values); } return runtimeModel; } private void initDefaultValues(NextRuntimeParameterModel runtimeModel, QueryParameter parameter, List<IdName> values) { List<Serializable> defaultValues = new ArrayList<Serializable>(); if ((parameter.getDefaultValues() != null)) { defaultValues = parameter.getDefaultValues(); } if ((parameter.getDefaultSource() != null) && (parameter.getDefaultSource().trim().length() > 0)) { try { defaultValues = dataSourceService.getDefaultSourceValues(getDataSource(), parameter); } catch (Exception e) { e.printStackTrace(); errorMessage = "Get default source values for parameter : " + parameter.getName() + " > " + e.getMessage(); LOG.error(errorMessage, e); error(e.getMessage()); } } depDefValues.put(parameter, defaultValues); if (defaultValues.size() == 0) { return; } // for source parameters, the values are not entire objects (id, name) but only the ids // so we have to look in the parameter values for them if ((parameter.getSource() != null) && !parameter.getSource().trim().equals("")) { defaultValues = getSelectedValues(values, 
defaultValues); } if (defaultValues.size() == 0) { return; } boolean populateDependent = false; if (USER_PARAM.equals(parameter.getName())) { runtimeModel.setRawValue(ServerUtil.getUsernameWithoutRealm()); populateDependent = true; } else { if (QueryParameter.MULTIPLE_SELECTION.equals(parameter.getSelection())) { if (defaultValues.size() == 0) { runtimeModel.setValueList(new ArrayList<Object>()); } else { ArrayList<Object> list = new ArrayList<Object>(); list.addAll(defaultValues); runtimeModel.setValueList(list); populateDependent = true; } } else { if (defaultValues.size() > 0) { runtimeModel.setRawValue(defaultValues.get(0)); populateDependent = true; } } } // mark the dependent parameters that must be populated after initilize the default values if (populateDependent) { this.runtimeModel.getParameters().put(parameter.getName(), runtimeModel); depParameters.add(parameter); } } // a default value must be simple java object , or an IdName but only with the id private List<Serializable> getSelectedValues(List<IdName> values, List<Serializable> defaultValues) { List<Serializable> selectedValues = new ArrayList<Serializable>(); if (defaultValues == null) { return selectedValues; } for (Serializable s : defaultValues) { for (IdName in : values) { if (s instanceof IdName) { if ((in.getId() != null) && in.getId().equals( ((IdName)s).getId())) { selectedValues.add(in); break; } } else if ((in.getId() != null) && in.getId().equals(s)) { selectedValues.add(in); break; } } } return selectedValues; } private List<Object> getSelectedValuesAsObject(List<Serializable> defaultValues) { List<Object> selectedValues = new ArrayList<Object>(); for (Serializable s : defaultValues) { selectedValues.add(s); } return selectedValues; } private AjaxFormComponentUpdatingBehavior createAjax(final QueryParameter parameter) { return new AjaxFormComponentUpdatingBehavior("onchange") { @Override protected void onUpdate(AjaxRequestTarget target) { populateDependentParameters(parameter, target, 
false); } }; } private AjaxFormComponentUpdatingBehavior createAjax(final QueryParameter parameter, final IModel model, final DateTextField dateField) { return new AjaxFormComponentUpdatingBehavior("onchange") { @SuppressWarnings("unchecked") @Override protected void onUpdate(AjaxRequestTarget target) { // @todo wicket 1.5 does not update model for DateField and DateTimeField // https://issues.apache.org/jira/browse/WICKET-4496 // use this as an workaround model.setObject(dateField.getDefaultModelObject()); populateDependentParameters(parameter, target, false); } }; } // @todo wicket 1.5 // used to update hours and minutes private AjaxFormComponentUpdatingBehavior createAjax(final QueryParameter parameter, final IModel model, final Component component, final String time) { return new AjaxFormComponentUpdatingBehavior("onchange") { @SuppressWarnings("unchecked") @Override protected void onUpdate(AjaxRequestTarget target) { // @todo wicket 1.5 does not update model for DateField and DateTimeField // https://issues.apache.org/jira/browse/WICKET-4496 // use this as an workaround if ((model == null) || (model.getObject() == null)) { return; } Date date = (Date)model.getObject(); if ("hours".equals(time)) { date = DateUtil.setHours(date, (Integer)component.getDefaultModelObject()); } else if ("minutes".equals(time)) { date = DateUtil.setMinutes(date, (Integer)component.getDefaultModelObject()); } model.setObject(date); populateDependentParameters(parameter, target, false); } }; } private void populateDependentParameters(QueryParameter parameter, AjaxRequestTarget target, boolean recursive) { Report nextReport = getNextReport(); if (nextReport == null) { return; } Map<String, QueryParameter> childParams = ParameterUtil.getChildDependentParameters(nextReport, parameter); // update model parameter values for every child parameter for (QueryParameter childParam : childParams.values()) { if (!paramList.contains(childParam)) { continue; } Component childComponent = 
paramComponentsMap.get(childParam); List<IdName> values = new ArrayList<IdName>(); boolean allParentsHaveValues = true; Map<String, QueryParameter> allParentParams = ParameterUtil.getParentDependentParameters(nextReport, childParam); for (QueryParameter parentParam : allParentParams.values()) { if (runtimeModel.getParameters().get(parentParam.getName()).getProcessingValue() == null) { allParentsHaveValues = false; break; } } if ((childParam.getSource() != null) && (childParam.getSource().trim().length() > 0) && allParentsHaveValues) { try { Map<String, Serializable> allParameterValues = new HashMap<String, Serializable>(); for (String name : runtimeModel.getParameters().keySet()) { ReportRuntimeParameterModel model = runtimeModel.getParameters().get(name); allParameterValues.put(model.getName(), (Serializable) model.getProcessingValue()); } //System.out.println("!!!! map = " + ParameterUtil.getDebugParameters(allParameterValues)); values = dataSourceService.getDependentParameterValues( getDataSource(), childParam, paramMap, allParameterValues); } catch (Exception e) { e.printStackTrace(); LOG.error("Get dependent parameter values for : " + childParam.getName() + " > " + e.getMessage(), e); error(e.getMessage()); } } NextRuntimeParameterModel parameterModel = (NextRuntimeParameterModel) runtimeModel.getParameters().get(childParam.getName()); // if nothing is selected for that parameter (see scheduler) we look for the default values if ((parameterModel.getValueList() == null) || (parameterModel.getValueList().size() == 0)) { List<Serializable> list = getSelectedValues(values, depDefValues.get(childParam)); if (list.size() > 0) { parameterModel.setValueList(getSelectedValuesAsObject(list)); } } if (values != null && values.size() > 0) { parameterModel.setParameterValues(values); } else { parameterModel.setParameterValues(new ArrayList<IdName>()); } if (target != null) { target.add(childComponent); } if (recursive) { populateDependentParameters(childParam, target, 
true); } } } @SuppressWarnings("unchecked") private Palette createPalette(QueryParameter parameter, IModel listModel, AjaxFormComponentUpdatingBehavior ajaxUpdate) { ParameterPalette parameterPalette = new ParameterPalette(parameter, listModel, 10); parameterPalette.setUpdatingBehavior(ajaxUpdate); return parameterPalette; } private String getDisplayableParameterName(QueryParameter parameter) { if (parameter.isHidden()) { return ""; } String name = getParameterName(parameter); if (parameter.isMandatory()) { name += "&nbsp;<em>*</em>"; } return name; } private String getParameterName(QueryParameter parameter) { String name = parameter.getRuntimeName(); if ((name == null) || name.trim().equals("")) { name = parameter.getName(); } else { name = StringUtil.getI18nString(name,getLocaleLanguage()); } return name; } class ParameterPalette extends ExtendedPalette<Object> { private static final long serialVersionUID = 1L; private QueryParameter parameter; private AjaxFormComponentUpdatingBehavior updatingBehavior; public ParameterPalette(QueryParameter parameter, IModel<List<Object>> listModel, int rows) { //super("palette", listModel, new ParameterChoicesModel(parameter), new ParameterChoiceRenderer(), rows, false); super("palette", listModel, new ParameterChoicesModel(parameter), new ParameterChoiceRenderer(), rows, false, true); this.parameter = parameter; } public void setUpdatingBehavior(AjaxFormComponentUpdatingBehavior updatingBehavior) { this.updatingBehavior = updatingBehavior; } @Override protected Recorder<Object> newRecorderComponent() { Recorder<Object> recorder = super.newRecorderComponent(); String paramaterName = getParameterName(parameter); recorder.setLabel(new Model<String>(paramaterName)); if (!parameter.isHidden()) { if (parameter.isMandatory()) { recorder.setRequired(true); } } if (ParameterUtil.getChildDependentParameters(getNextReport(), parameter).size() > 0) { recorder.add(updatingBehavior); } return recorder; } } class ParameterChoicesModel 
extends LoadableDetachableModel<List<IdName>> { private static final long serialVersionUID = 1L; private QueryParameter parameter; public ParameterChoicesModel(QueryParameter parameter) { this.parameter = parameter; } @Override protected List<IdName> load() { String parameterName = parameter.getName(); List<IdName> list = runtimeModel.getParameters().get(parameterName).getValues(); return list.size() > 0 ? list : new ArrayList<IdName>(); } } class ParameterChoiceRenderer implements IChoiceRenderer<Object> { private static final long serialVersionUID = 1L; public Object getDisplayValue(Object object) { IdName value = (IdName) object; return (value.getName() == null) ? value.getId() : value.getName(); } public String getIdValue(Object object, int index) { if (object == null) { return ""; } if (!(object instanceof IdName)) { return object.toString(); } IdName value = (IdName) object; if (value.getId() == null) { return Integer.toString(index); } Object returnValue = value.getId(); if (returnValue == null) { return ""; } // IMPORTANT : if values start or end with space , on submit first rawValue will be ignored! // so we assure that the id never starts or ends with space! // // replace comma with other character (otherwise values with comma will be interpreted as two // values and no selection will be done for them) return "@" + returnValue.toString().replace(",", "-") + "@"; } } public boolean hasPalette() { for (QueryParameter parameter : paramList) { if (parameter.getSelection().equalsIgnoreCase(QueryParameter.MULTIPLE_SELECTION)) { if (parameter.getSource() != null && parameter.getSource().trim().length() > 0) { if (!parameter.isHidden()) { return true; } } } } return false; } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.uima.resource; import java.util.Map; import org.apache.uima.UIMAException; /** * Thrown by {@link Resource#initialize(ResourceSpecifier,Map)} to indicate that a failure has * occurred during initialization. * * */ public class ResourceInitializationException extends UIMAException { private static final long serialVersionUID = -2521675776941322837L; /** * Message key for a standard UIMA exception message: "Error initializing "{0}" from descriptor * {1}." */ public static final String ERROR_INITIALIZING_FROM_DESCRIPTOR = "error_initializing_from_descriptor"; /** * Message key for a standard UIMA exception message: "Annotator class name is required for a * primitive Text Analysis Engine." */ public static final String MISSING_ANNOTATOR_CLASS_NAME = "missing_annotator_class_name"; /** * Message key for a standard UIMA exception message: "Annotator class "{0}" was not found." */ public static final String ANNOTATOR_CLASS_NOT_FOUND = "annotator_class_not_found"; /** * Message key for a standard UIMA exception message: "Class "{0}" is not an Annotator." 
*/ public static final String NOT_AN_ANNOTATOR = "not_an_annotator"; /** * Message key for a standard UIMA exception message: "Annotator class {0} does not implement an * Annotator interface that is supported by Analysis Engine implementation {1}." */ public static final String ANNOTATOR_INTERFACE_NOT_SUPPORTED = "annotator_interface_not_supported"; /** * Message key for a standard UIMA exception message: "Could not instantiate Annotator class * "{0}". Check that your annotator class is not abstract and has a zero-argument constructor." */ public static final String COULD_NOT_INSTANTIATE_ANNOTATOR = "could_not_instantiate_annotator"; /** * Message key for a standard UIMA exception message: "Initialization of annotator class "{0}" * failed." */ public static final String ANNOTATOR_INITIALIZATION_FAILED = "annotator_initialization_failed"; /** * Message key for a standard UIMA exception message: "The feature {0} is declared twice, with * incompatible range types {1} and {2}. (Descriptor: {3})" */ public static final String INCOMPATIBLE_RANGE_TYPES = "incompatible_range_types"; /** * Message key for a standard UIMA exception message: "The feature {0} is declared twice, with * incompatible element range types {1} and {2}. (Descriptor: {3})." */ public static final String INCOMPATIBLE_ELEMENT_RANGE_TYPES = "incompatible_element_range_types"; /** * Message key for a standard UIMA exception message: "The Type {0} is declared twice, with * incompatible supertypes {1} and {2}." */ public static final String INCOMPATIBLE_SUPERTYPES = "incompatible_supertypes"; /** * Message key for a standard UIMA exception message: "The feature {0} is declared twice, with * incompatible multipleReferencesAllowed specifications. (Descriptor: {1})" */ public static final String INCOMPATIBLE_MULTI_REFS = "incompatible_multi_refs"; /** * Message key for a standard UIMA exception message: The String subtype {0} is declared twice, * with different sets of allowed values: {1} and {2}. 
(Descriptor: {3}) */ public static final String ALLOWED_VALUES_NOT_IDENTICAL = "string_allowed_values_not_the_same"; /** * Message key for a standard UIMA exception message: "Undefined type "{0}", referenced in feature * "{1}" declared on type "{2}"." */ public static final String UNDEFINED_RANGE_TYPE = "undefined_range_type"; /** * Message key for a standard UIMA exception message: "Undefined type "{0}", referenced as * supertype of type "{1}"." */ public static final String UNDEFINED_SUPERTYPE = "undefined_supertype"; /** * Message key for a standard UIMA exception message: "Undefined type "{0}", referenced in * definition of index "{1}"." */ public static final String UNDEFINED_TYPE_FOR_INDEX = "undefined_type_for_index"; /** * Message key for a standard UIMA exception message: "Undefined type "{0}" in type priority * list." */ public static final String UNDEFINED_TYPE_FOR_PRIORITY_LIST = "undefined_type_for_priority_list"; /** * Message key for a standard UIMA exception message: "The key Feature "{0}" declared for Index * "{1}" was not found." */ public static final String INDEX_KEY_FEATURE_NOT_FOUND = "index_key_feature_not_found"; /** * Message key for a standard UIMA exception message: "The Analysis Engine Descriptor is invalid - * a Type System may not be explicitly set for an aggregate Analysis Engine." */ public static final String AGGREGATE_AE_TYPE_SYSTEM = "aggregate_ae_type_system"; /** * Message key for a standard UIMA exception message: "Type {0} extends String and must specify a * list of allowed values." */ public static final String MISSING_ALLOWED_VALUES = "missing_allowed_values"; /** * Message key for a standard UIMA exception message: "Type {0} specifies a list of allowed values * but is not a subtype of uima.cas.String. You may only specify a list of allowed values for * string subtypes." 
*/ public static final String ALLOWED_VALUES_ON_NON_STRING_TYPE = "allowed_values_on_non_string_type"; /** * Message key for a standard UIMA exception message: "Duplicate configuration parameter name * "{0}" in component "{1}"." */ public static final String DUPLICATE_CONFIGURATION_PARAMETER_NAME = "duplicate_configuration_parameter_name"; /** * Message key for a standard UIMA exception message: "This resource requires {0} parameter(s)." */ public static final String INCORRECT_NUMBER_OF_PARAMETERS = "incorrect_number_of_parameters"; /** * Message key for a standard UIMA exception message: "No resource could be found for the * parameters {0}." */ public static final String NO_RESOURCE_FOR_PARAMETERS = "no_resource_for_parameters"; /** * Message key for a standard UIMA exception message: "Could not access the resource data at {0}." */ public static final String COULD_NOT_ACCESS_DATA = "could_not_access_data"; /** * Message key for a standard UIMA exception message: "The Resource class {0} could not be found." */ public static final String CLASS_NOT_FOUND = "class_not_found"; /** * Message key for a standard UIMA exception message: "Could not instantiate class {0}." */ public static final String COULD_NOT_INSTANTIATE = "could_not_instantiate"; /** * Message key for a standard UIMA exception message: "The Resource Factory does not know how to * create a resource of class {0} from the given ResourceSpecifier." */ public static final String DO_NOT_KNOW_HOW = "do_not_know_how"; /** * Message key for a standard UIMA exception message: "The class {0} does not implement * org.apache.uima.resource.SharedResourceObject." */ public static final String NOT_A_SHARED_RESOURCE_OBJECT = "not_a_shared_resource_object"; /** * Message key for a standard UIMA exception message: "For resource "{0}", could not load data * from class {1} into class {2}, because class {1} is not a Data Resource." 
*/ public static final String NOT_A_DATA_RESOURCE = "not_a_data_resource"; /** * Message key for a standard UIMA exception message: "No resource with name "{0}" has been * declared in this Analysis Engine." */ public static final String UNKNOWN_RESOURCE_NAME = "unknown_resource_name"; /** * Message key for a standard UIMA exception message: "The resource with name "{0}" does not * implement the required interface {1}." */ public static final String RESOURCE_DOES_NOT_IMPLEMENT_INTERFACE = "resource_does_not_implement_interface"; /** * Message key for a standard UIMA exception message: "There is no resource satisfying the * required resource dependency with key "{0}"." */ public static final String RESOURCE_DEPENDENCY_NOT_SATISFIED = "resource_dependency_not_satisfied"; /** * Message key for a standard UIMA exception message: "Unknown Protocol: "{0}"." */ public static final String UNKNOWN_PROTOCOL = "unknown_protocol"; /** * Message key for a standard UIMA exception message: "Malformed URL "{0}"." */ public static final String MALFORMED_URL = "malformed_url"; /** * Message key for a standard UIMA exception message: "The configuration data {0} for Configuration * parameter {1} in the resource is absent or not valid" */ public static final String RESOURCE_DATA_NOT_VALID = "resource_data_not_valid"; /** * Message key for a standard UIMA exception message: Configuration setting for {0} is absent */ public static final String CONFIG_SETTING_ABSENT = "config_setting_absent"; /** * Message key for a standard UIMA exception message: Two different CAS FeatureStructure indexes * with name "{0}" have been defined. */ public static final String DUPLICATE_INDEX_NAME = "duplicate_index_name"; /** * Message key for a standard UIMA exception message: Configuration parameter "{0}" in primitive * Analysis Engine "{1}" declares an override. Parameter overrides are allowed only in aggregate * Analysis Engines.
*/ public static final String PARAM_OVERRIDE_IN_PRIMITIVE = "param_override_in_primitive"; /** * Message key for a standard UIMA exception message: Configuration parameter "{0}" in aggregate * Analysis Engine "{1}" does not declare any overrides. Implicit overrides are no longer * supported. (Descriptor: {2}) */ public static final String INVALID_PARAM_OVERRIDE_NO_OVERRIDES = "invalid_param_override_no_overrides"; /** * Message key for a standard UIMA exception message: Invalid configuration parameter override * syntax "{0}" in parameter "{1}" of Analysis Engine "{2}". Overrides must be of the form * "&lt;delegateKey&gt;/&lt;paramName&gt;" */ public static final String INVALID_PARAM_OVERRIDE_SYNTAX = "invalid_param_override_syntax"; /** * Message key for a standard UIMA exception message: Invalid configuration parameter override * "{0}" in parameter "{1}" of Analysis Engine "{2}" -- delegate Analysis Engine "{3}" does not * exist. */ public static final String INVALID_PARAM_OVERRIDE_NONEXISTENT_DELEGATE = "invalid_param_override_nonexistent_delegate"; /** * Message key for a standard UIMA exception message: Invalid configuration parameter override * "{0}" in parameter "{1}" of Analysis Engine "{2}" -- delegate Analysis Engine "{3}" does not * declare parameter {4}. */ public static final String INVALID_PARAM_OVERRIDE_NONEXISTENT_PARAMETER = "invalid_param_override_nonexistent_parameter"; /** * Message key for a standard UIMA exception message: Invalid configuration parameter override * "{0}" in parameter "{1}" of Analysis Engine "{2}" -- delegate Analysis Engine "{3}" does not * declare parameter {4} in group {5}. */ public static final String INVALID_PARAM_OVERRIDE_NONEXISTENT_PARAMETER_IN_GROUP = "invalid_param_override_nonexistent_parameter_in_group"; /** * Message key for a standard UIMA exception message: The output Sofa "{0}" in component "{1}" is * not mapped to any output Sofa in its containing aggregate, "{2}". 
*/ public static final String OUTPUT_SOFA_NOT_DECLARED_IN_AGGREGATE = "output_sofa_not_declared_in_aggregate"; /** * Message key for a standard UIMA exception message: The input Sofa "{0}" in component "{1}" is * not an input of the containing aggregate, "{2}", nor is it an output of another component in * the same aggregate. */ public static final String INPUT_SOFA_HAS_NO_SOURCE = "input_sofa_has_no_source"; /** * Message key for a standard UIMA exception message: The Sofa "{0}" in aggregate "{1}" is not * mapped to any sofa of any component in that aggregate. */ public static final String AGGREGATE_SOFA_NOT_MAPPED = "aggregate_sofa_not_mapped"; /** * Message key for a standard UIMA exception message: The Sofa "{0}" in component "{1}" of * aggregate "{2}" has conflicting mappings to aggregate sofas "{3}" and "{4}". */ public static final String SOFA_MAPPING_CONFLICT = "sofa_mapping_conflict"; /** * Message key for a standard UIMA exception message: An implementation class name is missing from * the descriptor. */ public static final String MISSING_IMPLEMENTATION_CLASS_NAME = "missing_implementation_class_name"; /** * Message key for a standard UIMA exception message: "Error creating CAS Processor with name * "{0}". The descriptor type "{1}" is not allowed - you must specify an AnalysisEngine or CAS * Consumer descriptor." */ public static final String NOT_A_CAS_PROCESSOR = "not_a_cas_processor"; /** * Message key for a standard UIMA exception message: "A CollectionReader descriptor specified * implementation class "{0}", but this class does not implement the CollectionReader interface." */ public static final String NOT_A_COLLECTION_READER = "not_a_collection_reader"; /** * Message key for a standard UIMA exception message: "A CasConsumer descriptor specified * implementation class "{0}", but this class does not implement the CasConsumer interface." 
*/ public static final String NOT_A_CAS_CONSUMER = "not_a_cas_consumer"; /** * Message key for a standard UIMA exception message: "A CasInitializer descriptor specified * implementation class "{0}", but this class does not implement the CasInitializer interface." */ public static final String NOT_A_CAS_INITIALIZER = "not_a_cas_initializer"; /** * Message key for a standard UIMA exception message: "Initialization of CAS Processor with name * "{0}" failed. */ public static final String CAS_PROCESSOR_INITIALIZE_FAILED = "cas_processor_initialize_failed"; /** * Message key for a standard UIMA exception message: "The descriptor for aggregate AnalysisEngine * "{0}" declared multipleDeploymentAllowed == true, but its component "{1}" declared * multipleDeploymentAllowed == false, which is inconsistent." */ public static final String INVALID_MULTIPLE_DEPLOYMENT_ALLOWED = "invalid_multiple_deployment_allowed"; /** * Message key for a standard UIMA exception message: "The descriptor for aggregate AnalysisEngine * "{0}" declared modifiesCas == false, but its component "{1}" declared modifiesCas == true, * which is inconsistent." */ public static final String INVALID_MODIFIES_CAS = "invalid_modifies_cas"; /** * Message key for a standard UIMA exception message: "Invalid type priorities." */ public static final String INVALID_TYPE_PRIORITIES = "invalid_type_priorities"; /** * Message key for a standard UIMA exception message: "Type "{0}" does not define a supertype." */ public static final String NO_SUPERTYPE = "no_supertype"; /** * Message key for a standard UIMA exception message: Undefined component key "{1}", referenced in * Sofa mapping for Sofa "{2}" of aggregate "{3}". */ public static final String SOFA_MAPPING_HAS_UNDEFINED_COMPONENT_KEY = "sofa_mapping_has_undefined_component_key"; /** * Message key for a standard UIMA exception message: Component "{0}" does not contain Sofa "{1}", * referenced in Sofa mapping for Sofa "{2}" of aggregate "{3}". 
*/ public static final String SOFA_MAPPING_HAS_UNDEFINED_COMPONENT_SOFA = "sofa_mapping_has_undefined_component_sofa"; /** * Message key for a standard UIMA exception message: "The class {0} is not a valid Analysis * Component. You must specify an Annotator, CAS Consumer, Collection Reader, or CAS Multiplier. " */ public static final String NOT_AN_ANALYSIS_COMPONENT = "not_an_analysis_component"; /** * Message key for a standard UIMA exception message: "An Aggregate Analysis Engine specified a * Flow Controller descriptor {0} of an invalid type ({1})). A FlowControllerDescription is * required." */ public static final String NOT_A_FLOW_CONTROLLER_DESCRIPTOR = "not_a_flow_controller_descriptor"; /** * Message key for a standard UIMA exception message: "{0} is not a supported framework * implementation" */ public static final String UNSUPPORTED_FRAMEWORK_IMPLEMENTATION = "unsupported_framework_implementation"; /** * Message key for a standard UIMA exception message: "The descriptor for aggregate AnalysisEngine * "{0}" declared outputsNewCASes == true, but all of its components declared outputsNewCASes == * false, which is inconsistent." */ public static final String INVALID_OUTPUTS_NEW_CASES = "invalid_outputs_new_CASes"; /** * Message key for a standard UIMA exception message: "The aggregate AnalysisEngine "{0}" declared * an empty &lt;flowController/&gt; element. You must specify an import or a * flowControllerDescription." */ public static final String EMPTY_FLOW_CONTROLLER_DECLARATION = "empty_flow_controller_declaration"; /** * Message key for a standard UIMA exception message: "The primitive AnalysisEngine "{0}" has an * annotator of type {1} but its descriptor declares input or output Sofas. Text annotators are * not allowed to declare input or output Sofas. Consider extending CasAnnotator_ImplBase or * JCasAnnotator_ImplBase instead." 
*/ public static final String TEXT_ANNOTATOR_CANNOT_BE_SOFA_AWARE = "text_annotator_cannot_be_sofa_aware"; /** * Message key for a standard UIMA exception message: "Component descriptor did not specify the * required &lt;frameworkImplementation&gt; element." */ public static final String MISSING_FRAMEWORK_IMPLEMENTATION = "missing_framework_implementation"; /** * Message key for a standard UIMA exception message: "The CasCreationUtils.createCas method was * passed a collection containing an object of class {0}, which is not supported. Refer to the * Javadoc for a list of types accepted by this method." */ public static final String UNSUPPORTED_OBJECT_TYPE_IN_CREATE_CAS = "unsupported_object_type_in_create_cas"; /** * Message key for a standard UIMA exception message: "Sofa mappings were specified for the remote * Analysis Engine {0}. Sofa mappings are not currently supported for remote Analysis Engines. A * workaround is to wrap the remotely deployed AE in an Aggregate (on the remote side), and * specify Sofa mappings in that aggregate." */ public static final String SOFA_MAPPING_NOT_SUPPORTED_FOR_REMOTE = "sofa_mapping_not_supported_for_remote"; /** * Message key for a standard UIMA exception message: The descriptor for Aggregate Analysis Engine * "{0}" specified an invalid flow. The key "{1}" was used in the flow but is not defined as a key * in the &lt;delegateAnalysisEngineSpecifiers&gt; element of the descriptor. */ public static final String UNDEFINED_KEY_IN_FLOW = "undefined_key_in_flow"; /** * Message key for a standard UIMA exception message: The value "{0}" is an invalid value for the * FixedFlowController's "ActionAfterCasMultiplier" configuration parameter. Valid values are * "continue", "stop", "drop", and "dropIfNewCasProduced". 
*/
  public static final String INVALID_ACTION_AFTER_CAS_MULTIPLIER = "invalid_action_after_cas_multiplier";

  /**
   * Message key for a standard UIMA exception message: The Flow Controller "{0}" requires a flow
   * constraints element of type "{1}" in the aggregate descriptor
   */
  public static final String FLOW_CONTROLLER_REQUIRES_FLOW_CONSTRAINTS = "flow_controller_requires_flow_constraints";

  /**
   * Message key for a standard UIMA exception message: The aggregate "{0}" references a
   * non-existent delegate "{1}" in it's Flow Controller's flow constraints
   */
  public static final String FLOW_CONTROLLER_MISSING_DELEGATE = "flow_controller_missing_delegate";

  /**
   * Message key for a standard UIMA exception message: Unexpected Exception thrown when
   * initializing Custom Resource "{0}" from descriptor "{1}".
   */
  public static final String EXCEPTION_WHEN_INITIALIZING_CUSTOM_RESOURCE = "exception_when_initializing_custom_resource";

  /**
   * Message key for a standard UIMA exception message: Unexpected Throwable or Error thrown when
   * initializing Custom Resource "{0}" from descriptor "{1}".
   */
  public static final String THROWABLE_WHEN_INITIALIZING_CUSTOM_RESOURCE = "throwable_when_initializing_custom_resource";

  /**
   * Message key for a standard UIMA exception message; maps to the entry
   * {@code "redefining_builtin_type"} in the message catalog.
   */
  public static final String REDEFINING_BUILTIN_TYPE = "redefining_builtin_type";

  /**
   * Creates a new exception with a null message.
   */
  public ResourceInitializationException() {
  }

  /**
   * Creates a new exception with the specified cause and a null message.
   *
   * @param aCause
   *          the original exception that caused this exception to be thrown, if any
   */
  public ResourceInitializationException(Throwable aCause) {
    super(aCause);
  }

  /**
   * Creates a new exception with the specified message.
   *
   * @param aResourceBundleName
   *          the base name of the resource bundle in which the message for this exception is
   *          located.
   * @param aMessageKey
   *          an identifier that maps to the message for this exception. The message may contain
   *          placeholders for arguments as defined by the {@link java.text.MessageFormat
   *          MessageFormat} class.
   * @param aArguments
   *          The arguments to the message. <code>null</code> may be used if the message has no
   *          arguments.
   */
  public ResourceInitializationException(String aResourceBundleName, String aMessageKey,
          Object[] aArguments) {
    super(aResourceBundleName, aMessageKey, aArguments);
  }

  /**
   * Creates a new exception with the specified message and cause.
   *
   * @param aResourceBundleName
   *          the base name of the resource bundle in which the message for this exception is
   *          located.
   * @param aMessageKey
   *          an identifier that maps to the message for this exception. The message may contain
   *          placeholders for arguments as defined by the {@link java.text.MessageFormat
   *          MessageFormat} class.
   * @param aArguments
   *          The arguments to the message. <code>null</code> may be used if the message has no
   *          arguments.
   * @param aCause
   *          the original exception that caused this exception to be thrown, if any
   */
  public ResourceInitializationException(String aResourceBundleName, String aMessageKey,
          Object[] aArguments, Throwable aCause) {
    super(aResourceBundleName, aMessageKey, aArguments, aCause);
  }

  /**
   * Creates a new exception with a message from the {@link #STANDARD_MESSAGE_CATALOG}.
   *
   * @param aMessageKey
   *          an identifier that maps to the message for this exception. The message may contain
   *          placeholders for arguments as defined by the {@link java.text.MessageFormat
   *          MessageFormat} class.
   * @param aArguments
   *          The arguments to the message. <code>null</code> may be used if the message has no
   *          arguments.
   */
  public ResourceInitializationException(String aMessageKey, Object[] aArguments) {
    super(aMessageKey, aArguments);
  }

  /**
   * Creates a new exception with the specified cause and a message from the
   * {@link #STANDARD_MESSAGE_CATALOG}.
   *
   * @param aMessageKey
   *          an identifier that maps to the message for this exception. The message may contain
   *          placeholders for arguments as defined by the {@link java.text.MessageFormat
   *          MessageFormat} class.
   * @param aArguments
   *          The arguments to the message. <code>null</code> may be used if the message has no
   *          arguments.
   * @param aCause
   *          the original exception that caused this exception to be thrown, if any
   */
  public ResourceInitializationException(String aMessageKey, Object[] aArguments, Throwable aCause) {
    super(aMessageKey, aArguments, aCause);
  }
}
package org.batfish.z3; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; import org.batfish.collections.NodeSet; import org.batfish.job.BatfishJob; import org.batfish.common.BatfishException; import org.batfish.representation.Flow; import org.batfish.representation.Ip; import org.batfish.representation.IpProtocol; import com.microsoft.z3.BitVecExpr; import com.microsoft.z3.BitVecNum; import com.microsoft.z3.BoolExpr; import com.microsoft.z3.Context; import com.microsoft.z3.Expr; import com.microsoft.z3.Fixedpoint; import com.microsoft.z3.FuncDecl; import com.microsoft.z3.Model; import com.microsoft.z3.Params; import com.microsoft.z3.Solver; import com.microsoft.z3.Status; import com.microsoft.z3.Z3Exception; public class CompositeNodJob extends BatfishJob<NodJobResult> { private List<Synthesizer> _dataPlaneSynthesizers; private final NodeSet _nodeSet; private int _numPrograms; private List<QuerySynthesizer> _querySynthesizers; private String _tag; public CompositeNodJob(List<Synthesizer> dataPlaneSynthesizer, List<QuerySynthesizer> querySynthesizer, NodeSet nodeSet, String tag) { _numPrograms = dataPlaneSynthesizer.size(); if (_numPrograms != querySynthesizer.size()) { throw new BatfishException( "mismatch between number of programs and number of queries"); } _dataPlaneSynthesizers = dataPlaneSynthesizer; _querySynthesizers = querySynthesizer; _nodeSet = new NodeSet(); _nodeSet.addAll(nodeSet); _tag = tag; } @Override public NodJobResult call() throws Exception { long startTime = System.currentTimeMillis(); long elapsedTime; NodProgram latestProgram = null; Context ctx = null; try { BoolExpr[] answers = new BoolExpr[_numPrograms]; ctx = new Context(); Params p = ctx.mkParams(); p.add("fixedpoint.engine", "datalog"); p.add("fixedpoint.datalog.default_relation", "doc"); p.add("fixedpoint.print_answer", true); for (int i = 0; i < 
_numPrograms; i++) { Synthesizer dataPlaneSynthesizer = _dataPlaneSynthesizers.get(i); QuerySynthesizer querySynthesizer = _querySynthesizers.get(i); NodProgram baseProgram = dataPlaneSynthesizer .synthesizeNodProgram(ctx); NodProgram queryProgram = querySynthesizer .getNodProgram(baseProgram); NodProgram program = baseProgram.append(queryProgram); latestProgram = program; Fixedpoint fix = ctx.mkFixedpoint(); fix.setParameters(p); for (FuncDecl relationDeclaration : program .getRelationDeclarations().values()) { fix.registerRelation(relationDeclaration); } for (BoolExpr rule : program.getRules()) { fix.addRule(rule, null); } for (BoolExpr query : program.getQueries()) { Status status = fix.query(query); switch (status) { case SATISFIABLE: break; case UNKNOWN: throw new BatfishException("Query satisfiability unknown"); case UNSATISFIABLE: break; default: throw new BatfishException("invalid status"); } } Expr answer = fix.getAnswer(); BoolExpr solverInput; if (answer.getArgs().length > 0) { List<Expr> reversedVarList = new ArrayList<Expr>(); reversedVarList.addAll(program.getVariablesAsConsts().values()); Collections.reverse(reversedVarList); Expr[] reversedVars = reversedVarList.toArray(new Expr[] {}); Expr substitutedAnswer = answer.substituteVars(reversedVars); solverInput = (BoolExpr) substitutedAnswer; } else { solverInput = (BoolExpr) answer; } if (_querySynthesizers.get(i).getNegate()) { answers[i] = ctx.mkNot(solverInput); } else { answers[i] = solverInput; } } BoolExpr compositeQuery = ctx.mkAnd(answers); Solver solver = ctx.mkSolver(); solver.add(compositeQuery); Status solverStatus = solver.check(); switch (solverStatus) { case SATISFIABLE: break; case UNKNOWN: throw new BatfishException("Stage 2 query satisfiability unknown"); case UNSATISFIABLE: elapsedTime = System.currentTimeMillis() - startTime; return new NodJobResult(elapsedTime); default: throw new BatfishException("invalid status"); } Model model = solver.getModel(); Map<String, Long> constraints 
= new LinkedHashMap<String, Long>(); for (FuncDecl constDecl : model.getConstDecls()) { String name = constDecl.getName().toString(); BitVecExpr varConstExpr = latestProgram.getVariablesAsConsts().get( name); long val = ((BitVecNum) model.getConstInterp(varConstExpr)) .getLong(); constraints.put(name, val); } Set<Flow> flows = new HashSet<Flow>(); for (String node : _nodeSet) { Flow flow = createFlow(node, constraints); flows.add(flow); } elapsedTime = System.currentTimeMillis() - startTime; return new NodJobResult(elapsedTime, flows); } catch (Z3Exception e) { elapsedTime = System.currentTimeMillis() - startTime; return new NodJobResult(elapsedTime, new BatfishException( "Error running NoD on concatenated data plane", e)); } finally { if (ctx != null) { ctx.dispose(); } } } private Flow createFlow(String node, Map<String, Long> constraints) { long src_ip = 0; long dst_ip = 0; long src_port = 0; long dst_port = 0; long protocol = IpProtocol.IP.number(); for (String varName : constraints.keySet()) { Long value = constraints.get(varName); switch (varName) { case Synthesizer.SRC_IP_VAR: src_ip = value; break; case Synthesizer.DST_IP_VAR: dst_ip = value; break; case Synthesizer.SRC_PORT_VAR: src_port = value; break; case Synthesizer.DST_PORT_VAR: dst_port = value; break; case Synthesizer.IP_PROTOCOL_VAR: protocol = value; break; default: throw new Error("invalid variable name"); } } return new Flow(node, new Ip(src_ip), new Ip(dst_ip), (int) src_port, (int) dst_port, IpProtocol.fromNumber((int) protocol), _tag); } }
/**
 * Copyright 2010 The PlayN Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package playn.core.gl;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.IntBuffer;
import java.util.ArrayList;
import java.util.List;

import pythagoras.i.Rectangle;

import playn.core.AbstractPlatform;
import playn.core.CanvasImage;
import playn.core.Image;
import playn.core.InternalTransform;
import playn.core.StockInternalTransform;

/**
 * Abstract base for a platform's OpenGL context. Tracks the currently bound framebuffer, active
 * shader, scissor (clip) stack and view/framebuffer dimensions, and declares the GL primitives
 * (texture, framebuffer, buffer and shader creation) that each platform backend implements.
 */
public abstract class GLContext {

  /** Used to configure texture image scaling. */
  public static enum Filter { LINEAR, NEAREST }

  /** Used to track and report rendering statistics. */
  public static class Stats {
    public int frames;
    public int shaderCreates;
    public int frameBufferCreates;
    public int texCreates;
    public int shaderBinds;
    public int frameBufferBinds;
    public int texBinds;
    public int quadsRendered;
    public int trisRendered;
    public int shaderFlushes;

    /** Resets all counters. */
    public void reset() {
      frames = 0;
      shaderCreates = 0;
      frameBufferCreates = 0;
      texCreates = 0;
      shaderBinds = 0;
      frameBufferBinds = 0;
      texBinds = 0;
      quadsRendered = 0;
      trisRendered = 0;
      shaderFlushes = 0;
    }
  }

  /** Compile-time switch controlling whether rendering statistics are gathered. */
  protected static final boolean STATS_ENABLED = true;

  /** Rendering statistics accumulated for this context; see {@link #stats()}. */
  protected final Stats stats = new Stats();

  /** The platform that owns this context; used for deferred GL tasks and error reporting. */
  protected final AbstractPlatform platform;

  // The shader currently rendering, or null if none is active; see useShader and flush.
  private GLShader curShader;
  // The id of the currently bound framebuffer, and the GL-context epoch (bumped on context loss).
  private int lastFramebuffer, epoch;
  // State saved by pushFramebuffer; pushedFramebuffer == -1 means nothing is currently pushed.
  private int pushedFramebuffer = -1, pushedWidth, pushedHeight;
  // Reusable stack of scissor rectangles; scissorDepth indexes the active top of the stack.
  private List<Rectangle> scissors = new ArrayList<Rectangle>();
  private int scissorDepth;
  // Lazily created 1x1 white image used when filling solid-color quads/tris; see fillImage().
  private Image fillImage;

  /** The (actual screen pixel) width and height of our default frame buffer. */
  protected int defaultFbufWidth, defaultFbufHeight;

  /** The (actual screen pixel) width and height of our current frame buffer. */
  protected int curFbufWidth, curFbufHeight;

  /** The (logical pixel) width and height of our view. */
  public int viewWidth, viewHeight;

  /** The scale factor for HiDPI mode, or 1 if HiDPI mode is not enabled. */
  public final Scale scale;

  /**
   * Sets the frame buffer to the specified width and height (in pixels). The view will potentially
   * be smaller than this size if a HiDPI scale factor is in effect.
   */
  public final void setSize(int pixelWidth, int pixelHeight) {
    viewWidth = scale.invScaledFloor(pixelWidth);
    viewHeight = scale.invScaledFloor(pixelHeight);
    curFbufWidth = defaultFbufWidth = pixelWidth;
    curFbufHeight = defaultFbufHeight = pixelHeight;
    viewConfigChanged();
  }

  /**
   * Configures the filter function used when rendering scaled textures.
   *
   * @param minFilter the scaling to use when rendering textures that are scaled down.
   * @param magFilter the scaling to use when rendering textures that are scaled up.
   */
  public abstract void setTextureFilter(Filter minFilter, Filter magFilter);

  /** Returns the specified GL string parameter. */
  public abstract String getString(int param);

  /** Returns the specified GL integer parameter. */
  public abstract int getInteger(int param);

  /** Returns the specified GL float parameter. */
  public abstract float getFloat(int param);

  /** Returns the specified GL boolean parameter. */
  public abstract boolean getBoolean(int param);

  /**
   * See http://www.khronos.org/opengles/sdk/docs/man/xhtml/glTexImage2D.xml
   *
   * <p>The default implementation is based on {@link Image#getRgb} and will hand over an RGBA byte
   * array. Please set the (internal)format and type parameters accordingly; they are mainly
   * present for future support of different formats. The WebGL implementation will pass through
   * all parameters.</p>
   */
  public void texImage2D(Image image, int target, int level, int internalformat, int format,
                         int type) {
    throw new UnsupportedOperationException();
  }

  /**
   * See http://www.khronos.org/opengles/sdk/docs/man/xhtml/glTexSubImage2D.xml
   *
   * <p>The default implementation is based on {@link Image#getRgb} and will hand over a RGBA byte
   * array. Please set the (internal)format and type parameters accordingly; they are mainly
   * present for future support of different formats. The WebGL implementation will pass through
   * all parameters.</p>
   */
  public void texSubImage2D(Image image, int target, int level, int xOffset, int yOffset,
                            int format, int type) {
    throw new UnsupportedOperationException();
  }

  /**
   * Creates a shader program, for use by a single {@link GLShader}.
   * @param vertShader the source code for the vertex shader.
   * @param fragShader the source code for the fragment shader.
   */
  public abstract GLProgram createProgram(String vertShader, String fragShader);

  /**
   * Creates a float buffer with the specified initial capacity.
   */
  public abstract GLBuffer.Float createFloatBuffer(int capacity);

  /**
   * Creates a short buffer with the specified initial capacity.
   */
  public abstract GLBuffer.Short createShortBuffer(int capacity);

  /** Creates a framebuffer that will render into the supplied texture. <em>NOTE:</em> this must be
   * followed immediately by a call to {@link #bindFramebuffer(int,int,int)} or {@link
   * #pushFramebuffer}. */
  public int createFramebuffer(int tex) {
    flush(true); // flush any pending rendering calls, because createFramebufferImpl (necessarily)
    // binds the new framebuffer in order to bind it to the specified texture (meh)
    return createFramebufferImpl(tex);
  }

  /** Deletes the supplied frame buffer (which will have come from {@link #createFramebuffer}). */
  public abstract void deleteFramebuffer(int fbuf);

  /** Creates a texture with the specified repeat behavior. */
  public abstract int createTexture(boolean repeatX, boolean repeatY, boolean mipmaps);

  /** Creates a texture of the specified size, with the specified repeat behavior, into which we
   * can subsequently render. */
  public abstract int createTexture(int width, int height, boolean repeatX, boolean repeatY,
                                    boolean mipmaps);

  /** Generates mipmaps for the specified texture. */
  public abstract void generateMipmap(int tex);

  /** Activates the specified texture unit.
   * @param glTextureN the texture unit to active (e.g. {@link GL20#GL_TEXTURE0}). */
  public abstract void activeTexture(int glTextureN);

  /** Binds the specified texture. */
  public abstract void bindTexture(int tex);

  /** Destroys the supplied texture. */
  public abstract void destroyTexture(int tex);

  /** Starts a series of drawing commands that are clipped to the specified rectangle (in view
   * coordinates, not OpenGL coordinates). Thus must be followed by a call to {@link #endClipped}
   * when the clipped drawing commands are done.
   * @return whether the resulting clip rectangle is not empty */
  public abstract boolean startClipped(int x, int y, int width, int height);

  /** Ends a series of drawing commands that were clipped per a call to {@link #startClipped}. */
  public abstract void endClipped();

  /** Clears the bound framebuffer with the specified color. */
  public abstract void clear(float r, float g, float b, float a);

  /** NOOP except when debugging, checks and logs whether any GL errors have occurred. */
  public abstract void checkGLError(String op);

  /** Queues a texture to be destroyed on the GL thread. */
  public void queueDestroyTexture(final int tex) {
    platform.invokeLater(new Runnable() {
      public void run() {
        destroyTexture(tex);
      }
    });
  }

  /** Queues a framebuffer to be destroyed on the GL thread. */
  public void queueDeleteFramebuffer(final int fbuf) {
    platform.invokeLater(new Runnable() {
      public void run() {
        deleteFramebuffer(fbuf);
      }
    });
  }

  /** Queues a custom shader to be cleaned up on the GL thread. */
  public void queueClearShader(final GLShader shader) {
    platform.invokeLater(new Runnable() {
      public void run() {
        shader.clearProgram();
      }
    });
  }

  /** Creates an identity transform, which may subsequently be mutated. */
  public InternalTransform createTransform() {
    return new StockInternalTransform();
  }

  /** Returns the root transform which converts scale-independent coordinates into pixels. On some
   * platforms this may also handle screen rotation. Do not modify! */
  public abstract InternalTransform rootTransform();

  /** Binds {@code fbuf} (with the given pixel dimensions) if it is not already bound, flushing
   * any rendering pending against the previously bound framebuffer first. */
  public void bindFramebuffer(int fbuf, int width, int height) {
    if (fbuf != lastFramebuffer) {
      flush(true); // flush and deactivate any shader rendering to the old framebuffer
      checkGLError("bindFramebuffer");
      bindFramebufferImpl(lastFramebuffer = fbuf, curFbufWidth = width, curFbufHeight = height);
    }
  }

  /** Binds the default framebuffer at its current dimensions. */
  public void bindFramebuffer() {
    bindFramebuffer(defaultFramebuffer(), defaultFbufWidth, defaultFbufHeight);
  }

  /** Stores the metadata for the currently bound frame buffer, and binds the supplied framebuffer.
   * This must be followed by a call to {@link #popFramebuffer}. Also, it is not allowed to push a
   * framebuffer if a framebuffer is already pushed. Only one level of nesting is supported. */
  public void pushFramebuffer(int fbuf, int width, int height) {
    assert pushedFramebuffer == -1 : "Already have a pushed framebuffer";
    pushedFramebuffer = lastFramebuffer;
    pushedWidth = curFbufWidth;
    pushedHeight = curFbufHeight;
    bindFramebuffer(fbuf, width, height);
  }

  /** Pops the framebuffer pushed by a previous call to {@link #pushFramebuffer} and restores the
   * framebuffer that was active prior to that call. */
  public void popFramebuffer() {
    assert pushedFramebuffer != -1 : "Have no pushed framebuffer";
    bindFramebuffer(pushedFramebuffer, pushedWidth, pushedHeight);
    pushedFramebuffer = -1;
  }

  /** Returns the supplied shader if non-null, or the default quad shader if null. */
  public GLShader quadShader (GLShader custom) {
    return custom == null ? quadShader() : custom;
  }

  /** Returns the supplied shader if non-null, or the default triangles shader if null. */
  public GLShader trisShader (GLShader custom) {
    return custom == null ? trisShader() : custom;
  }

  /** Flushes the active shader (if any) without deactivating it. */
  public void flush() {
    flush(false);
  }

  /** Flushes the active shader (if any), optionally deactivating it afterwards. */
  public void flush(boolean deactivate) {
    if (curShader != null) {
      checkGLError("flush()");
      curShader.flush();
      if (deactivate) {
        curShader.deactivate();
        curShader = null;
      }
    }
  }

  /**
   * Makes the supplied shader the current shader, flushing any previous shader.
   *
   * @return true if the shader changed (and the caller should (re)prepare it), false if it was
   * already current.
   */
  public boolean useShader(GLShader shader) {
    if (curShader == shader)
      return false;
    checkGLError("useShader");
    flush(true);
    curShader = shader;
    return true;
  }

  /**
   * Returns the current rendering stats. Note: the counters are only maintained when
   * {@code STATS_ENABLED} is true (it is hard-coded above); otherwise they remain all zeros.
   */
  public Stats stats() {
    return stats;
  }

  /**
   * Returns debugging info on the quad shader. Useful for performance analysis.
   */
  public String quadShaderInfo() {
    return String.valueOf(quadShader());
  }

  /**
   * Returns debugging info on the triangles shader. Useful for performance analysis.
   */
  public String trisShaderInfo() {
    return String.valueOf(trisShader());
  }

  /**
   * Returns a (created on demand, then cached) image used when filling solid color quads or
   * triangles.
   */
  Image fillImage() {
    if (fillImage == null) {
      CanvasImage image = platform.graphics().createImage(1, 1);
      image.canvas().setFillColor(0xFFFFFFFF).fillRect(0, 0, image.width(), image.height());
      fillImage = image;
    }
    return fillImage;
  }

  /**
   * Adds the given rectangle to the scissors stack, intersecting with the previous one if it
   * exists. Intended for use by subclasses to implement {@link #startClipped} and {@link
   * #endClipped}.
   *
   * <p>NOTE: calls to this method <b>must</b> be matched by a corresponding call {@link
   * #popScissorState}, or all hell will break loose.</p>
   *
   * @return the new clipping rectangle to use
   */
  protected Rectangle pushScissorState (int x, int y, int width, int height) {
    // grow the scissors buffer if necessary
    if (scissorDepth == scissors.size()) {
      scissors.add(new Rectangle());
    }
    Rectangle r = scissors.get(scissorDepth);
    if (scissorDepth == 0) {
      r.setBounds(x, y, width, height);
    } else {
      // intersect current with previous
      Rectangle pr = scissors.get(scissorDepth - 1);
      r.setLocation(Math.max(pr.x, x), Math.max(pr.y, y));
      r.setSize(Math.min(pr.maxX(), x + width - 1) - r.x,
                Math.min(pr.maxY(), y + height - 1) - r.y);
    }
    scissorDepth++;
    return r;
  }

  /**
   * Removes the most recently pushed scissor state and returns the rectangle that should now
   * be used for clipping, or null if clipping should be disabled.
   */
  protected Rectangle popScissorState () {
    scissorDepth--;
    return scissorDepth == 0 ? null : scissors.get(scissorDepth - 1);
  }

  /**
   * Returns the current scissor stack size. Zero means no scissors are currently pushed.
   */
  protected int getScissorDepth () {
    return scissorDepth;
  }

  protected GLContext(AbstractPlatform platform, float scaleFactor) {
    this.scale = new Scale(scaleFactor);
    this.platform = platform;
  }

  // Rebinds the default framebuffer whenever the view configuration changes; see setSize.
  protected void viewConfigChanged () {
    bindFramebuffer();
  }

  /**
   * Increments our GL context epoch. This should be called by platform backends when the GL
   * context has been lost and a new one created.
   */
  protected void incrementEpoch () {
    ++epoch;
  }

  /**
   * Returns the current GL context epoch. This is used to invalidate shaders when we lose and
   * regain our GL context.
   */
  protected int epoch () {
    return epoch;
  }

  /**
   * Returns the default framebuffer.
   */
  protected abstract int defaultFramebuffer();

  /**
   * Creates a framebuffer that will render into the supplied texture.
   */
  protected abstract int createFramebufferImpl(int tex);

  /**
   * Binds the specified framebuffer and sets the viewport to the specified dimensions.
   */
  protected abstract void bindFramebufferImpl(int fbuf, int width, int height);

  // Whether to even attempt the quad shader; see createQuadShader.
  protected boolean shouldTryQuadShader() {
    return QuadShader.isLikelyToPerform(this);
  }

  // Creates the best-performing default shader: a QuadShader if it is likely to work (and its
  // core creation succeeds), otherwise the IndexedTrisShader fallback.
  protected GLShader createQuadShader() {
    if (shouldTryQuadShader()) {
      try {
        GLShader quadShader = new QuadShader(this);
        quadShader.createCore(); // force core creation to test whether it fails
        return quadShader;
      } catch (Throwable t) {
        platform.reportError("Failed to create QuadShader", t);
      }
    }
    return new IndexedTrisShader(this);
  }

  // used by GLContext.tex(Sub)Image2D impls
  protected static ByteBuffer getRgba(Image image) {
    int w = (int) image.width(), h = (int) image.height(), size = w * h;
    int[] rawPixels = new int[size];
    ByteBuffer pixels = ByteBuffer.allocateDirect(size * 4);
    pixels.order(ByteOrder.nativeOrder());
    IntBuffer rgba = pixels.asIntBuffer();
    image.getRgb(0, 0, w, h, rawPixels, 0, w);
    for (int i = 0; i < size; i++) {
      int argb = rawPixels[i];
      // Order is inverted because this is read as a byte array, and we store intel ints.
      rgba.put(i, ((argb >> 16) & 0x0ff) | (argb & 0x0ff00ff00) | ((argb & 0xff) << 16));
    }
    return pixels;
  }

  protected abstract GLShader quadShader();
  protected abstract GLShader trisShader();
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionConfiguration;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequestImpl;
import org.apache.hadoop.hbase.regionserver.compactions.RatioBasedCompactionPolicy;
import org.apache.hadoop.hbase.regionserver.wal.FSHLog;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;

import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;

/**
 * Base fixture for compaction-policy tests: builds a single-family HStore
 * backed by mock store files so subclasses can assert which files a
 * {@link RatioBasedCompactionPolicy} selects for compaction.
 */
public class TestCompactionPolicy {
  private final static Log LOG = LogFactory.getLog(TestCompactionPolicy.class);
  protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();

  protected Configuration conf;
  protected HStore store;
  private static final String DIR = TEST_UTIL.getDataTestDir(
    TestCompactionPolicy.class.getSimpleName()).toString();
  // Dummy file handed to every MockHStoreFile; created once per setUp().
  protected static Path TEST_FILE;
  // Compaction-selection bounds installed into the configuration by config().
  protected static final int minFiles = 3;
  protected static final int maxFiles = 5;
  protected static final long minSize = 10;
  protected static final long maxSize = 2100;

  private FSHLog hlog;
  private HRegion region;

  @Before
  public void setUp() throws Exception {
    config();
    initialize();
  }

  /**
   * setup config values necessary for store
   */
  protected void config() {
    this.conf = TEST_UTIL.getConfiguration();
    // Disable time-based major compactions so only the ratio policy decides.
    this.conf.setLong(HConstants.MAJOR_COMPACTION_PERIOD, 0);
    this.conf.setInt(CompactionConfiguration.HBASE_HSTORE_COMPACTION_MIN_KEY, minFiles);
    this.conf.setInt(CompactionConfiguration.HBASE_HSTORE_COMPACTION_MAX_KEY, maxFiles);
    this.conf.setLong(CompactionConfiguration.HBASE_HSTORE_COMPACTION_MIN_SIZE_KEY, minSize);
    this.conf.setLong(CompactionConfiguration.HBASE_HSTORE_COMPACTION_MAX_SIZE_KEY, maxSize);
    this.conf.setFloat(CompactionConfiguration.HBASE_HSTORE_COMPACTION_RATIO_KEY, 1.0F);
  }

  /**
   * Setting up a Store
   * @throws IOException with error
   */
  protected void initialize() throws IOException {
    Path basedir = new Path(DIR);
    String logName = "logs";
    Path logdir = new Path(DIR, logName);
    HColumnDescriptor hcd = new HColumnDescriptor(Bytes.toBytes("family"));
    FileSystem fs = FileSystem.get(conf);

    // Remove any WAL directory left over from a previous run.
    fs.delete(logdir, true);

    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(Bytes.toBytes("table")));
    htd.addFamily(hcd);
    HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false);

    hlog = new FSHLog(fs, basedir, logName, conf);
    ChunkCreator.initialize(MemStoreLABImpl.CHUNK_SIZE_DEFAULT, false, 0, 0, 0, null);
    // Create the region on disk, then close and reopen it directly so the test
    // controls the region/WAL lifecycle (no region server involved).
    region = HRegion.createHRegion(info, basedir, conf, htd, hlog);
    region.close();
    Path tableDir = FSUtils.getTableDir(basedir, htd.getTableName());
    region = new HRegion(tableDir, hlog, fs, conf, info, htd, null);

    store = new HStore(region, hcd, conf);

    TEST_FILE = region.getRegionFileSystem().createTempName();
    fs.createNewFile(TEST_FILE);
  }

  @After
  public void tearDown() throws IOException {
    // Close both resources even if the first close fails; rethrow the last error.
    IOException ex = null;
    try {
      region.close();
    } catch (IOException e) {
      LOG.warn("Caught Exception", e);
      ex = e;
    }
    try {
      hlog.close();
    } catch (IOException e) {
      LOG.warn("Caught Exception", e);
      ex = e;
    }
    if (ex != null) {
      throw ex;
    }
  }

  // Boxes a varargs list of longs into an ArrayList.
  ArrayList<Long> toArrayList(long... numbers) {
    ArrayList<Long> result = new ArrayList<>();
    for (long i : numbers) {
      result.add(i);
    }
    return result;
  }

  // Creates non-reference mock store files of the given sizes (age 0).
  List<HStoreFile> sfCreate(long... sizes) throws IOException {
    ArrayList<Long> ageInDisk = new ArrayList<>();
    for (int i = 0; i < sizes.length; i++) {
      ageInDisk.add(0L);
    }
    return sfCreate(toArrayList(sizes), ageInDisk);
  }

  List<HStoreFile> sfCreate(ArrayList<Long> sizes, ArrayList<Long> ageInDisk) throws IOException {
    return sfCreate(false, sizes, ageInDisk);
  }

  // Like sfCreate(long...) but the files may be marked as references.
  List<HStoreFile> sfCreate(boolean isReference, long... sizes) throws IOException {
    ArrayList<Long> ageInDisk = new ArrayList<>(sizes.length);
    for (int i = 0; i < sizes.length; i++) {
      ageInDisk.add(0L);
    }
    return sfCreate(isReference, toArrayList(sizes), ageInDisk);
  }

  List<HStoreFile> sfCreate(boolean isReference, ArrayList<Long> sizes, ArrayList<Long> ageInDisk)
      throws IOException {
    List<HStoreFile> ret = Lists.newArrayList();
    for (int i = 0; i < sizes.size(); i++) {
      // The index doubles as the file's sequence id so ordering is deterministic.
      ret.add(new MockHStoreFile(TEST_UTIL, TEST_FILE, sizes.get(i), ageInDisk.get(i), isReference,
          i));
    }
    return ret;
  }

  // Extracts the reader-reported lengths of the given store files, in order.
  long[] getSizes(List<HStoreFile> sfList) {
    long[] aNums = new long[sfList.size()];
    for (int i = 0; i < sfList.size(); ++i) {
      aNums[i] = sfList.get(i).getReader().length();
    }
    return aNums;
  }

  void compactEquals(List<HStoreFile> candidates, long... expected) throws IOException {
    compactEquals(candidates, false, false, expected);
  }

  void compactEquals(List<HStoreFile> candidates, boolean forcemajor, long... expected)
      throws IOException {
    compactEquals(candidates, forcemajor, false, expected);
  }

  /**
   * Runs the store's compaction policy over the candidate files and asserts the
   * selected files' sizes match {@code expected} (in order). Resets the store's
   * forceMajor flag afterwards so tests do not leak state into each other.
   */
  void compactEquals(List<HStoreFile> candidates, boolean forcemajor, boolean isOffPeak,
      long... expected) throws IOException {
    store.forceMajor = forcemajor;
    // Test Default compactions
    CompactionRequestImpl result =
        ((RatioBasedCompactionPolicy) store.storeEngine.getCompactionPolicy()).selectCompaction(
          candidates, new ArrayList<>(), false, isOffPeak, forcemajor);
    List<HStoreFile> actual = new ArrayList<>(result.getFiles());
    if (isOffPeak && !forcemajor) {
      Assert.assertTrue(result.isOffPeak());
    }
    Assert.assertEquals(Arrays.toString(expected), Arrays.toString(getSizes(actual)));
    store.forceMajor = false;
  }
}
package net.koofr.api.json;

import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.reflect.Array;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Modifier;
import java.lang.reflect.ParameterizedType;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import net.koofr.api.util.Log;
import net.koofr.api.util.StdLog;

import com.eclipsesource.json.Json;
import com.eclipsesource.json.JsonArray;
import com.eclipsesource.json.JsonObject;
import com.eclipsesource.json.JsonObject.Member;
import com.eclipsesource.json.JsonValue;

/**
 * Reflection-based two-way mapper between plain Java objects and
 * {@code com.eclipsesource.json} values.
 *
 * <p>Supported object shapes: boxed primitives and String, arrays, Lists,
 * Maps, and {@link JsonBase} subclasses whose public non-static,
 * non-transient, non-volatile, non-native fields are mapped by name.
 */
public class Transmogrifier {

  static Log log = new StdLog();

  /**
   * Returns true iff the reflected field takes part in JSON mapping: it must
   * be public and must not be static, transient, volatile or native. Shared by
   * dumpObject(), mapObjectUnsafe() and mappedJsonResponseUnsafe().
   */
  private static boolean isMappedField(Field f) {
    final int mods = f.getModifiers();
    return (mods & Modifier.PUBLIC) != 0
        && (mods & (Modifier.STATIC | Modifier.TRANSIENT | Modifier.VOLATILE | Modifier.NATIVE)) == 0;
  }

  /** Returns true iff the JSON value is an object containing a member named {@code f}. */
  protected static boolean jsonHasField(JsonValue o, String f) {
    if(o.isObject()) {
      for(String n: o.asObject().names()) {
        if(f.equals(n)) {
          return true;
        }
      }
    }
    return false;
  }

  /**
   * Maps a Java object to a JSON value.
   *
   * @param o the object to map; may be null (mapped to Java null).
   * @return the JSON value, or null if {@code o} was null.
   * @throws JsonException if the object graph contains an unsupported type or
   *         reflection fails.
   */
  public static JsonValue mapObject(Object o) throws JsonException {
    try {
      return mapObjectUnsafe(o);
    } catch(Exception ex) {
      throw new JsonException("Transmogrification bug. Check ur code.", ex);
    }
  }

  /**
   * Debugging aid: prints a rough JSON-ish rendering of the object to stdout.
   */
  @SuppressWarnings("rawtypes")
  public static void dumpObject(Object o) {
    if(o == null) {
      System.out.print("null");
    } else if((o instanceof Integer) || (o instanceof Long) || (o instanceof Double) ||
        (o instanceof Boolean) || (o instanceof String)) {
      System.out.print(o);
    } else if(o.getClass().isArray()) {
      System.out.print("[");
      int len = Array.getLength(o);
      for(int i = 0; i < len; i++) {
        // FIX: previously recursed on the array itself (dumpObject(o)), which
        // looped forever; recurse on the element instead.
        dumpObject(Array.get(o, i));
        System.out.print(",");
      }
      System.out.print("]");
    } else if(o instanceof JsonBase) {
      Field[] fields = o.getClass().getFields();
      System.out.print("{");
      for(Field f: fields) {
        if(!isMappedField(f)) {
          continue;
        }
        System.out.print(f.getName() + ":");
        try {
          dumpObject(f.get(o));
        } catch(Exception ex) {
          System.out.print("INACCESSIBLE");
        }
        System.out.print(",");
      }
      System.out.print("}");
    } else if(List.class.isAssignableFrom(o.getClass())) {
      System.out.print("[");
      for(Object o1: (List)o) {
        dumpObject(o1);
        System.out.print(",");
      }
      System.out.print("]");
    } else if(Map.class.isAssignableFrom(o.getClass())) {
      System.out.print("{");
      Map m = Map.class.cast(o);
      for(Object k: m.keySet()) {
        System.out.print(k + ":");
        dumpObject(m.get(k));
        System.out.print(",");
      }
      System.out.print("}");
    } else {
      System.out.print(o);
    }
  }

  /**
   * Recursive worker behind {@link #mapObject(Object)}; lets reflection
   * exceptions propagate so the caller can wrap them.
   */
  @SuppressWarnings("rawtypes")
  protected static JsonValue mapObjectUnsafe(Object o) throws JsonException,
      IllegalAccessException {
    if(o == null) {
      return null;
    } else if(o instanceof Integer) {
      return Json.value((Integer)o);
    } else if(o instanceof Long) {
      return Json.value((Long)o);
    } else if(o instanceof Double) {
      return Json.value((Double)o);
    } else if(o instanceof Boolean) {
      return Json.value((Boolean)o);
    } else if(o instanceof String) {
      return Json.value((String)o);
    } else if(o.getClass().isArray()) {
      JsonArray rv = new JsonArray();
      int len = Array.getLength(o);
      for(int i = 0; i < len; i++) {
        JsonValue v = mapObjectUnsafe(Array.get(o, i));
        if(v != null) {
          rv.add(v);
        }
      }
      return rv;
    } else if(o instanceof JsonBase) {
      JsonObject rv = new JsonObject();
      Field[] fields = o.getClass().getFields();
      for(Field f: fields) {
        if(!isMappedField(f)) {
          continue;
        }
        // Null-valued fields are omitted from the produced JSON object.
        JsonValue v = mapObjectUnsafe(f.get(o));
        if(v != null) {
          rv.add(f.getName(), v);
        }
      }
      return rv;
    } else if(List.class.isAssignableFrom(o.getClass())) {
      JsonArray rv = new JsonArray();
      for(Object e: List.class.cast(o)) {
        JsonValue v = mapObjectUnsafe(e);
        if(v != null) {
          rv.add(v);
        }
      }
      return rv;
    } else if(Map.class.isAssignableFrom(o.getClass())) {
      JsonObject rv = new JsonObject();
      Map m = Map.class.cast(o);
      for(Object k: m.keySet()) {
        JsonValue v = mapObjectUnsafe(m.get(k));
        if(v != null) {
          rv.add((String)k, v);
        }
      }
      return rv;
    } else {
      throw new JsonException("Unsupported source type: " + o.getClass().getName());
    }
  }

  /**
   * Maps a JSON value to an instance of {@code c}.
   *
   * @param src the JSON value to map.
   * @param c the target type.
   * @return the mapped instance, or null for JSON null.
   * @throws JsonException if the value does not fit the target type.
   */
  public static <T> T mappedJsonResponse(JsonValue src, Class<T> c) throws JsonException {
    try {
      return mappedJsonResponseUnsafe(src, c, null);
    } catch(JsonException ex) {
      throw ex;
    } catch(Exception ex) {
      throw new JsonException("Transmogrification bug. Check ur code.", ex);
    }
  }

  /** Parses the stream as UTF-8 JSON and maps it to an instance of {@code c}. */
  public static <T> T mappedJsonResponse(InputStream is, Class<T> c) throws IOException,
      JsonException {
    JsonValue src = Json.parse(new InputStreamReader(is, "UTF-8"));
    return mappedJsonResponse(src, c);
  }

  @SuppressWarnings("unchecked")
  public static Map<String, Object> genericJsonResponse(JsonObject v) throws JsonException {
    return (Map<String, Object>)genericJsonResponse((JsonValue)v);
  }

  @SuppressWarnings("unchecked")
  public static List<Object> genericJsonResponse(JsonArray v) throws JsonException {
    return (List<Object>)genericJsonResponse((JsonValue)v);
  }

  /**
   * Maps a JSON value to generic Java types: Double, String, Boolean,
   * List&lt;Object&gt;, HashMap&lt;String, Object&gt;, or null.
   */
  public static Object genericJsonResponse(JsonValue v) throws JsonException {
    if(v.isNull()) {
      // FIX: JSON null used to fall through to the "Unsupported type" error;
      // map it to Java null, consistent with mappedJsonResponseUnsafe().
      return null;
    } else if(v.isNumber()) {
      return v.asDouble();
    } else if(v.isString()) {
      return v.asString();
    } else if(v.isArray()) {
      List<Object> rv = new ArrayList<Object>();
      JsonArray a = v.asArray();
      int len = a.size();
      for(int i = 0; i < len; i++) {
        rv.add(genericJsonResponse(a.get(i)));
      }
      return rv;
    } else if(v.isBoolean()) {
      return v.asBoolean();
    } else if(v.isObject()) {
      Iterator<Member> i = v.asObject().iterator();
      HashMap<String, Object> rv = new HashMap<String, Object>();
      while(i.hasNext()) {
        Member m = i.next();
        JsonValue sv = m.getValue();
        rv.put(m.getName(), genericJsonResponse(sv));
      }
      return rv;
    } else {
      throw new JsonException("Unsupported type: " + v + " (" + v.getClass().getName() + ")");
    }
  }

  /**
   * Recursive worker behind {@link #mappedJsonResponse(JsonValue, Class)}.
   *
   * @param src the JSON value to map.
   * @param c the target type.
   * @param pc element type for List targets / value type for Map targets
   *        (taken from the field's generic signature), or null for generic mapping.
   */
  @SuppressWarnings({ "unchecked", "rawtypes" })
  protected static <T> T mappedJsonResponseUnsafe(JsonValue src, Class<T> c, Class pc)
      throws InvocationTargetException, NoSuchMethodException, IllegalAccessException,
      InstantiationException, JsonException {
    // log.debug("Value: " + src + "; type: " + c.getName());
    if(src.isNull()) {
      return null;
    } else if(c.equals(Integer.class) || c.equals(Integer.TYPE)) {
      if(!src.isNumber()) {
        throw new JsonException("Value not an integer.");
      }
      Constructor<T> cns = c.getConstructor(Integer.TYPE);
      return cns.newInstance(src.asInt());
    } else if(c.equals(Long.class) || c.equals(Long.TYPE)) {
      if(!src.isNumber()) {
        throw new JsonException("Value not a long.");
      }
      Constructor<T> cns = c.getConstructor(Long.TYPE);
      return cns.newInstance(src.asLong());
    } else if(c.equals(Double.class) || c.equals(Double.TYPE)) {
      if(!src.isNumber()) {
        throw new JsonException("Value not a double.");
      }
      Constructor<T> cns = c.getConstructor(Double.TYPE);
      return cns.newInstance(src.asDouble());
    } else if(c.equals(Boolean.class) || c.equals(Boolean.TYPE)) {
      if(!src.isBoolean()) {
        throw new JsonException("Value not a boolean.");
      }
      Constructor<T> cns = c.getConstructor(Boolean.TYPE);
      return cns.newInstance(src.asBoolean());
    } else if(c.equals(String.class)) {
      if(!src.isString()) {
        throw new JsonException("Value not a string.");
      }
      Constructor<T> cns = c.getConstructor(String.class);
      return cns.newInstance(src.asString());
    } else if(c.isArray()) {
      if(!src.isArray()) {
        throw new JsonException("Value not an array.");
      }
      JsonArray arr = src.asArray();
      Class<?> styp = c.getComponentType();
      int size = arr.size();
      Object array = Array.newInstance(styp, size);
      for(int i = 0; i < size; i++) {
        Array.set(array, i, mappedJsonResponseUnsafe(arr.get(i), styp, null));
      }
      return c.cast(array);
    } else if(List.class.isAssignableFrom(c)) {
      if(!src.isArray()) {
        throw new JsonException("Value not an array.");
      }
      JsonArray arr = src.asArray();
      int size = arr.size();
      T rv;
      if(c.equals(List.class)) {
        rv = c.cast(new ArrayList<Object>());
      } else {
        Constructor<T> cns = c.getConstructor();
        rv = cns.newInstance();
      }
      for(int i = 0; i < size; i++) {
        if(pc != null) {
          ((List)rv).add(mappedJsonResponseUnsafe(arr.get(i), pc, null));
        } else {
          ((List)rv).add(genericJsonResponse(arr.get(i)));
        }
      }
      return rv;
    } else if(Map.class.isAssignableFrom(c)) {
      if(!src.isObject()) {
        throw new JsonException("Value not an object.");
      }
      JsonObject obj = src.asObject();
      T rv;
      if(c.equals(Map.class)) {
        rv = c.cast(new HashMap<String, Object>());
      } else {
        Constructor<T> cns = c.getConstructor();
        rv = cns.newInstance();
      }
      Iterator<Member> i = obj.iterator();
      while(i.hasNext()) {
        Member m = i.next();
        if(pc != null) {
          ((HashMap)rv).put(m.getName(), mappedJsonResponseUnsafe(m.getValue(), pc, null));
        } else {
          ((HashMap)rv).put(m.getName(), genericJsonResponse(m.getValue()));
        }
      }
      return rv;
    } else if(JsonBase.class.isAssignableFrom(c)) {
      if(!src.isObject()) {
        throw new JsonException("Value not an object.");
      }
      JsonObject obj = src.asObject();
      Constructor<T> cns = c.getConstructor();
      T rv = cns.newInstance();
      Field[] fields = c.getFields();
      for(Field f: fields) {
        if(!isMappedField(f)) {
          continue;
        }
        String name = f.getName();
        Class<?> typ = f.getType();
        if(!jsonHasField(obj, name)) {
          f.set(rv, null);
        } else {
          // For List/Map fields, pass the generic element/value type down so
          // nested members are mapped to the declared type, not generically.
          if(typ.equals(List.class)) {
            f.set(rv, mappedJsonResponseUnsafe(obj.get(name), typ,
                (Class)((ParameterizedType)f.getGenericType()).getActualTypeArguments()[0]));
          } else if(typ.equals(Map.class)) {
            f.set(rv, mappedJsonResponseUnsafe(obj.get(name), typ,
                (Class)(((ParameterizedType)f.getGenericType()).getActualTypeArguments()[1])));
          } else {
            f.set(rv, mappedJsonResponseUnsafe(obj.get(name), typ, null));
          }
        }
      }
      return rv;
    } else {
      throw new JsonException("Unsupported type: " + c.getSimpleName());
    }
  }
}
/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
==================================================================== */

package org.apache.poi.hdgf.streams;

import java.io.FileInputStream;
import java.io.InputStream;

import org.apache.poi.hdgf.chunks.Chunk;
import org.apache.poi.hdgf.chunks.ChunkFactory;
import org.apache.poi.hdgf.pointers.Pointer;
import org.apache.poi.hdgf.pointers.PointerFactory;
import org.apache.poi.poifs.filesystem.DocumentEntry;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.POIDataSamples;

/**
 * Tests stream parsing against a real Visio sample file, walking from the
 * trailer stream down through pointer-containing, chunk and string streams.
 */
public final class TestStreamComplex extends StreamTest {
    /** Raw bytes of the VisioDocument stream from the sample file. */
    private byte[] contents;
    /** Known offset of the trailer pointer within the document stream. */
    private int trailerPointerAt = 0x24;
    /** Known offset of the trailer data within the document stream. */
    private int trailerDataAt = 0x8a94;
    private ChunkFactory chunkFactory;
    private PointerFactory ptrFactory;

    protected void setUp() throws Exception {
        ptrFactory = new PointerFactory(11);
        chunkFactory = new ChunkFactory(11);

        InputStream is = POIDataSamples.getDiagramInstance().openResourceAsStream("Test_Visio-Some_Random_Text.vsd");
        POIFSFileSystem filesystem = new POIFSFileSystem(is);

        DocumentEntry docProps =
            (DocumentEntry)filesystem.getRoot().getEntry("VisioDocument");

        // Grab the document stream
        contents = new byte[docProps.getSize()];
        InputStream stream = filesystem.createDocumentInputStream("VisioDocument");
        try {
            // FIX: a single read() call may return fewer bytes than requested,
            // which would corrupt every offset-based assertion below. Loop
            // until the whole buffer is filled.
            int read = 0;
            while (read < contents.length) {
                int r = stream.read(contents, read, contents.length - read);
                if (r == -1) {
                    throw new IllegalStateException("Document stream ended after "
                            + read + " of " + contents.length + " bytes");
                }
                read += r;
            }
        } finally {
            // FIX: the document stream was previously never closed.
            stream.close();
        }
    }

    /**
     * Test creating the trailer, but not looking for children
     */
    public void testTrailer() {
        // Find the trailer
        Pointer trailerPtr = ptrFactory.createPointer(contents, trailerPointerAt);

        assertEquals(20, trailerPtr.getType());
        assertEquals(trailerDataAt, trailerPtr.getOffset());

        Stream stream = Stream.createStream(trailerPtr, contents, chunkFactory, ptrFactory);
        assertTrue(stream instanceof TrailerStream);
        TrailerStream ts = (TrailerStream)stream;

        // Child pointers are parsed eagerly, but their streams are not.
        assertNotNull(ts.getChildPointers());
        assertNull(ts.getPointedToStreams());
        assertEquals(20, ts.getChildPointers().length);
        assertEquals(0x16, ts.getChildPointers()[0].getType());
        assertEquals(0x17, ts.getChildPointers()[1].getType());
        assertEquals(0x17, ts.getChildPointers()[2].getType());
        assertEquals(0xff, ts.getChildPointers()[3].getType());
    }

    public void testChunks() {
        Pointer trailerPtr = ptrFactory.createPointer(contents, trailerPointerAt);
        TrailerStream ts = (TrailerStream)
            Stream.createStream(trailerPtr, contents, chunkFactory, ptrFactory);

        // Should be 7th one
        Pointer chunkPtr = ts.getChildPointers()[5];
        assertFalse(chunkPtr.destinationHasStrings());
        assertTrue(chunkPtr.destinationHasChunks());
        assertFalse(chunkPtr.destinationHasPointers());

        Stream stream = Stream.createStream(chunkPtr, contents, chunkFactory, ptrFactory);
        assertNotNull(stream);
        assertTrue(stream instanceof ChunkStream);

        // Now find the chunks within it
        ChunkStream cs = (ChunkStream)stream;
        cs.findChunks();
    }

    public void testStrings() {
        Pointer trailerPtr = ptrFactory.createPointer(contents, trailerPointerAt);
        TrailerStream ts = (TrailerStream)
            Stream.createStream(trailerPtr, contents, chunkFactory, ptrFactory);

        // Should be the 1st one
        Pointer stringPtr = ts.getChildPointers()[0];
        assertTrue(stringPtr.destinationHasStrings());
        assertFalse(stringPtr.destinationHasChunks());
        assertFalse(stringPtr.destinationHasPointers());

        Stream stream = Stream.createStream(stringPtr, contents, chunkFactory, ptrFactory);
        assertNotNull(stream);
        assertTrue(stream instanceof StringsStream);
    }

    public void testPointerToStrings() {
        // The stream at 0x347f has strings
        // The stream at 0x4312 has a pointer to 0x347f
        // The stream at 0x44d3 has a pointer to 0x4312
        //  (it's the 2nd one of 3, and the block is compressed)

        TestPointer ptr44d3 = new TestPointer(true, 0x44d3, 0x51, 0x4e, (short)0x56);
        ptr44d3.hasPointers = true;
        PointerContainingStream s44d3 = (PointerContainingStream)
            Stream.createStream(ptr44d3, contents, chunkFactory, ptrFactory);

        // Type: 0d  Addr: 014ff644  Offset: 4312  Len: 48  Format: 54  From: 44d3
        Pointer ptr4312 = s44d3.getChildPointers()[1];
        assertEquals(0x0d, ptr4312.getType());
        assertEquals(0x4312, ptr4312.getOffset());
        assertEquals(0x48, ptr4312.getLength());
        assertEquals(0x54, ptr4312.getFormat());
        assertTrue(ptr4312.destinationHasPointers());
        assertFalse(ptr4312.destinationHasStrings());

        PointerContainingStream s4312 = (PointerContainingStream)
            Stream.createStream(ptr4312, contents, chunkFactory, ptrFactory);

        // Check it has 0x347f
        // Type: 1f  Addr: 01540004  Offset: 347f  Len: 8e8  Format: 46  From: 4312
        assertEquals(2, s4312.getChildPointers().length);
        Pointer ptr347f = s4312.getChildPointers()[0];
        assertEquals(0x1f, ptr347f.getType());
        assertEquals(0x347f, ptr347f.getOffset());
        assertEquals(0x8e8, ptr347f.getLength());
        assertEquals(0x46, ptr347f.getFormat());
        assertFalse(ptr347f.destinationHasPointers());
        assertTrue(ptr347f.destinationHasStrings());

        // Find the children of 0x4312
        assertNull(s4312.getPointedToStreams());
        s4312.findChildren(contents);
        // Should have two, both strings
        assertNotNull(s4312.getPointedToStreams());
        assertEquals(2, s4312.getPointedToStreams().length);
        assertTrue(s4312.getPointedToStreams()[0] instanceof StringsStream);
        assertTrue(s4312.getPointedToStreams()[1] instanceof StringsStream);
    }

    public void testTrailerContents() {
        Pointer trailerPtr = ptrFactory.createPointer(contents, trailerPointerAt);
        TrailerStream ts = (TrailerStream)
            Stream.createStream(trailerPtr, contents, chunkFactory, ptrFactory);

        assertNotNull(ts.getChildPointers());
        assertNull(ts.getPointedToStreams());
        assertEquals(20, ts.getChildPointers().length);

        ts.findChildren(contents);

        assertNotNull(ts.getChildPointers());
        assertNotNull(ts.getPointedToStreams());
        assertEquals(20, ts.getChildPointers().length);
        assertEquals(20, ts.getPointedToStreams().length);

        // Step down:
        // 8 -> 4 -> 5 -> 1 -> 0 == String
        assertNotNull(ts.getPointedToStreams()[8]);
        assertTrue(ts.getPointedToStreams()[8] instanceof PointerContainingStream);

        PointerContainingStream s8 =
            (PointerContainingStream)ts.getPointedToStreams()[8];
        assertNotNull(s8.getPointedToStreams());
        assertNotNull(s8.getPointedToStreams()[4]);
        assertTrue(s8.getPointedToStreams()[4] instanceof PointerContainingStream);

        PointerContainingStream s84 =
            (PointerContainingStream)s8.getPointedToStreams()[4];
        assertNotNull(s84.getPointedToStreams());
        assertNotNull(s84.getPointedToStreams()[5]);
        assertTrue(s84.getPointedToStreams()[5] instanceof PointerContainingStream);

        PointerContainingStream s845 =
            (PointerContainingStream)s84.getPointedToStreams()[5];
        assertNotNull(s845.getPointedToStreams());
        assertNotNull(s845.getPointedToStreams()[1]);
        assertTrue(s845.getPointedToStreams()[1] instanceof PointerContainingStream);

        PointerContainingStream s8451 =
            (PointerContainingStream)s845.getPointedToStreams()[1];
        assertNotNull(s8451.getPointedToStreams());
        assertNotNull(s8451.getPointedToStreams()[0]);
        assertTrue(s8451.getPointedToStreams()[0] instanceof StringsStream);
        assertTrue(s8451.getPointedToStreams()[1] instanceof StringsStream);
    }

    public void testChunkWithText() {
        // Parent ChunkStream is at 0x7194
        // This is one of the last children of the trailer
        Pointer trailerPtr = ptrFactory.createPointer(contents, trailerPointerAt);
        TrailerStream ts = (TrailerStream)
            Stream.createStream(trailerPtr, contents, chunkFactory, ptrFactory);

        ts.findChildren(contents);

        assertNotNull(ts.getChildPointers());
        assertNotNull(ts.getPointedToStreams());
        assertEquals(20, ts.getChildPointers().length);
        assertEquals(20, ts.getPointedToStreams().length);

        assertEquals(0x7194, ts.getChildPointers()[13].getOffset());
        assertEquals(0x7194, ts.getPointedToStreams()[13].getPointer().getOffset());

        PointerContainingStream ps7194 = (PointerContainingStream)
            ts.getPointedToStreams()[13];

        // First child is at 0x64b3
        assertEquals(0x64b3, ps7194.getChildPointers()[0].getOffset());
        assertEquals(0x64b3, ps7194.getPointedToStreams()[0].getPointer().getOffset());

        ChunkStream cs = (ChunkStream)ps7194.getPointedToStreams()[0];

        // Should be 26bc bytes un-compressed
        assertEquals(0x26bc, cs.getStore().getContents().length);
        // And should have lots of children
        assertEquals(131, cs.getChunks().length);

        // One of which is Text
        boolean hasText = false;
        for(int i=0; i<cs.getChunks().length; i++) {
            if(cs.getChunks()[i].getName().equals("Text")) {
                hasText = true;
            }
        }
        assertTrue(hasText);

        // Which is the 72nd command
        assertEquals("Text", cs.getChunks()[72].getName());
        Chunk text = cs.getChunks()[72];
        assertEquals("Text", text.getName());

        // Which contains our text
        assertEquals(1, text.getCommands().length);
        assertEquals("Test View\n", text.getCommands()[0].getValue());

        // Almost at the end is some more text
        assertEquals("Text", cs.getChunks()[128].getName());
        text = cs.getChunks()[128];
        assertEquals("Text", text.getName());
        assertEquals(1, text.getCommands().length);
        assertEquals("Some random text, on a page\n", text.getCommands()[0].getValue());
    }
}
/* * $Id$ * * SARL is an general-purpose agent programming language. * More details on http://www.sarl.io * * Copyright (C) 2014-2021 the original authors or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.sarl.lang.mwe2.externalspec; import java.util.Map; import java.util.Set; import java.util.TreeMap; import java.util.TreeSet; import com.google.inject.Injector; import org.eclipse.xtext.util.Strings; import org.eclipse.xtext.xbase.lib.Pure; import org.eclipse.xtext.xtext.generator.IGuiceAwareGeneratorComponent; import io.sarl.lang.mwe2.keywords.GrammarKeywordAccessConfig; /** * The configuration for the external highlighting tools. * * @author $Author: sgalland$ * @version $FullVersion$ * @mavengroupid $GroupId$ * @mavenartifactid $ArtifactId$ */ public class ExternalHighlightingConfig implements IGuiceAwareGeneratorComponent { /** Do not add the native types by default. */ public static final boolean DEFAULT_ADD_NATIVE_TYPES = false; /** Do not use the keywords and ignored keywords from the {@link GrammarKeywordAccessConfig}. 
*/
public static final boolean DEFAULT_INHERIT_GRAMMAR_KEYWORD_ACCESS = false;

// Extra literals/keywords/punctuation contributed by the MWE2 workflow, kept sorted.
private final Set<String> additionalLiterals = new TreeSet<>();

private final Set<String> additionalKeywords = new TreeSet<>();

private final Set<String> excludedKeywords = new TreeSet<>();

private final Set<String> additionalPunctuation = new TreeSet<>();

private final Set<String> specialKeywords = new TreeSet<>();

private final Set<String> typeDeclarationKeywords = new TreeSet<>();

// Tri-state flags: null means "use the corresponding DEFAULT_* constant".
private Boolean addNativeTypes;

private Boolean inheritFromGrammarKeywordAccess;

private ColorConfig colors = new ColorConfig();

@Override
public void initialize(Injector injector) {
    injector.injectMembers(this);
}

/** Indicates if the keywords and ignored keywords from the {@link GrammarKeywordAccessConfig}
 * are used.
 *
 * @param inheritFromGrammarKeywordAccess <code>true</code> for using inherited configuration.
 */
public void setInheritFromGrammarKeywordAccesss(boolean inheritFromGrammarKeywordAccess) {
    this.inheritFromGrammarKeywordAccess = inheritFromGrammarKeywordAccess;
}

/** Replies if the keywords and ignored keywords from the {@link GrammarKeywordAccessConfig}
 * are used.
 *
 * @return <code>true</code> for using inherited configuration.
 */
@Pure
public boolean getInheritFromGrammarKeywordAccesss() {
    if (this.inheritFromGrammarKeywordAccess == null) {
        return DEFAULT_INHERIT_GRAMMAR_KEYWORD_ACCESS;
    }
    return this.inheritFromGrammarKeywordAccess.booleanValue();
}

/** Indicates if the native types must be added in the keyword list.
 *
 * @param addNativeTypes <code>true</code> for adding the native types.
 */
public void setAddNativeTypes(boolean addNativeTypes) {
    this.addNativeTypes = addNativeTypes;
}

/** Replies if the native types must be added in the keyword list.
 *
 * @return <code>true</code> for adding the native types.
 */
@Pure
public boolean getAddNativeTypes() {
    if (this.addNativeTypes == null) {
        return DEFAULT_ADD_NATIVE_TYPES;
    }
    return this.addNativeTypes.booleanValue();
}

/** Add a literal that is not inside the SARL grammar.
 *
 * @param literal the additional literal.
 */
public void addLiteral(String literal) {
    if (!Strings.isEmpty(literal)) {
        this.additionalLiterals.add(literal);
    }
}

/** Replies the literals that are not inside the SARL grammar.
 *
 * @return the additional literals.
 */
@Pure
public Set<String> getLiterals() {
    return this.additionalLiterals;
}

/** Add a keyword that is not inside the SARL grammar.
 *
 * @param keyword the additional keyword.
 */
public void addKeyword(String keyword) {
    if (!Strings.isEmpty(keyword)) {
        this.additionalKeywords.add(keyword);
    }
}

/** Replies the keywords that are not inside the SARL grammar.
 *
 * @return the additional keywords.
 */
@Pure
public Set<String> getKeywords() {
    return this.additionalKeywords;
}

/** Ignore a keyword.
 *
 * @param keyword the keyword to ignore.
 */
public void addIgnoreKeyword(String keyword) {
    if (!Strings.isEmpty(keyword)) {
        this.excludedKeywords.add(keyword);
    }
}

/** Replies the keywords to ignore.
 *
 * @return the keywords to ignore.
 */
@Pure
public Set<String> getIgnoredKeywords() {
    return this.excludedKeywords;
}

/** Mark a keyword as special. The meaning of a special keyword depends on the highlighting
 * generator.
 *
 * <p>The keyword will be ignored if it is not a keyword from the grammars or added with
 * {@link #addKeyword(String)}.
 *
 * <p>A special keyword cannot be a type declaration keyword.
 *
 * @param keyword the keyword to mark as special.
 */
public void addSpecialKeyword(String keyword) {
    if (!Strings.isEmpty(keyword)) {
        this.specialKeywords.add(keyword);
    }
}

/** Replies the special keywords. The meaning of a special keyword depends on the highlighting
 * generator.
 *
 * @return the special keywords.
 */
@Pure
public Set<String> getSpecialKeywords() {
    return this.specialKeywords;
}

/** Mark a keyword as a type declaration keyword. The meaning of a type declaration keyword depends on the highlighting
 * generator.
 *
 * <p>The keyword will be ignored if it is not a keyword from the grammars or added with
 * {@link #addKeyword(String)}.
 *
 * <p>A type declaration keyword cannot be a special keyword.
 *
 * @param keyword the keyword to mark as a type declaration keyword.
 */
public void addTypeDeclarationKeyword(String keyword) {
    if (!Strings.isEmpty(keyword)) {
        this.typeDeclarationKeywords.add(keyword);
    }
}

/** Replies the type declaration keywords. The meaning of a type declaration keyword depends on the highlighting
 * generator.
 *
 * @return the type declaration keywords.
 */
@Pure
public Set<String> getTypeDeclarationKeywords() {
    return this.typeDeclarationKeywords;
}

/** Add a punctuation symbol that is not inside the SARL grammar.
 *
 * @param symbol the additional punctuation symbol.
 */
public void addPunctuation(String symbol) {
    if (!Strings.isEmpty(symbol)) {
        this.additionalPunctuation.add(symbol);
    }
}

/** Replies the punctuation symbols that are not inside the SARL grammar.
 *
 * @return the additional punctuation symbols.
 */
@Pure
public Set<String> getPunctuation() {
    return this.additionalPunctuation;
}

/** Set the color configuration.
 *
 * @param colors the color configuration.
 */
public void setColors(ColorConfig colors) {
    this.colors = colors;
}

/** Replies the color configuration.
 *
 * @return the color configuration, or {@code null}.
 */
@Pure
public ColorConfig getColors() {
    return this.colors;
}

/**
 * Color for LaTeX.
 *
 * @author $Author: sgalland$
 * @version $FullVersion$
 * @mavengroupid $GroupId$
 * @mavenartifactid $ArtifactId$
 */
public static class Color {

    private static final int MIN_COLOR = 0;

    private static final int MAX_COLOR = 255;

    private String name = "SARLdefault"; //$NON-NLS-1$

    private int red;

    private int green;

    private int blue;

    /** Change the name of the color.
* * @param name the name of the color. */ public void setName(String name) { this.name = name; } /** Replies the name of the color. * * @return the name of the color. */ public String getName() { return this.name; } /** Set the color components. * * <p>The Red-Green-Blue components are integers between 0 (inclusive) and 255 (inclusive). * The components are separated by any character that is not a digit. * * @param rgb the color. */ public void setRgb(String rgb) { final String[] components = rgb.split("[^0-9]+"); //$NON-NLS-1$ this.red = MIN_COLOR; if (components.length > 0) { this.red = Integer.valueOf(components[0]); if (this.red < MIN_COLOR || this.red > MAX_COLOR) { throw new NumberFormatException("red is not between " //$NON-NLS-1$ + MAX_COLOR + " and " + MAX_COLOR); //$NON-NLS-1$ } } this.green = MIN_COLOR; if (components.length > 1) { this.green = Integer.valueOf(components[1]); if (this.green < MIN_COLOR || this.green > MAX_COLOR) { throw new NumberFormatException("green is not between " //$NON-NLS-1$ + MIN_COLOR + " and " + MAX_COLOR); //$NON-NLS-1$ } } this.blue = MIN_COLOR; if (components.length > 1) { this.blue = Integer.valueOf(components[2]); if (this.blue < MIN_COLOR || this.blue > MAX_COLOR) { throw new NumberFormatException("blue is not between " //$NON-NLS-1$ + MIN_COLOR + " and " + MAX_COLOR); //$NON-NLS-1$ } } } /** Replies the red component. * * @return the red. */ public int getRed() { return this.red; } /** Replies the green component. * * @return the green. */ public int getGreen() { return this.green; } /** Replies the blue component. * * @return the blue. */ public int getBlue() { return this.blue; } } /** * Color configuration for LaTeX. 
* * @author $Author: sgalland$ * @version $FullVersion$ * @mavengroupid $GroupId$ * @mavenartifactid $ArtifactId$ */ public static class ColorConfig { private static final String DEFAULT_COLOR = "black"; //$NON-NLS-1$ private final Map<String, Color> colors = new TreeMap<>(); private String commentColor; private String stringColor; private String keywordColor; private String identifierColor; private String lineNumberColor; /** Set a color. * * @param color the color. */ public void setColor(Color color) { if (color != null) { this.colors.put(color.getName(), color); } } /** Replies the colors. * * @return the colors. */ public Map<String, Color> getColors() { return this.colors; } /** Set the color of the comments. * * @param name the name of the color to use. */ public void setCommentColor(String name) { this.commentColor = name; } /** Replies the color of the comments. * * @return the name of the color to use. */ public String getCommentColor() { if (Strings.isEmpty(this.commentColor)) { return DEFAULT_COLOR; } return this.commentColor; } /** Set the color of the strings. * * @param name the name of the color to use. */ public void setStringColor(String name) { this.stringColor = name; } /** Replies the color of the strings. * * @return the name of the color to use. */ public String getStringColor() { if (Strings.isEmpty(this.stringColor)) { return DEFAULT_COLOR; } return this.stringColor; } /** Set the color of the keywords. * * @param name the name of the color to use. */ public void setKeywordColor(String name) { this.keywordColor = name; } /** Replies the color of the keywords. * * @return the name of the color to use. */ public String getKeywordColor() { if (Strings.isEmpty(this.keywordColor)) { return DEFAULT_COLOR; } return this.keywordColor; } /** Set the color of the identifiers. * * @param name the name of the color to use. */ public void setIdentifierColor(String name) { this.identifierColor = name; } /** Replies the color of the identifiers. 
* * @return the name of the color to use. */ public String getIdentifierColor() { if (Strings.isEmpty(this.identifierColor)) { return DEFAULT_COLOR; } return this.identifierColor; } /** Set the color of the line numbers. * * @param name the name of the color to use. */ public void setLineNumberColor(String name) { this.lineNumberColor = name; } /** Replies the color of the line numbers. * * @return the name of the color to use. */ public String getLineNumberColor() { if (Strings.isEmpty(this.lineNumberColor)) { return DEFAULT_COLOR; } return this.lineNumberColor; } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache license, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the license for the specific language governing permissions and
 * limitations under the license.
 */
package org.apache.logging.log4j.util;

import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.net.URL;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.Collection;
import java.util.Enumeration;
import java.util.LinkedHashSet;

/**
 * <em>Consider this class private.</em> Utility class for ClassLoaders.
 * @see ClassLoader
 * @see RuntimePermission
 * @see Thread#getContextClassLoader()
 * @see ClassLoader#getSystemClassLoader()
 */
public final class LoaderUtil {

    // Utility class: not instantiable.
    private LoaderUtil() {}

    /**
     * System property to set to ignore the thread context ClassLoader.
     *
     * @since 2.1
     */
    public static final String IGNORE_TCCL_PROPERTY = "log4j.ignoreTCL";

    // Captured once at class-initialization time; decides below whether ClassLoader
    // access must go through a privileged action.
    private static final SecurityManager SECURITY_MANAGER = System.getSecurityManager();

    // this variable must be lazily loaded; otherwise, we get a nice circular class loading problem where LoaderUtil
    // wants to use PropertiesUtil, but then PropertiesUtil wants to use LoaderUtil.
    private static Boolean ignoreTCCL;

    // True when a SecurityManager is installed AND it denies the "getClassLoader"
    // RuntimePermission; computed once in the static initializer below.
    private static final boolean GET_CLASS_LOADER_DISABLED;

    // Shared privileged action; stateless, so a single instance is safe to reuse.
    private static final PrivilegedAction<ClassLoader> TCCL_GETTER = new ThreadContextClassLoaderGetter();

    static {
        if (SECURITY_MANAGER != null) {
            boolean getClassLoaderDisabled;
            try {
                // Probe the permission once up front rather than on every lookup.
                SECURITY_MANAGER.checkPermission(new RuntimePermission("getClassLoader"));
                getClassLoaderDisabled = false;
            } catch (final SecurityException ignored) {
                // Deliberately swallowed: the denial itself is the answer we probed for.
                getClassLoaderDisabled = true;
            }
            GET_CLASS_LOADER_DISABLED = getClassLoaderDisabled;
        } else {
            GET_CLASS_LOADER_DISABLED = false;
        }
    }

    /**
     * Gets the current Thread ClassLoader. Returns the system ClassLoader if the TCCL is {@code null}. If the
     * system ClassLoader is {@code null} as well, then the ClassLoader for this class is returned.
     * If running with a {@link SecurityManager} that does not allow access to the Thread ClassLoader or system
     * ClassLoader, then the ClassLoader for this class is returned.
     *
     * @return the current ThreadContextClassLoader.
     */
    public static ClassLoader getThreadContextClassLoader() {
        if (GET_CLASS_LOADER_DISABLED) {
            // we can at least get this class's ClassLoader regardless of security context
            // however, if this is null, there's really no option left at this point
            return LoaderUtil.class.getClassLoader();
        }
        // Only pay the AccessController cost when a SecurityManager is installed.
        return SECURITY_MANAGER == null ? TCCL_GETTER.run() : AccessController.doPrivileged(TCCL_GETTER);
    }

    /** Privileged lookup of the TCCL with fallbacks to this class's loader and the system loader. */
    private static class ThreadContextClassLoaderGetter implements PrivilegedAction<ClassLoader> {
        @Override
        public ClassLoader run() {
            final ClassLoader cl = Thread.currentThread().getContextClassLoader();
            if (cl != null) {
                return cl;
            }
            // Fallback chain: this class's loader, then (if permitted) the system loader.
            final ClassLoader ccl = LoaderUtil.class.getClassLoader();
            return ccl == null && !GET_CLASS_LOADER_DISABLED ? ClassLoader.getSystemClassLoader() : ccl;
        }
    }

    /**
     * Loads a class by name. This method respects the {@link #IGNORE_TCCL_PROPERTY} Log4j property. If this property
     * is specified and set to anything besides {@code false}, then the default ClassLoader will be used.
* * @param className The class name. * @return the Class for the given name. * @throws ClassNotFoundException if the specified class name could not be found * @since 2.1 */ public static Class<?> loadClass(final String className) throws ClassNotFoundException { if (isIgnoreTccl()) { return Class.forName(className); } try { return getThreadContextClassLoader().loadClass(className); } catch (final Throwable ignored) { return Class.forName(className); } } /** * Loads and instantiates a Class using the default constructor. * * @param className The class name. * @return new instance of the class. * @throws ClassNotFoundException if the class isn't available to the usual ClassLoaders * @throws IllegalAccessException if the class can't be instantiated through a public constructor * @throws InstantiationException if there was an exception whilst instantiating the class * @throws NoSuchMethodException if there isn't a no-args constructor on the class * @throws InvocationTargetException if there was an exception whilst constructing the class * @since 2.1 */ public static Object newInstanceOf(final String className) throws ClassNotFoundException, IllegalAccessException, InstantiationException, NoSuchMethodException, InvocationTargetException { final Class<?> clazz = loadClass(className); try { return clazz.getConstructor().newInstance(); } catch (final NoSuchMethodException ignored) { // FIXME: looking at the code for Class.newInstance(), this seems to do the same thing as above return clazz.newInstance(); } } /** * Loads and instantiates a derived class using its default constructor. * * @param className The class name. * @param clazz The class to cast it to. * @param <T> The type of the class to check. 
* @return new instance of the class cast to {@code T} * @throws ClassNotFoundException if the class isn't available to the usual ClassLoaders * @throws IllegalAccessException if the class can't be instantiated through a public constructor * @throws InstantiationException if there was an exception whilst instantiating the class * @throws NoSuchMethodException if there isn't a no-args constructor on the class * @throws InvocationTargetException if there was an exception whilst constructing the class * @throws ClassCastException if the constructed object isn't type compatible with {@code T} * @since 2.1 */ public static <T> T newCheckedInstanceOf(final String className, final Class<T> clazz) throws ClassNotFoundException, NoSuchMethodException, InvocationTargetException, InstantiationException, IllegalAccessException { return clazz.cast(newInstanceOf(className)); } private static boolean isIgnoreTccl() { // we need to lazily initialize this, but concurrent access is not an issue if (ignoreTCCL == null) { final String ignoreTccl = PropertiesUtil.getProperties().getStringProperty(IGNORE_TCCL_PROPERTY, null); ignoreTCCL = ignoreTccl != null && !"false".equalsIgnoreCase(ignoreTccl.trim()); } return ignoreTCCL; } /** * Finds classpath {@linkplain URL resources}. * * @param resource the name of the resource to find. * @return a Collection of URLs matching the resource name. If no resources could be found, then this will be empty. * @since 2.1 */ public static Collection<URL> findResources(final String resource) { final Collection<UrlResource> urlResources = findUrlResources(resource); final Collection<URL> resources = new LinkedHashSet<>(urlResources.size()); for (final UrlResource urlResource : urlResources) { resources.add(urlResource.getUrl()); } return resources; } static Collection<UrlResource> findUrlResources(final String resource) { final ClassLoader[] candidates = { getThreadContextClassLoader(), LoaderUtil.class.getClassLoader(), GET_CLASS_LOADER_DISABLED ? 
null : ClassLoader.getSystemClassLoader() }; final Collection<UrlResource> resources = new LinkedHashSet<>(); for (final ClassLoader cl : candidates) { if (cl != null) { try { final Enumeration<URL> resourceEnum = cl.getResources(resource); while (resourceEnum.hasMoreElements()) { resources.add(new UrlResource(cl, resourceEnum.nextElement())); } } catch (final IOException e) { e.printStackTrace(); } } } return resources; } /** * {@link URL} and {@link ClassLoader} pair. */ static class UrlResource { private final ClassLoader classLoader; private final URL url; public UrlResource(final ClassLoader classLoader, final URL url) { this.classLoader = classLoader; this.url = url; } public ClassLoader getClassLoader() { return classLoader; } public URL getUrl() { return url; } @Override public boolean equals(final Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } final UrlResource that = (UrlResource) o; if (classLoader != null ? !classLoader.equals(that.classLoader) : that.classLoader != null) { return false; } if (url != null ? !url.equals(that.url) : that.url != null) { return false; } return true; } @Override public int hashCode() { int result = classLoader != null ? classLoader.hashCode() : 0; result = 31 * result + (url != null ? url.hashCode() : 0); return result; } } }
/* * Copyright (C) 2008 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package com.google.common.base; import static com.google.common.base.Preconditions.checkNotNull; import com.google.common.annotations.Beta; import com.google.common.annotations.GwtCompatible; import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.io.Serializable; import java.util.Iterator; import javax.annotation.Nullable; /** * A function from {@code A} to {@code B} with an associated <i>reverse</i> function from {@code B} * to {@code A}; used for converting back and forth between <i>different representations of the same * information</i>. * * <h3>Invertibility</h3> * * <p>The reverse operation <b>may</b> be a strict <i>inverse</i> (meaning that {@code * converter.reverse().convert(converter.convert(a)).equals(a)} is always true). However, it is very * common (perhaps <i>more</i> common) for round-trip conversion to be <i>lossy</i>. Consider an * example round-trip using {@link com.google.common.primitives.Doubles#stringConverter}: * * <ol> * <li>{@code stringConverter().convert("1.00")} returns the {@code Double} value {@code 1.0} * <li>{@code stringConverter().reverse().convert(1.0)} returns the string {@code "1.0"} -- * <i>not</i> the same string ({@code "1.00"}) we started with * </ol> * * <p>Note that it should still be the case that the round-tripped and original objects are * <i>similar</i>. 
* * <h3>Nullability</h3> * * <p>A converter always converts {@code null} to {@code null} and non-null references to non-null * references. It would not make sense to consider {@code null} and a non-null reference to be * "different representations of the same information", since one is distinguishable from * <i>missing</i> information and the other is not. The {@link #convert} method handles this null * behavior for all converters; implementations of {@link #doForward} and {@link #doBackward} are * guaranteed to never be passed {@code null}, and must never return {@code null}. * * * <h3>Common ways to use</h3> * * <p>Getting a converter: * * <ul> * <li>Use a provided converter implementation, such as {@link Enums#stringConverter}, * {@link com.google.common.primitives.Ints#stringConverter Ints.stringConverter} or the * {@linkplain #reverse reverse} views of these. * <li>Convert between specific preset values using * {@link com.google.common.collect.Maps#asConverter Maps.asConverter}. For example, use this to * create a "fake" converter for a unit test. It is unnecessary (and confusing) to <i>mock</i> * the {@code Converter} type using a mocking framework. * <li>Extend this class and implement its {@link #doForward} and {@link #doBackward} methods. * <li>If using Java 8, you may prefer to pass two lambda expressions or method references to the * {@link #from from} factory method. * </ul> * * <p>Using a converter: * * <ul> * <li>Convert one instance in the "forward" direction using {@code converter.convert(a)}. * <li>Convert multiple instances "forward" using {@code converter.convertAll(as)}. * <li>Convert in the "backward" direction using {@code converter.reverse().convert(b)} or {@code * converter.reverse().convertAll(bs)}. * <li>Use {@code converter} or {@code converter.reverse()} anywhere a {@link Function} is accepted * <li><b>Do not</b> call {@link #doForward} or {@link #doBackward} directly; these exist only to be * overridden. 
* </ul> * * <h3>Example</h3> * * <pre> * return new Converter&lt;Integer, String&gt;() { * protected String doForward(Integer i) { * return Integer.toHexString(i); * } * * protected Integer doBackward(String s) { * return parseUnsignedInt(s, 16); * } * };</pre> * * <p>An alternative using Java 8:<pre> {@code * return Converter.from( * Integer::toHexString, * s -> parseUnsignedInt(s, 16));}</pre> * * @author Mike Ward * @author Kurt Alfred Kluever * @author Gregory Kick * @since 16.0 */ @Beta @GwtCompatible public abstract class Converter<A, B> implements Function<A, B> { private final boolean handleNullAutomatically; // We lazily cache the reverse view to avoid allocating on every call to reverse(). private transient Converter<B, A> reverse; /** Constructor for use by subclasses. */ protected Converter() { this(true); } /** * Constructor used only by {@code LegacyConverter} to suspend automatic null-handling. */ Converter(boolean handleNullAutomatically) { this.handleNullAutomatically = handleNullAutomatically; } // SPI methods (what subclasses must implement) /** * Returns a representation of {@code a} as an instance of type {@code B}. If {@code a} cannot be * converted, an unchecked exception (such as {@link IllegalArgumentException}) should be thrown. * * @param a the instance to convert; will never be null * @return the converted instance; <b>must not</b> be null */ protected abstract B doForward(A a); /** * Returns a representation of {@code b} as an instance of type {@code A}. If {@code b} cannot be * converted, an unchecked exception (such as {@link IllegalArgumentException}) should be thrown. * * @param b the instance to convert; will never be null * @return the converted instance; <b>must not</b> be null * @throws UnsupportedOperationException if backward conversion is not implemented; this should be * very rare. 
Note that if backward conversion is not only unimplemented but
   *     unimplement<i>able</i> (for example, consider a {@code Converter<Chicken, ChickenNugget>}),
   *     then this is not logically a {@code Converter} at all, and should just implement {@link
   *     Function}.
   */
  protected abstract A doBackward(B b);

  // API (consumer-side) methods

  /**
   * Returns a representation of {@code a} as an instance of type {@code B}.
   *
   * @return the converted value; is null <i>if and only if</i> {@code a} is null
   */
  @Nullable
  @CanIgnoreReturnValue
  public final B convert(@Nullable A a) {
    return correctedDoForward(a);
  }

  // Applies the null-handling policy around doForward; overridden by the view classes below.
  @Nullable
  B correctedDoForward(@Nullable A a) {
    if (handleNullAutomatically) {
      // TODO(kevinb): we shouldn't be checking for a null result at runtime. Assert?
      return a == null ? null : checkNotNull(doForward(a));
    } else {
      return doForward(a);
    }
  }

  // Applies the null-handling policy around doBackward; overridden by the view classes below.
  @Nullable
  A correctedDoBackward(@Nullable B b) {
    if (handleNullAutomatically) {
      // TODO(kevinb): we shouldn't be checking for a null result at runtime. Assert?
      return b == null ? null : checkNotNull(doBackward(b));
    } else {
      return doBackward(b);
    }
  }

  /**
   * Returns an iterable that applies {@code convert} to each element of {@code fromIterable}. The
   * conversion is done lazily.
   *
   * <p>The returned iterable's iterator supports {@code remove()} if the input iterator does. After
   * a successful {@code remove()} call, {@code fromIterable} no longer contains the corresponding
   * element.
   */
  @CanIgnoreReturnValue
  public Iterable<B> convertAll(final Iterable<? extends A> fromIterable) {
    checkNotNull(fromIterable, "fromIterable");
    return new Iterable<B>() {
      @Override
      public Iterator<B> iterator() {
        return new Iterator<B>() {
          private final Iterator<? extends A> fromIterator = fromIterable.iterator();

          @Override
          public boolean hasNext() {
            return fromIterator.hasNext();
          }

          @Override
          public B next() {
            return convert(fromIterator.next());
          }

          @Override
          public void remove() {
            fromIterator.remove();
          }
        };
      }
    };
  }

  /**
   * Returns the reversed view of this converter, which converts {@code this.convert(a)} back to a
   * value roughly equivalent to {@code a}.
   *
   * <p>The returned converter is serializable if {@code this} converter is.
   */
  // TODO(kak): Make this method final
  @CanIgnoreReturnValue
  public Converter<B, A> reverse() {
    // Benign race: `reverse` is not volatile, so two threads may each build a
    // ReverseConverter; the results are interchangeable and one write simply wins.
    Converter<B, A> result = reverse;
    return (result == null) ? reverse = new ReverseConverter<A, B>(this) : result;
  }

  /** Reversed view: delegates forward to the original's backward and vice versa. */
  private static final class ReverseConverter<A, B> extends Converter<B, A> implements Serializable {
    final Converter<A, B> original;

    ReverseConverter(Converter<A, B> original) {
      this.original = original;
    }

    /*
     * These gymnastics are a little confusing. Basically this class has neither legacy nor
     * non-legacy behavior; it just needs to let the behavior of the backing converter shine
     * through. So, we override the correctedDo* methods, after which the do* methods should never
     * be reached.
     */

    @Override
    protected A doForward(B b) {
      throw new AssertionError();
    }

    @Override
    protected B doBackward(A a) {
      throw new AssertionError();
    }

    @Override
    @Nullable
    A correctedDoForward(@Nullable B b) {
      return original.correctedDoBackward(b);
    }

    @Override
    @Nullable
    B correctedDoBackward(@Nullable A a) {
      return original.correctedDoForward(a);
    }

    @Override
    public Converter<A, B> reverse() {
      // Reversing a reversed view recovers the original converter.
      return original;
    }

    @Override
    public boolean equals(@Nullable Object object) {
      if (object instanceof ReverseConverter) {
        ReverseConverter<?, ?> that = (ReverseConverter<?, ?>) object;
        return this.original.equals(that.original);
      }
      return false;
    }

    @Override
    public int hashCode() {
      // Bitwise complement keeps the hash distinct from (but derived from) the original's.
      return ~original.hashCode();
    }

    @Override
    public String toString() {
      return original + ".reverse()";
    }

    private static final long serialVersionUID = 0L;
  }

  /**
   * Returns a converter whose {@code convert} method applies {@code secondConverter} to the result
   * of this converter. Its {@code reverse} method applies the converters in reverse order.
   *
   * <p>The returned converter is serializable if {@code this} converter and {@code secondConverter}
   * are.
   */
  public final <C> Converter<A, C> andThen(Converter<B, C> secondConverter) {
    return doAndThen(secondConverter);
  }

  /**
   * Package-private non-final implementation of andThen() so only we can override it.
   */
  <C> Converter<A, C> doAndThen(Converter<B, C> secondConverter) {
    return new ConverterComposition<A, B, C>(this, checkNotNull(secondConverter));
  }

  /** Composition view: forward is first-then-second; backward is second-then-first. */
  private static final class ConverterComposition<A, B, C> extends Converter<A, C>
      implements Serializable {
    final Converter<A, B> first;
    final Converter<B, C> second;

    ConverterComposition(Converter<A, B> first, Converter<B, C> second) {
      this.first = first;
      this.second = second;
    }

    /*
     * These gymnastics are a little confusing. Basically this class has neither legacy nor
     * non-legacy behavior; it just needs to let the behaviors of the backing converters shine
     * through (which might even differ from each other!). So, we override the correctedDo* methods,
     * after which the do* methods should never be reached.
     */

    @Override
    protected C doForward(A a) {
      throw new AssertionError();
    }

    @Override
    protected A doBackward(C c) {
      throw new AssertionError();
    }

    @Override
    @Nullable
    C correctedDoForward(@Nullable A a) {
      return second.correctedDoForward(first.correctedDoForward(a));
    }

    @Override
    @Nullable
    A correctedDoBackward(@Nullable C c) {
      return first.correctedDoBackward(second.correctedDoBackward(c));
    }

    @Override
    public boolean equals(@Nullable Object object) {
      if (object instanceof ConverterComposition) {
        ConverterComposition<?, ?, ?> that = (ConverterComposition<?, ?, ?>) object;
        return this.first.equals(that.first) && this.second.equals(that.second);
      }
      return false;
    }

    @Override
    public int hashCode() {
      return 31 * first.hashCode() + second.hashCode();
    }

    @Override
    public String toString() {
      return first + ".andThen(" + second + ")";
    }

    private static final long serialVersionUID = 0L;
  }

  /**
   * @deprecated Provided to satisfy the {@code Function} interface; use {@link #convert} instead.
   */
  @Deprecated
  @Override
  @Nullable
  @CanIgnoreReturnValue
  public final B apply(@Nullable A a) {
    return convert(a);
  }

  /**
   * Indicates whether another object is equal to this converter.
   *
   * <p>Most implementations will have no reason to override the behavior of {@link Object#equals}.
   * However, an implementation may also choose to return {@code true} whenever {@code object} is a
   * {@link Converter} that it considers <i>interchangeable</i> with this one. "Interchangeable"
   * <i>typically</i> means that {@code Objects.equal(this.convert(a), that.convert(a))} is true for
   * all {@code a} of type {@code A} (and similarly for {@code reverse}). Note that a {@code false}
   * result from this method does not imply that the converters are known <i>not</i> to be
   * interchangeable.
*/ @Override public boolean equals(@Nullable Object object) { return super.equals(object); } // Static converters /** * Returns a converter based on <i>existing</i> forward and backward functions. Note that it is * unnecessary to create <i>new</i> classes implementing {@code Function} just to pass them in * here. Instead, simply subclass {@code Converter} and implement its {@link #doForward} and * {@link #doBackward} methods directly. * * <p>These functions will never be passed {@code null} and must not under any circumstances * return {@code null}. If a value cannot be converted, the function should throw an unchecked * exception (typically, but not necessarily, {@link IllegalArgumentException}). * * <p>The returned converter is serializable if both provided functions are. * * @since 17.0 */ public static <A, B> Converter<A, B> from( Function<? super A, ? extends B> forwardFunction, Function<? super B, ? extends A> backwardFunction) { return new FunctionBasedConverter<A, B>(forwardFunction, backwardFunction); } private static final class FunctionBasedConverter<A, B> extends Converter<A, B> implements Serializable { private final Function<? super A, ? extends B> forwardFunction; private final Function<? super B, ? extends A> backwardFunction; private FunctionBasedConverter( Function<? super A, ? extends B> forwardFunction, Function<? super B, ? 
extends A> backwardFunction) { this.forwardFunction = checkNotNull(forwardFunction); this.backwardFunction = checkNotNull(backwardFunction); } @Override protected B doForward(A a) { return forwardFunction.apply(a); } @Override protected A doBackward(B b) { return backwardFunction.apply(b); } @Override public boolean equals(@Nullable Object object) { if (object instanceof FunctionBasedConverter) { FunctionBasedConverter<?, ?> that = (FunctionBasedConverter<?, ?>) object; return this.forwardFunction.equals(that.forwardFunction) && this.backwardFunction.equals(that.backwardFunction); } return false; } @Override public int hashCode() { return forwardFunction.hashCode() * 31 + backwardFunction.hashCode(); } @Override public String toString() { return "Converter.from(" + forwardFunction + ", " + backwardFunction + ")"; } } /** * Returns a serializable converter that always converts or reverses an object to itself. */ @SuppressWarnings("unchecked") // implementation is "fully variant" public static <T> Converter<T, T> identity() { return (IdentityConverter<T>) IdentityConverter.INSTANCE; } /** * A converter that always converts or reverses an object to itself. Note that T is now a * "pass-through type". */ private static final class IdentityConverter<T> extends Converter<T, T> implements Serializable { static final IdentityConverter INSTANCE = new IdentityConverter(); @Override protected T doForward(T t) { return t; } @Override protected T doBackward(T t) { return t; } @Override public IdentityConverter<T> reverse() { return this; } @Override <S> Converter<T, S> doAndThen(Converter<T, S> otherConverter) { return checkNotNull(otherConverter, "otherConverter"); } /* * We *could* override convertAll() to return its input, but it's a rather pointless * optimization and opened up a weird type-safety problem. 
*/ @Override public String toString() { return "Converter.identity()"; } private Object readResolve() { return INSTANCE; } private static final long serialVersionUID = 0L; } }
/**
 * Copyright 2011-2013 FoundationDB, LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/* The original from which this derives bore the following: */

/*

   Derby - Class org.apache.derby.impl.sql.compile.AlterTableNode

   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to you under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

 */

package com.foundationdb.sql.parser;

import com.foundationdb.sql.StandardException;

/**
 * An AlterTableNode represents a DDL statement that alters a table.
 * It contains the name of the table to be altered.
 *
 */

public class AlterTableNode extends DDLStatementNode
{
    // The alter table action
    public TableElementList tableElementList = null;

    /**
     * updateStatistics will indicate that we are here for updating the
     * statistics. It could be statistics of just one index or all the
     * indexes on a given table.
     */
    private boolean updateStatistics = false;
    /**
     * The flag updateStatisticsAll will tell if we are going to update the
     * statistics of all indexes or just one index on a table.
     */
    private boolean updateStatisticsAll = false;
    /**
     * If statistic is getting updated for just one index, then
     * indexNameForUpdateStatistics will tell the name of the specific index
     * whose statistics need to be updated.
     */
    private String indexNameForUpdateStatistics;

    public boolean compressTable = false;
    public boolean sequential = false;
    // The following three (purge, defragment and truncateEndOfTable) apply for
    // inplace compress.
    public boolean purge = false;
    public boolean defragment = false;
    public boolean truncateEndOfTable = false;

    public int behavior;        // StatementType.DROP_XXX for TRUNCATE TABLE, DROP COLUMN

    private int changeType = UNKNOWN_TYPE; // DDLStatementNode.XXX_TYPE.

    // True when this node represents a TRUNCATE TABLE statement.
    private boolean truncateTable = false;

    /**
     * Initializer for a AlterTableNode for updating the statistics. The user
     * can ask for update statistic of all the indexes or only a specific index
     *
     * @param objectName The name of the table whose index(es) will have
     *                   their statistics updated.
     * @param updateStatisticsAll If true then update the statistics of all
     *                   the indexes on the table. If false, then update
     *                   the statistics of only the index provided as
     *                   3rd parameter here
     * @param indexName Only used if updateStatisticsAll is set to
     *                   false.
     *
     * @exception StandardException Thrown on error
     */
    public void init(Object objectName,
                     Object updateStatisticsAll,
                     Object indexName)
            throws StandardException {
        initAndCheck(objectName);
        this.updateStatisticsAll = ((Boolean)updateStatisticsAll).booleanValue();
        this.indexNameForUpdateStatistics = (String)indexName;
        updateStatistics = true;
    }

    /**
     * Initializer for a TRUNCATE TABLE or COMPRESS using temporary tables
     * rather than inplace compress
     *
     * <p>The runtime type of {@code arg2} selects the statement kind:
     * an {@code int[]} carries the TRUNCATE behavior code, while a
     * {@code Boolean} carries the SEQUENTIAL flag of a COMPRESS.
     *
     * @param objectName The name of the table being altered
     * @param arg2 <code>int[]</code>: Behavior CASCADE or RESTRICTED
     *             <code>Boolean</code>: Whether or not the COMPRESS is SEQUENTIAL
     *
     * @exception StandardException Thrown on error
     */
    public void init(Object objectName, Object arg2) throws StandardException {
        initAndCheck(objectName);
        if (arg2 instanceof int[]) {
            int[] bh = (int[])arg2;
            this.behavior = bh[0];
            truncateTable = true;
        }
        else {
            this.sequential = ((Boolean)arg2).booleanValue();
            compressTable = true;
        }
    }

    /**
     * Initializer for a AlterTableNode for INPLACE COMPRESS
     *
     * @param objectName The name of the table being altered
     * @param purge PURGE during INPLACE COMPRESS?
     * @param defragment DEFRAGMENT during INPLACE COMPRESS?
     * @param truncateEndOfTable TRUNCATE END during INPLACE COMPRESS?
     *
     * @exception StandardException Thrown on error
     */
    public void init(Object objectName,
                     Object purge,
                     Object defragment,
                     Object truncateEndOfTable)
            throws StandardException {
        initAndCheck(objectName);
        this.purge = ((Boolean)purge).booleanValue();
        this.defragment = ((Boolean)defragment).booleanValue();
        this.truncateEndOfTable = ((Boolean)truncateEndOfTable).booleanValue();
        compressTable = true;
    }

    /**
     * Initializer for a AlterTableNode
     *
     * @param objectName The name of the table being altered
     * @param tableElementList The alter table action
     * @param lockGranularity The new lock granularity, if any
     * @param changeType ADD_TYPE or DROP_TYPE
     * @param behavior If drop column is CASCADE or RESTRICTED
     *
     * @exception StandardException Thrown on error
     */
    public void init(Object objectName,
                     Object tableElementList,
                     Object lockGranularity,
                     Object changeType,
                     Object behavior)
            throws StandardException {
        initAndCheck(objectName);
        this.tableElementList = (TableElementList)tableElementList;
        // changeType and behavior are passed as single-element int[] by the parser.
        int[] ct = (int[])changeType, bh = (int[])behavior;
        this.changeType = ct[0];
        this.behavior = bh[0];
        // Reject change types this node does not support.
        switch (this.changeType) {
        case ADD_TYPE:
        case DROP_TYPE:
        case MODIFY_TYPE:
        case LOCKING_TYPE:
            break;

        default:
            throw new StandardException("Not implemented");
        }
    }

    /**
     * Fill this node with a deep copy of the given node.
*/ public void copyFrom(QueryTreeNode node) throws StandardException { super.copyFrom(node); AlterTableNode other = (AlterTableNode)node; this.tableElementList = (TableElementList) getNodeFactory().copyNode(other.tableElementList, getParserContext()); // this.lockGranularity = other.lockGranularity; this.updateStatistics = other.updateStatistics; this.updateStatisticsAll = other.updateStatisticsAll; this.indexNameForUpdateStatistics = other.indexNameForUpdateStatistics; this.compressTable = other.compressTable; this.sequential = other.sequential; this.purge = other.purge; this.defragment = other.defragment; this.truncateEndOfTable = other.truncateEndOfTable; this.behavior = other.behavior; this.changeType = other.changeType; this.truncateTable = other.truncateTable; } /** * Convert this object to a String. See comments in QueryTreeNode.java * for how this should be done for tree printing. * * @return This object as a String */ public String toString() { return super.toString() + "objectName: " + getObjectName() + "\n" + //"lockGranularity: " + lockGranularity + "\n" + "compressTable: " + compressTable + "\n" + "sequential: " + sequential + "\n" + "truncateTable: " + truncateTable + "\n" + "purge: " + purge + "\n" + "defragment: " + defragment + "\n" + "truncateEndOfTable: " + truncateEndOfTable + "\n" + "updateStatistics: " + updateStatistics + "\n" + "updateStatisticsAll: " + updateStatisticsAll + "\n" + "indexNameForUpdateStatistics: " + indexNameForUpdateStatistics + "\n"; } /** * Prints the sub-nodes of this object. See QueryTreeNode.java for * how tree printing is supposed to work. 
* @param depth The depth to indent the sub-nodes */ public void printSubNodes(int depth) { if (tableElementList != null) { printLabel(depth, "tableElementList: "); tableElementList.treePrint(depth + 1); } } public String statementToString() { if (truncateTable) return "TRUNCATE TABLE"; else return "ALTER TABLE"; } public boolean isUpdateStatistics() { return updateStatistics; } public boolean isUpdateStatisticsAll() { return updateStatisticsAll; } public String getIndexNameForUpdateStatistics() { return indexNameForUpdateStatistics; } public boolean isCompressTable() { return compressTable; } public boolean isTruncateTable() { return truncateTable; } public int getChangeType() { return changeType; } public int getBehavior() { return behavior; } public boolean isCascade() { return (behavior == StatementType.DROP_CASCADE); } /** * Accept the visitor for all visitable children of this node. * * @param v the visitor * * @exception StandardException on error */ void acceptChildren(Visitor v) throws StandardException { super.acceptChildren(v); if (tableElementList != null) { tableElementList.accept(v); } } }
package com.lb_stuff.kataparty.command;

import com.lb_stuff.kataparty.KataPartyPlugin;
import com.lb_stuff.kataparty.api.IMetadatable;
import com.lb_stuff.kataparty.api.IParty;
import com.lb_stuff.kataparty.api.IPartySettings.IMemberSettings;
import com.lb_stuff.kataparty.api.KataPartyService;
import com.lb_stuff.kataparty.api.Perms;
import com.lb_stuff.kataparty.api.event.PartyDisbandEvent;
import com.lb_stuff.kataparty.api.event.PartyMemberJoinEvent;
import com.lb_stuff.kataparty.api.event.PartyMemberLeaveEvent;

import org.bukkit.Bukkit;
import org.bukkit.OfflinePlayer;
import org.bukkit.command.Command;
import org.bukkit.command.CommandSender;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.configuration.serialization.ConfigurationSerializable;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.EventPriority;
import org.bukkit.event.Listener;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Implements the "/party back" style command: lets a player rejoin a party
 * they recently left, within a permission-dependent grace period. Also
 * listens to party membership events to record (or forget) the metadata
 * that makes rejoining possible.
 */
public class PartyBackCommand extends TabbablePartyCommand implements Listener
{
    public PartyBackCommand(KataPartyPlugin plugin)
    {
        super(plugin);
    }

    /**
     * Records leave information so the player can later "/back" into the
     * party; kicks and disbands clear the record instead.
     */
    @EventHandler(priority = EventPriority.MONITOR, ignoreCancelled = true)
    public void onMemberLeave(PartyMemberLeaveEvent e)
    {
        BackMeta m = BackMeta.getFrom(inst.getServer().getOfflinePlayer(e.getMember().getUuid()));
        switch(e.getReason())
        {
            case KICKED:
            case DISBAND:
            {
                // Not eligible to rejoin: drop any stored info for this party.
                m.setInfo(e.getMember().getParty(), null);
            } break;
            default:
            {
                // Voluntary-style leave: remember settings and leave time.
                m.setInfo(e.getMember().getParty(), new BackMeta.Info(e.getMember()));
            } break;
        }
    }

    /** Joining a party invalidates any pending "back" record for it. */
    @EventHandler(priority = EventPriority.MONITOR, ignoreCancelled = true)
    public void onMemberJoin(PartyMemberJoinEvent e)
    {
        BackMeta m = BackMeta.getFrom(inst.getServer().getOfflinePlayer(e.getApplicant().getUuid()));
        m.setInfo(e.getParty(), null);
    }

    /** A disband clears the "back" record for every member of the party. */
    @EventHandler(priority = EventPriority.MONITOR, ignoreCancelled = true)
    public void onPartyDisband(PartyDisbandEvent e)
    {
        for(OfflinePlayer p : KataPartyService.getAllPlayers(e.getParty().getMembers()))
        {
            BackMeta m = BackMeta.getFrom(p);
            m.setInfo(e.getParty(), null);
        }
    }

    /**
     * Computes the longest grace period (in ticks) granted by any of the
     * configured "back-command-grace-periods" permissions the player holds.
     * A configured value of -1 means unlimited (Long.MAX_VALUE).
     */
    private long getGracePeriod(Player p)
    {
        long period = 0;
        for(String perm : ((ConfigurationSection)inst.getConfig().get("back-command-grace-periods")).getValues(false).keySet())
        {
            if(Perms.rejoinPeriod(p, perm))
            {
                Long l = inst.getConfig().getLong("back-command-grace-periods."+perm);
                if(l == -1)
                {
                    return Long.MAX_VALUE;
                }
                else if(l > period)
                {
                    period = l;
                }
            }
        }
        return period;
    }

    /** Tab-completes the first argument with the player's rejoinable party names. */
    @Override
    public List<String> onTabComplete(CommandSender sender, Command cmd, String label, String[] args)
    {
        List<String> ret = new ArrayList<>();
        if(sender instanceof Player)
        {
            Player player = (Player)sender;
            if(args.length == 1)
            {
                for(IParty p : BackMeta.getFrom(player).getParties())
                {
                    if(p.getName().toLowerCase().startsWith(args[args.length-1].toLowerCase()))
                    {
                        ret.add(p.getName());
                    }
                }
            }
        }
        return ret;
    }

    /**
     * Handles the command: with an argument, rejoin that named party; with
     * none, rejoin the most recently left party. Fails politely when the
     * player was never in the party or the grace period has expired.
     */
    @Override
    public boolean onCommand(CommandSender sender, Command cmd, String label, String[] args)
    {
        if(args.length > 1)
        {
            return false;
        }
        if(sender instanceof Player)
        {
            Player player = (Player)sender;
            BackMeta m = BackMeta.getFrom(player);
            IParty party = null;
            if(args.length == 1)
            {
                party = inst.getPartySet().findParty(args[0]);
            }
            else
            {
                // No argument: pick the party with the newest leave tick.
                long tick = Long.MIN_VALUE;
                for(IParty p : m.getParties())
                {
                    long t = m.getInfo(p).getTick();
                    if(t > tick)
                    {
                        tick = t;
                        party = p;
                    }
                }
            }
            if(party == null || !m.getParties().contains(party))
            {
                if(args.length == 1)
                {
                    inst.tellMessage(player, "never-in-party", args[0]);
                }
                else
                {
                    inst.tellMessage(player, "never-in-any-party");
                }
            }
            else
            {
                final long delta = KataPartyPlugin.getTick() - m.getInfo(party).getTick();
                if(delta > getGracePeriod(player))
                {
                    // Too late: expire the stored record as well.
                    inst.tellMessage(player, "grace-period-ended", party.getName());
                    m.setInfo(party, null);
                }
                else
                {
                    party.newMember(m.getInfo(party).getSettings(), PartyMemberJoinEvent.Reason.VOLUNTARY);
                }
            }
            return true;
        }
        return false;
    }

    /**
     * Per-player metadata mapping each party the player left to the
     * {@link Info} needed to rejoin it. Serialized into the plugin's
     * player-metadata store via Bukkit's ConfigurationSerializable.
     */
    public static class BackMeta implements ConfigurationSerializable
    {
        @Override
        public Map<String, Object> serialize()
        {
            Map<String, Object> data = new HashMap<>();
            for(Map.Entry<IParty, Info> e : leaves.entrySet())
            {
                // Only persist entries for parties that still exist.
                if(KataPartyPlugin.getInst().getPartySet().contains(e.getKey()))
                {
                    data.put(e.getKey().getName(), e.getValue());
                }
            }
            return data;
        }
        public BackMeta(final Map<String, Object> data)
        {
            // Deserialization runs before parties are fully loaded, so the
            // name -> IParty resolution is deferred by one scheduler tick.
            Bukkit.getScheduler().runTask(KataPartyPlugin.getInst(), new Runnable(){@Override public void run()
            {
                for(Map.Entry<String, Object> e : data.entrySet())
                {
                    if(e.getKey().equals("=="))
                    {
                        // "==" is the serializer's type-tag key, not a party name.
                        continue;
                    }
                    leaves.put(KataPartyPlugin.getInst().getPartySet().findParty(e.getKey()), (Info)e.getValue());
                }
            }});
        }
        // Party -> leave-info for every party this player may rejoin.
        private Map<IParty, Info> leaves = new HashMap<>();
        public BackMeta()
        {
        }

        /** Installs a fresh BackMeta on the player's metadata container. */
        public static void addTo(OfflinePlayer p)
        {
            KataPartyPlugin.getInst().getPlayerMetadata(p).set(BackMeta.class, new BackMeta());
        }
        /** Fetches the player's BackMeta, creating it on first access. */
        public static BackMeta getFrom(OfflinePlayer p)
        {
            IMetadatable m = KataPartyPlugin.getInst().getPlayerMetadata(p);
            if(m.get(BackMeta.class) == null)
            {
                addTo(p);
            }
            return (BackMeta)m.get(BackMeta.class);
        }
        /** Removes the player's BackMeta entirely. */
        public static void removeFrom(OfflinePlayer p)
        {
            KataPartyPlugin.getInst().getPlayerMetadata(p).set(BackMeta.class, null);
        }

        /** @return the set of parties with a stored leave record. */
        public Set<IParty> getParties()
        {
            return leaves.keySet();
        }
        /** @return the leave record for the party, or null if none. */
        public Info getInfo(IParty p)
        {
            return leaves.get(p);
        }
        /** Stores the record, or removes it when {@code i} is null. */
        public void setInfo(IParty p, Info i)
        {
            if(i == null)
            {
                leaves.remove(p);
            }
            else
            {
                leaves.put(p, i);
            }
        }

        /**
         * The data needed to rejoin one party: the tick at which the player
         * left and the member settings they had at the time.
         */
        public static class Info implements ConfigurationSerializable
        {
            @Override
            public Map<String, Object> serialize()
            {
                Map<String, Object> data = new HashMap<>();
                data.put("tick", tick);
                data.put("settings", settings);
                return data;
            }
            public Info(Map<String, Object> data)
            {
                tick = (Long)data.get("tick");
                settings = (IMemberSettings)data.get("settings");
            }

            // Tick of the leave event; defaults to "now" when constructed live.
            private long tick = KataPartyPlugin.getTick();
            private IMemberSettings settings;
            public Info(IMemberSettings s)
            {
                settings = s;
            }
            /** @return the tick at which the player left. */
            public long getTick()
            {
                return tick;
            }
            public void setTick(long t)
            {
                tick = t;
            }
            /** @return the member settings to restore on rejoin. */
            public IMemberSettings getSettings()
            {
                return settings;
            }
            public void setSettings(IMemberSettings s)
            {
                settings = s;
            }
        }
    }
}
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution

// Automatically generated by LOXI from template of_class.java
// Do not modify

package org.projectfloodlight.openflow.protocol.ver13;

import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import com.google.common.collect.ImmutableSet;
import java.util.List;
import com.google.common.collect.ImmutableList;
import org.jboss.netty.buffer.ChannelBuffer;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;

/**
 * Generated wire codec for the OpenFlow 1.3 OFPT_MULTIPART/STATS_REPLY
 * message carrying a PORT_DESC body. Immutable message class plus its
 * Builder, Reader, Writer and Funnel helpers. Generated code — any edits
 * here will be overwritten by the next LoxiGen run.
 */
class OFPortDescStatsReplyVer13 implements OFPortDescStatsReply {
    private static final Logger logger = LoggerFactory.getLogger(OFPortDescStatsReplyVer13.class);
    // version: 1.3
    final static byte WIRE_VERSION = 4;
    final static int MINIMUM_LENGTH = 16;

    private final static long DEFAULT_XID = 0x0L;
    private final static Set<OFStatsReplyFlags> DEFAULT_FLAGS = ImmutableSet.<OFStatsReplyFlags>of();
    private final static List<OFPortDesc> DEFAULT_ENTRIES = ImmutableList.<OFPortDesc>of();

    // OF message fields
    private final long xid;
    private final Set<OFStatsReplyFlags> flags;
    private final List<OFPortDesc> entries;
//
    // Immutable default instance
    final static OFPortDescStatsReplyVer13 DEFAULT = new OFPortDescStatsReplyVer13(
        DEFAULT_XID, DEFAULT_FLAGS, DEFAULT_ENTRIES
    );

    // package private constructor - used by readers, builders, and factory
    OFPortDescStatsReplyVer13(long xid, Set<OFStatsReplyFlags> flags, List<OFPortDesc> entries) {
        if(flags == null) {
            throw new NullPointerException("OFPortDescStatsReplyVer13: property flags cannot be null");
        }
        if(entries == null) {
            throw new NullPointerException("OFPortDescStatsReplyVer13: property entries cannot be null");
        }
        this.xid = xid;
        this.flags = flags;
        this.entries = entries;
    }

    // Accessors for OF message fields
    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_13;
    }

    @Override
    public OFType getType() {
        return OFType.STATS_REPLY;
    }

    @Override
    public long getXid() {
        return xid;
    }

    @Override
    public OFStatsType getStatsType() {
        return OFStatsType.PORT_DESC;
    }

    @Override
    public Set<OFStatsReplyFlags> getFlags() {
        return flags;
    }

    @Override
    public List<OFPortDesc> getEntries() {
        return entries;
    }

    /** Builder pre-populated from this message's field values. */
    public OFPortDescStatsReply.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    /**
     * Builder that falls back to the parent message's values for any
     * field the caller did not explicitly set.
     */
    static class BuilderWithParent implements OFPortDescStatsReply.Builder {
        final OFPortDescStatsReplyVer13 parentMessage;

        // OF message fields
        private boolean xidSet;
        private long xid;
        private boolean flagsSet;
        private Set<OFStatsReplyFlags> flags;
        private boolean entriesSet;
        private List<OFPortDesc> entries;

        BuilderWithParent(OFPortDescStatsReplyVer13 parentMessage) {
            this.parentMessage = parentMessage;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_13;
        }

        @Override
        public OFType getType() {
            return OFType.STATS_REPLY;
        }

        @Override
        public long getXid() {
            return xid;
        }

        @Override
        public OFPortDescStatsReply.Builder setXid(long xid) {
            this.xid = xid;
            this.xidSet = true;
            return this;
        }

        @Override
        public OFStatsType getStatsType() {
            return OFStatsType.PORT_DESC;
        }

        @Override
        public Set<OFStatsReplyFlags> getFlags() {
            return flags;
        }

        @Override
        public OFPortDescStatsReply.Builder setFlags(Set<OFStatsReplyFlags> flags) {
            this.flags = flags;
            this.flagsSet = true;
            return this;
        }

        @Override
        public List<OFPortDesc> getEntries() {
            return entries;
        }

        @Override
        public OFPortDescStatsReply.Builder setEntries(List<OFPortDesc> entries) {
            this.entries = entries;
            this.entriesSet = true;
            return this;
        }

        @Override
        public OFPortDescStatsReply build() {
            // Each field: explicit value if set, otherwise the parent's value.
            long xid = this.xidSet ? this.xid : parentMessage.xid;
            Set<OFStatsReplyFlags> flags = this.flagsSet ? this.flags : parentMessage.flags;
            if(flags == null)
                throw new NullPointerException("Property flags must not be null");
            List<OFPortDesc> entries = this.entriesSet ? this.entries : parentMessage.entries;
            if(entries == null)
                throw new NullPointerException("Property entries must not be null");

            //
            return new OFPortDescStatsReplyVer13(
                xid, flags, entries
            );
        }
    }

    /** Builder that falls back to the class-level DEFAULT_* values. */
    static class Builder implements OFPortDescStatsReply.Builder {
        // OF message fields
        private boolean xidSet;
        private long xid;
        private boolean flagsSet;
        private Set<OFStatsReplyFlags> flags;
        private boolean entriesSet;
        private List<OFPortDesc> entries;

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_13;
        }

        @Override
        public OFType getType() {
            return OFType.STATS_REPLY;
        }

        @Override
        public long getXid() {
            return xid;
        }

        @Override
        public OFPortDescStatsReply.Builder setXid(long xid) {
            this.xid = xid;
            this.xidSet = true;
            return this;
        }

        @Override
        public OFStatsType getStatsType() {
            return OFStatsType.PORT_DESC;
        }

        @Override
        public Set<OFStatsReplyFlags> getFlags() {
            return flags;
        }

        @Override
        public OFPortDescStatsReply.Builder setFlags(Set<OFStatsReplyFlags> flags) {
            this.flags = flags;
            this.flagsSet = true;
            return this;
        }

        @Override
        public List<OFPortDesc> getEntries() {
            return entries;
        }

        @Override
        public OFPortDescStatsReply.Builder setEntries(List<OFPortDesc> entries) {
            this.entries = entries;
            this.entriesSet = true;
            return this;
        }

        //
        @Override
        public OFPortDescStatsReply build() {
            long xid = this.xidSet ? this.xid : DEFAULT_XID;
            Set<OFStatsReplyFlags> flags = this.flagsSet ? this.flags : DEFAULT_FLAGS;
            if(flags == null)
                throw new NullPointerException("Property flags must not be null");
            List<OFPortDesc> entries = this.entriesSet ? this.entries : DEFAULT_ENTRIES;
            if(entries == null)
                throw new NullPointerException("Property entries must not be null");

            return new OFPortDescStatsReplyVer13(
                xid, flags, entries
            );
        }
    }

    final static Reader READER = new Reader();

    /**
     * Decodes a message from the wire. Returns null when the buffer does
     * not yet contain the full message (reader index is reset so the
     * caller can retry once more bytes arrive).
     */
    static class Reader implements OFMessageReader<OFPortDescStatsReply> {
        @Override
        public OFPortDescStatsReply readFrom(ChannelBuffer bb) throws OFParseError {
            int start = bb.readerIndex();
            // fixed value property version == 4
            byte version = bb.readByte();
            if(version != (byte) 0x4)
                throw new OFParseError("Wrong version: Expected=OFVersion.OF_13(4), got="+version);
            // fixed value property type == 19
            byte type = bb.readByte();
            if(type != (byte) 0x13)
                throw new OFParseError("Wrong type: Expected=OFType.STATS_REPLY(19), got="+type);
            int length = U16.f(bb.readShort());
            if(length < MINIMUM_LENGTH)
                throw new OFParseError("Wrong length: Expected to be >= " + MINIMUM_LENGTH + ", was: " + length);
            if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
                // Buffer does not have all data yet
                bb.readerIndex(start);
                return null;
            }
            if(logger.isTraceEnabled())
                logger.trace("readFrom - length={}", length);
            long xid = U32.f(bb.readInt());
            // fixed value property statsType == 13
            short statsType = bb.readShort();
            if(statsType != (short) 0xd)
                throw new OFParseError("Wrong statsType: Expected=OFStatsType.PORT_DESC(13), got="+statsType);
            Set<OFStatsReplyFlags> flags = OFStatsReplyFlagsSerializerVer13.readFrom(bb);
            // pad: 4 bytes
            bb.skipBytes(4);
            // Remaining bytes up to the declared length are the port entries.
            List<OFPortDesc> entries = ChannelUtils.readList(bb, length - (bb.readerIndex() - start), OFPortDescVer13.READER);

            OFPortDescStatsReplyVer13 portDescStatsReplyVer13 = new OFPortDescStatsReplyVer13(
                xid, flags, entries
            );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", portDescStatsReplyVer13);
            return portDescStatsReplyVer13;
        }
    }

    /** Feeds this message's identity into a Guava hashing sink. */
    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFPortDescStatsReplyVer13Funnel FUNNEL = new OFPortDescStatsReplyVer13Funnel();
    static class OFPortDescStatsReplyVer13Funnel implements Funnel<OFPortDescStatsReplyVer13> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFPortDescStatsReplyVer13 message, PrimitiveSink sink) {
            // fixed value property version = 4
            sink.putByte((byte) 0x4);
            // fixed value property type = 19
            sink.putByte((byte) 0x13);
            // FIXME: skip funnel of length
            sink.putLong(message.xid);
            // fixed value property statsType = 13
            sink.putShort((short) 0xd);
            OFStatsReplyFlagsSerializerVer13.putTo(message.flags, sink);
            // skip pad (4 bytes)
            FunnelUtils.putList(message.entries, sink);
        }
    }

    /** Serializes this message to the wire. */
    public void writeTo(ChannelBuffer bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();
    static class Writer implements OFMessageWriter<OFPortDescStatsReplyVer13> {
        @Override
        public void write(ChannelBuffer bb, OFPortDescStatsReplyVer13 message) {
            int startIndex = bb.writerIndex();
            // fixed value property version = 4
            bb.writeByte((byte) 0x4);
            // fixed value property type = 19
            bb.writeByte((byte) 0x13);
            // length is length of variable message, will be updated at the end
            int lengthIndex = bb.writerIndex();
            bb.writeShort(U16.t(0));
            bb.writeInt(U32.t(message.xid));
            // fixed value property statsType = 13
            bb.writeShort((short) 0xd);
            OFStatsReplyFlagsSerializerVer13.writeTo(bb, message.flags);
            // pad: 4 bytes
            bb.writeZero(4);
            ChannelUtils.writeList(bb, message.entries);

            // update length field
            int length = bb.writerIndex() - startIndex;
            bb.setShort(lengthIndex, length);
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFPortDescStatsReplyVer13(");
        b.append("xid=").append(xid);
        b.append(", ");
        b.append("flags=").append(flags);
        b.append(", ");
        b.append("entries=").append(entries);
        b.append(")");
        return b.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFPortDescStatsReplyVer13 other = (OFPortDescStatsReplyVer13) obj;

        if( xid != other.xid)
            return false;
        if (flags == null) {
            if (other.flags != null)
                return false;
        } else if (!flags.equals(other.flags))
            return false;
        if (entries == null) {
            if (other.entries != null)
                return false;
        } else if (!entries.equals(other.entries))
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;

        // NOTE(review): the xid fold omits "result +" (it is "prime * (int)(...)"
        // rather than "prime * result + (int)(...)", unlike the fields below).
        // Still a valid, equals-consistent hash, but confirm against the LoxiGen
        // template before regenerating.
        result = prime * (int) (xid ^ (xid >>> 32));
        result = prime * result + ((flags == null) ? 0 : flags.hashCode());
        result = prime * result + ((entries == null) ? 0 : entries.hashCode());
        return result;
    }

}
/* * Copyright 2013 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.jenkins.plugins.storage; import static java.util.logging.Level.SEVERE; import java.io.IOException; import java.io.Serializable; import java.math.BigInteger; import java.net.URLConnection; import java.security.GeneralSecurityException; import java.util.Collection; import java.util.Collections; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.logging.Logger; import javax.annotation.Nullable; import org.apache.commons.io.FilenameUtils; import org.jenkinsci.remoting.RoleChecker; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.jenkins.plugins.storage.AbstractUploadDescriptor.GCS_SCHEME; import com.google.api.client.googleapis.media.MediaHttpUploader; import com.google.api.client.http.InputStreamContent; import com.google.api.services.storage.Storage; import com.google.api.services.storage.model.Bucket; import com.google.api.services.storage.model.ObjectAccessControl; import com.google.api.services.storage.model.StorageObject; import com.google.common.base.Joiner; import com.google.common.base.Objects; import com.google.common.base.Predicate; import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import 
com.google.common.io.Files; import com.google.jenkins.plugins.credentials.oauth.GoogleRobotCredentials; import com.google.jenkins.plugins.metadata.MetadataContainer; import com.google.jenkins.plugins.storage.reports.BuildGcsUploadReport; import com.google.jenkins.plugins.util.ConflictException; import com.google.jenkins.plugins.util.Executor; import com.google.jenkins.plugins.util.ExecutorException; import com.google.jenkins.plugins.util.ForbiddenException; import com.google.jenkins.plugins.util.NotFoundException; import hudson.DescriptorExtensionList; import hudson.ExtensionPoint; import hudson.FilePath; import hudson.Util; import hudson.model.AbstractBuild; import hudson.model.Describable; import hudson.model.Hudson; import hudson.model.Result; import hudson.model.TaskListener; import hudson.remoting.Callable; /** * This new extension point is used for surfacing different kinds of * Google Cloud Storage (GCS) uploads. The most obvious implementations * are provided as: * @see ClassicUpload * @see StdoutUpload * * We provide the following hooks for implementations to inject additional * functionality: * <ul> * <li> Required {@link #getDetails}: provides detail information for the * GCS upload report. * <li> Required {@link #getInclusions}: surfaces the set of * {@link UploadSpec} for the base class to upload to GCS. * * <li> Optional {@link #forResult}: determines the build states for which * uploading should be performed. * <li> Optional {@link #getMetadata}: allows the implementation to surface * additional metadata on the storage object * <li> Optional {@link #annotateObject}: allows the implementation to * ~arbitrarily rewrite parts of the object prior to insertion. 
* </ul> */ public abstract class AbstractUpload implements Describable<AbstractUpload>, ExtensionPoint, Serializable { private static final Logger logger = Logger.getLogger(AbstractUpload.class.getName()); private static final ImmutableMap<String, String> CONTENT_TYPES = ImmutableMap.of( "css", "text/css" ); /** * Construct the base upload from a handful of universal properties. * * @param bucket The unresolved name of the storage bucket within * which to store the resulting objects. * @param sharedPublicly Whether to publicly share the objects being uploaded * @param forFailedJobs Whether to perform the upload regardless of the * build's outcome * @param pathPrefix Path prefix to strip from uploaded files when determining * the filename in GCS. Null indicates no stripping. Filenames that do not * start with this prefix will not be modified. Trailing slash is * automatically added if it is missing. */ public AbstractUpload(String bucket, boolean sharedPublicly, boolean forFailedJobs, @Nullable String pathPrefix, @Nullable UploadModule module) { if (module != null) { this.module = module; } else { this.module = getDescriptor().getModule(); } this.bucketNameWithVars = checkNotNull(bucket); this.sharedPublicly = sharedPublicly; this.forFailedJobs = forFailedJobs; if (pathPrefix != null && !pathPrefix.endsWith("/")) { pathPrefix += "/"; } this.pathPrefix = pathPrefix; } /** * The main action entrypoint of this extension. This uploads the * contents included by the implementation to our resolved storage * bucket. */ public final void perform(GoogleRobotCredentials credentials, AbstractBuild<?, ?> build, TaskListener listener) throws UploadException { if (!forResult(build.getResult())) { // Don't upload for the given build state. return; } try { // Turn paths containing things like $BUILD_NUMBER and $JOB_NAME into // their fully resolved forms. 
String bucketNameResolvedVars = Util.replaceMacro( getBucket(), build.getEnvironment(listener)); if (!bucketNameResolvedVars.startsWith(GCS_SCHEME)) { listener.error(module.prefix( Messages.AbstractUploadDescriptor_BadPrefix( bucketNameResolvedVars, GCS_SCHEME))); return; } // Lop off the GCS_SCHEME prefix. bucketNameResolvedVars = bucketNameResolvedVars.substring(GCS_SCHEME.length()); UploadSpec uploads = getInclusions( build, checkNotNull(build.getWorkspace()), listener); if (uploads != null) { BuildGcsUploadReport links = BuildGcsUploadReport.of(build); links.addBucket(bucketNameResolvedVars); initiateUploadsAtWorkspace(credentials, build, bucketNameResolvedVars, uploads, listener); } } catch (InterruptedException e) { throw new UploadException(Messages.AbstractUpload_UploadException(), e); } catch (IOException e) { throw new UploadException(Messages.AbstractUpload_UploadException(), e); } } /** * This tuple is used to return the modified workspace and collection of * {@link FilePath}s to upload to {@link #perform}. * * NOTE: The workspace is simply used to determine the path the object will * be stored in relative to the bucket. If it is relative to the workspace, * that relative path will be appended to the storage prefix. If it is * not, then the absolute path will be appended. */ protected static class UploadSpec implements Serializable { public UploadSpec(FilePath workspace, List<FilePath> inclusions) { this.workspace = checkNotNull(workspace); this.inclusions = Collections.unmodifiableCollection(inclusions); } public final FilePath workspace; public final Collection<FilePath> inclusions; } /** * Implementations override this interface in order to surface the set of * {@link FilePath}s the core logic should upload. * * @see UploadSpec for further details. 
*/
@Nullable
protected abstract UploadSpec getInclusions(
    AbstractBuild<?, ?> build, FilePath workspace,
    TaskListener listener) throws UploadException;

/**
 * Provide detail information summarizing this download for the GCS
 * upload report.
 */
public abstract String getDetails();

/**
 * This hook is intended to give implementations the opportunity to further
 * annotate the {@link StorageObject} with metadata before uploading it to
 * cloud storage.
 *
 * NOTE: The base implementation does not do anything, so calling
 * {@code super.annotateObject()} is unnecessary.
 */
protected void annotateObject(StorageObject object, TaskListener listener)
    throws UploadException {
  // Intentional no-op; subclasses override this to decorate the object.
  ;
}

/**
 * Retrieves the metadata to attach to the storage object.
 *
 * NOTE: This can be overridden to surface additional (or less) information.
 */
protected Map<String, String> getMetadata(AbstractBuild<?, ?> build) {
  return MetadataContainer.of(build).getSerializedMetadata();
}

/**
 * Determine whether we should upload the pattern for the given
 * build result.
 *
 * @param result the result of the completed build
 * @return whether an upload should be attempted for this result
 */
public boolean forResult(Result result) {
  if (result == Result.SUCCESS) {
    // We always run on successful builds.
    return true;
  }
  if (result == Result.FAILURE || result == Result.UNSTABLE) {
    return isForFailedJobs();
  }
  // else NOT_BUILT
  return false;
}

/**
 * The bucket name specified by the user, which potentially contains
 * unresolved symbols, such as $JOB_NAME and $BUILD_NUMBER.
 */
public String getBucket() {
  return bucketNameWithVars;
}
/** NOTE: old name kept for deserialization */
private final String bucketNameWithVars;

/**
 * Whether to surface the file being uploaded to anyone with the link.
 */
public boolean isSharedPublicly() {
  return sharedPublicly;
}
private final boolean sharedPublicly;

/**
 * Whether to attempt the upload, even if the job failed.
 */
public boolean isForFailedJobs() {
  return forFailedJobs;
}
private final boolean forFailedJobs;

/**
 * The path prefix that will be stripped from uploaded files. May be null
 * if no path prefix needs to be stripped.
 */
@Nullable
public String getPathPrefix() {
  return pathPrefix;
}
private final String pathPrefix;

/**
 * The module to use for providing dependencies.
 */
protected final UploadModule module;

/**
 * Boilerplate, see:
 * https://wiki.jenkins-ci.org/display/JENKINS/Defining+a+new+extension+point
 */
public static DescriptorExtensionList<AbstractUpload, AbstractUploadDescriptor> all() {
  return checkNotNull(Hudson.getInstance()).<AbstractUpload, AbstractUploadDescriptor>getDescriptorList(AbstractUpload.class);
}

/**
 * Boilerplate, see:
 * https://wiki.jenkins-ci.org/display/JENKINS/Defining+a+new+extension+point
 */
public AbstractUploadDescriptor getDescriptor() {
  return (AbstractUploadDescriptor) checkNotNull(Hudson.getInstance())
      .getDescriptor(getClass());
}

/**
 * Execute the {@link UploadSpec} for this {@code build} to the bucket
 * specified by {@code storagePrefix} using the authority of
 * {@code credentials} and logging any information to {@code listener}.
 *
 * The actual file transfer runs on the node that owns the workspace (via
 * the remotable {@code Callable}); only the per-file report entries are
 * recorded back on this side, since {@link BuildGcsUploadReport} cannot be
 * updated over the wire.
 *
 * @throws UploadException if anything goes awry
 */
private void initiateUploadsAtWorkspace(GoogleRobotCredentials credentials,
    final AbstractBuild build, String storagePrefix,
    final UploadSpec uploads,
    final TaskListener listener) throws UploadException {
  try {
    // Break things down to a compatible format:
    //    foo  /  bar / baz / blah.log
    //   ^---^    ^--------------------^
    //   bucket      storage-object
    //
    // TODO(mattmoor): Test objectPrefix on Windows, where '\' != '/'
    // Must we translate? Can we require them to specify in unix-style
    // and still have things work?
    String[] halves = checkNotNull(storagePrefix).split("/", 2);
    final String bucketName = halves[0];
    final String objectPrefix = (halves.length == 1) ? "" : halves[1];

    // Within the workspace, upload all of the files, using a remotable
    // credential to access the storage service from the remote machine.
    final GoogleRobotCredentials remoteCredentials =
        checkNotNull(credentials).forRemote(module.getRequirement());
    final Map<String, String> metadata = getMetadata(build);

    uploads.workspace.act(
        new Callable<Void, UploadException>() {
          @Override
          public Void call() throws UploadException {
            performUploads(metadata, bucketName, objectPrefix,
                remoteCredentials, uploads, listener);
            return (Void) null;
          }

          @Override
          public void checkRoles(RoleChecker checker) throws SecurityException {
            // We know by definition that this is the correct role;
            // the callable exists only in this method context.
          }
        });

    // We can't do this over the wire, so do it in bulk here
    BuildGcsUploadReport report = BuildGcsUploadReport.of(build);
    for (FilePath include : uploads.inclusions) {
      report.addUpload(getRelative(include, uploads.workspace),
          storagePrefix);
    }
  } catch (IOException e) {
    throw new UploadException(
        Messages.AbstractUpload_ExceptionFileUpload(), e);
  } catch (InterruptedException e) {
    throw new UploadException(
        Messages.AbstractUpload_ExceptionFileUpload(), e);
  } catch (GeneralSecurityException e) {
    throw new UploadException(
        Messages.AbstractUpload_RemoteCredentialError(), e);
  }
}

/**
 * This is the workhorse API for performing the actual uploads. It is
 * performed at the workspace, so that all of the {@link FilePath}s should
 * be local.
 *
 * For each included file this builds a {@link StorageObject} (name under
 * {@code objectPrefix} with {@link #getPathPrefix()} stripped, metadata,
 * content disposition/type/size), optionally grants public-read, lets
 * subclasses annotate it, then uploads it with retry.
 */
private void performUploads(Map<String, String> metadata, String bucketName,
    String objectPrefix, GoogleRobotCredentials credentials,
    UploadSpec uploads, TaskListener listener) throws UploadException {
  try {
    Storage service = module.getStorageService(credentials);
    Executor executor = module.newExecutor();

    // Ensure the bucket exists, fetching it regardless so that we can
    // attach its default ACLs to the objects we upload.
    Bucket bucket = getOrCreateBucket(service, credentials, executor, bucketName);

    for (FilePath include : uploads.inclusions) {
      String relativePath = getRelative(include, uploads.workspace);

      // Strip the configured path prefix, when present, so uploads land
      // directly under the object prefix.
      String uploadedFileName = relativePath;
      if (pathPrefix != null && relativePath.startsWith(pathPrefix)) {
        uploadedFileName = relativePath.substring(pathPrefix.length());
      }

      StorageObject object = new StorageObject()
          .setName(FilenameUtils.concat(objectPrefix, uploadedFileName))
          .setMetadata(metadata)
          .setContentDisposition(
              HttpHeaders.getContentDisposition(include.getName()))
          .setContentType(
              detectMIMEType(include.getName()))
          .setSize(BigInteger.valueOf(include.length()));

      if (isSharedPublicly()) {
        object.setAcl(addPublicReadAccess(
            getDefaultObjectAcl(bucket, listener)));
      }

      // Give clients an opportunity to decorate the storage
      // object before we store it.
      annotateObject(object, listener);

      // Log that we are uploading the file and begin executing the upload.
      listener.getLogger().println(module.prefix(
          Messages.AbstractUpload_Uploading(relativePath)));

      performUploadWithRetry(executor, service, bucket, object, include);
    }
  } catch (ForbiddenException e) {
    // If the user doesn't own a bucket then they will end up here.
    throw new UploadException(
        Messages.AbstractUpload_ForbiddenFileUpload(), e);
  } catch (ExecutorException e) {
    throw new UploadException(
        Messages.AbstractUpload_ExceptionFileUpload(), e);
  } catch (IOException e) {
    throw new UploadException(
        Messages.AbstractUpload_ExceptionFileUpload(), e);
  } catch (InterruptedException e) {
    throw new UploadException(
        Messages.AbstractUpload_ExceptionFileUpload(), e);
  }
}

/**
 * Auxiliary method for detecting web-related filename extensions, so
 * the Content-Type is set correctly. Falls back to the JDK's
 * extension-based guess when the extension is not in CONTENT_TYPES
 * (which may return null).
 */
private String detectMIMEType(String filename) {
  String extension = Files.getFileExtension(filename);
  if (CONTENT_TYPES.containsKey(extension)) {
    return CONTENT_TYPES.get(extension);
  } else {
    return URLConnection.guessContentTypeFromName(filename);
  }
}

/**
 * We need our own storage retry logic because we must recreate the
 * input stream for the media uploader.
 *
 * Retries up to module.getInsertRetryCount() times, remembering the last
 * exception of each kind; IOException is preferred for the final rethrow.
 */
private void performUploadWithRetry(Executor executor, Storage service,
    Bucket bucket, StorageObject object,
    FilePath include) throws ExecutorException, IOException, InterruptedException {
  IOException lastIOException = null;
  InterruptedException lastInterruptedException = null;
  for (int i = 0; i < module.getInsertRetryCount(); ++i) {
    try {
      // Create the insertion operation with the decorated object and
      // an input stream of the file contents.
      // NOTE: the stream is re-created on each attempt, which is the
      // reason this loop exists instead of relying on client retries.
      Storage.Objects.Insert insertion =
          service.objects().insert(bucket.getName(), object,
              new InputStreamContent(
                  object.getContentType(), include.read()));

      // Make the operation non-resumable because we have seen a dramatic
      // (e.g. 1000x) speedup from this.
      MediaHttpUploader mediaUploader = insertion.getMediaHttpUploader();
      if (mediaUploader != null) {
        mediaUploader.setDirectUploadEnabled(true);
      }

      executor.execute(insertion);
      return;
    } catch (IOException e) {
      logger.log(SEVERE, Messages.AbstractUpload_UploadError(i), e);
      lastIOException = e;
    } catch (InterruptedException e) {
      logger.log(SEVERE, Messages.AbstractUpload_UploadError(i), e);
      lastInterruptedException = e;
    }
    // Pause before we retry
    executor.sleep();
  }

  // NOTE: We only reach here along paths that encountered an exception.
  // The "happy path" returns from the "try" statement above.
  if (lastIOException != null) {
    throw lastIOException;
  }
  throw checkNotNull(lastInterruptedException);
}

// Fetch the default object ACL for this bucket. Return an empty list if
// we cannot (e.g. the caller lacks permission to read bucket ACLs).
private static List<ObjectAccessControl> getDefaultObjectAcl(Bucket bucket,
    TaskListener listener) {
  List<ObjectAccessControl> defaultAcl = bucket.getDefaultObjectAcl();
  if (defaultAcl == null) {
    listener.error(Messages.AbstractUpload_BucketObjectAclsError(
        bucket.getName()));
    return ImmutableList.of();
  } else {
    return defaultAcl;
  }
}

// Add public access to a given access control list, returning a copy;
// the input list is not mutated.
private static List<ObjectAccessControl> addPublicReadAccess(
    List<ObjectAccessControl> defaultAcl) {
  List<ObjectAccessControl> acl = Lists.newArrayList(defaultAcl);
  final String publicEntity = "allUsers";
  boolean alreadyShared = Iterables.tryFind(acl,
      new Predicate<ObjectAccessControl>() {
        @Override
        public boolean apply(ObjectAccessControl access) {
          return Objects.equal(access.getEntity(), publicEntity);
        }
      }).isPresent();
  /* If the entity 'allUsers' doesn't already have READER or OWNER access,
     grant READER. This is to avoid having both an OWNER record and a
     READER record for that same entity */
  if (!alreadyShared) {
    acl.add(new ObjectAccessControl()
        .setEntity("allUsers")
        .setRole("READER"));
  }
  return acl;
}

/**
 * Fetches or creates an instance of the bucket with the given name with the
 * specified storage service.
 *
 * @param credentials The credentials with which to fetch/create the bucket
 * @param bucketName The top-level bucket name to ensure exists
 * @return an instance of the named bucket, created or retrieved.
 * @throws UploadException if any issues are encountered
 */
protected Bucket getOrCreateBucket(Storage service,
    GoogleRobotCredentials credentials, Executor executor,
    String bucketName) throws UploadException {
  try {
    try {
      return executor.execute(service.buckets()
          .get(bucketName)
          .setProjection("full")); // to retrieve the bucket ACLs
    } catch (NotFoundException e) {
      try {
        // This is roughly the opposite of how the command-line sample does
        // things. We do things this way to optimize for the case where the
        // bucket already exists.
        Bucket bucket = new Bucket().setName(bucketName);
        bucket = executor.execute(service.buckets()
            .insert(credentials.getProjectId(), bucket)
            .setProjection("full")); // to retrieve the bucket ACLs
        return bucket;
      } catch (ConflictException ex) {
        // If we get back a "Conflict" response, it means that the bucket
        // was inserted between when we first tried to get it and were able
        // to successfully insert one.
        // NOTE: This could be due to an initial insertion attempt succeeding
        // but returning an exception, or a race with another service.
        return executor.execute(service.buckets()
            .get(bucketName)
            .setProjection("full")); // to retrieve the bucket ACLs
      }
    }
  } catch (ExecutorException e) {
    throw new UploadException(
        Messages.AbstractUpload_ExceptionGetBucket(bucketName), e);
  } catch (IOException e) {
    throw new UploadException(
        Messages.AbstractUpload_ExceptionGetBucket(bucketName), e);
  }
}

/**
 * Compute the relative path of the given file inclusion, relative to the
 * given workspace. If the path is absolute, it returns the root-relative
 * path instead.
 *
 * @param include The file whose relative path we are computing
 * @param workspace The workspace containing the included file.
 * @return The unix-style relative path of file.
 * @throws UploadException when the input is malformed
 */
public static String getRelative(FilePath include, FilePath workspace)
    throws UploadException {
  LinkedList<String> segments = new LinkedList<String>();
  // Walk upwards from the included file, pushing each path segment, until
  // we reach either the workspace or the filesystem root.
  while (!include.equals(workspace)) {
    segments.push(include.getName());
    include = include.getParent();
    if (Strings.isNullOrEmpty(include.getName())) {
      // When we reach "/" we're done either way.
      break;
    }
  }
  return Joiner.on("/").join(segments);
}
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.hadoop.fs.v1; import java.io.BufferedOutputStream; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.io.OutputStream; import java.net.URI; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.BlockLocation; import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.InvalidPathException; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.Progressable; import org.apache.ignite.igfs.IgfsBlockLocation; import org.apache.ignite.igfs.IgfsException; import org.apache.ignite.igfs.IgfsFile; import org.apache.ignite.igfs.IgfsPath; import org.apache.ignite.igfs.IgfsPathSummary; import 
org.apache.ignite.internal.igfs.common.IgfsLogger; import org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsInputStream; import org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsOutputStream; import org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsStreamDelegate; import org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsWrapper; import org.apache.ignite.internal.processors.igfs.IgfsHandshakeResponse; import org.apache.ignite.internal.processors.igfs.IgfsModeResolver; import org.apache.ignite.internal.processors.igfs.IgfsUtils; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.X; import org.apache.ignite.internal.util.typedef.internal.A; import org.apache.ignite.internal.util.typedef.internal.S; import org.apache.ignite.internal.util.typedef.internal.U; import org.jetbrains.annotations.Nullable; import static org.apache.ignite.configuration.FileSystemConfiguration.DFLT_IGFS_LOG_BATCH_SIZE; import static org.apache.ignite.configuration.FileSystemConfiguration.DFLT_IGFS_LOG_DIR; import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopParameters.PARAM_IGFS_COLOCATED_WRITES; import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopParameters.PARAM_IGFS_LOG_BATCH_SIZE; import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopParameters.PARAM_IGFS_LOG_DIR; import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopParameters.PARAM_IGFS_LOG_ENABLED; import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopParameters.PARAM_IGFS_PREFER_LOCAL_WRITES; import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopParameters.PARAM_IGFS_SEQ_READS_BEFORE_PREFETCH; import static org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsUtils.parameter; import static org.apache.ignite.internal.processors.igfs.IgfsEx.IGFS_SCHEME; /** * {@code IGFS} Hadoop 1.x file system driver over 
file system API. To use
 * {@code IGFS} as Hadoop file system, you should configure this class
 * in Hadoop's {@code core-site.xml} as follows:
 * <pre name="code" class="xml">
 *  &lt;property&gt;
 *    &lt;name&gt;fs.default.name&lt;/name&gt;
 *    &lt;value&gt;igfs:///&lt;/value&gt;
 *  &lt;/property&gt;
 *
 *  &lt;property&gt;
 *    &lt;name&gt;fs.igfs.impl&lt;/name&gt;
 *    &lt;value&gt;org.apache.ignite.hadoop.fs.v1.IgniteHadoopFileSystem&lt;/value&gt;
 *  &lt;/property&gt;
 * </pre>
 * You should also add Ignite JAR and all libraries to Hadoop classpath. To
 * do this, add following lines to {@code conf/hadoop-env.sh} script in Hadoop
 * distribution:
 * <pre name="code" class="bash">
 * export IGNITE_HOME=/path/to/Ignite/distribution
 * export HADOOP_CLASSPATH=$IGNITE_HOME/ignite*.jar
 *
 * for f in $IGNITE_HOME/libs/*.jar; do
 *  export HADOOP_CLASSPATH=$HADOOP_CLASSPATH:$f;
 * done
 * </pre>
 * <h1 class="header">Data vs Clients Nodes</h1>
 * Hadoop needs to use its FileSystem remotely from client nodes as well as directly on
 * data nodes. Client nodes are responsible for basic file system operations as well as
 * accessing data nodes remotely. Usually, client nodes are started together
 * with {@code job-submitter} or {@code job-scheduler} processes, while data nodes are usually
 * started together with Hadoop {@code task-tracker} processes.
 * <p>
 * For sample client and data node configuration refer to {@code config/hadoop/default-config-client.xml}
 * and {@code config/hadoop/default-config.xml} configuration files in Ignite installation.
 */
public class IgniteHadoopFileSystem extends FileSystem {
    /** Empty array of file block locations. */
    private static final BlockLocation[] EMPTY_BLOCK_LOCATIONS = new BlockLocation[0];

    /** Ensures that close routine is invoked at most once. */
    private final AtomicBoolean closeGuard = new AtomicBoolean();

    /** Grid remote client. */
    private HadoopIgfsWrapper rmtClient;

    /** Working directory. */
    private Path workingDir;

    /** Default replication factor. */
    private short dfltReplication;

    /** Base file system URI. */
    @SuppressWarnings("FieldAccessedSynchronizedAndUnsynchronized")
    private URI uri;

    /** Authority (the [name] part of the igfs://[name]/ URI). */
    private String uriAuthority;

    /** Client logger. */
    private IgfsLogger clientLog;

    /** The user name this file system was created on behalf of. */
    private String user;

    /** Whether custom sequential reads before prefetch value is provided. */
    private boolean seqReadsBeforePrefetchOverride;

    /** IGFS group block size. */
    private long igfsGrpBlockSize;

    /** Flag that controls whether file writes should be colocated. */
    private boolean colocateFileWrites;

    /** Prefer local writes. */
    private boolean preferLocFileWrites;

    /** Custom-provided sequential reads before prefetch. */
    private int seqReadsBeforePrefetch;

    /** {@inheritDoc} */
    @Override public URI getUri() {
        if (uri == null)
            throw new IllegalStateException("URI is null (was IgniteHadoopFileSystem properly initialized?).");

        return uri;
    }

    /**
     * Enter busy state.
     *
     * @throws IOException If file system is stopped.
     */
    private void enterBusy() throws IOException {
        if (closeGuard.get())
            throw new IOException("File system is stopped.");
    }

    /**
     * Leave busy state.
     */
    private void leaveBusy() {
        // No-op.
    }

    /**
     * Gets non-null user name as per the Hadoop file system viewpoint.
     * @return the user name, never null.
     * @throws IOException On error.
     */
    public static String getFsHadoopUser() throws IOException {
        UserGroupInformation currUgi = UserGroupInformation.getCurrentUser();

        String user = currUgi.getShortUserName();

        user = IgfsUtils.fixUserName(user);

        assert user != null;

        return user;
    }

    /**
     * Public setter that can be used by direct users of FS or Visor.
     *
     * @param colocateFileWrites Whether all ongoing file writes should be colocated.
     */
    @SuppressWarnings("UnusedDeclaration")
    public void colocateFileWrites(boolean colocateFileWrites) {
        this.colocateFileWrites = colocateFileWrites;
    }

    /** {@inheritDoc} */
    @SuppressWarnings("ConstantConditions")
    @Override public void initialize(URI name, Configuration cfg) throws IOException {
        enterBusy();

        try {
            if (rmtClient != null)
                throw new IOException("File system is already initialized: " + rmtClient);

            A.notNull(name, "name");
            A.notNull(cfg, "cfg");

            super.initialize(name, cfg);

            setConf(cfg);

            if (!IGFS_SCHEME.equals(name.getScheme()))
                throw new IOException("Illegal file system URI [expected=" + IGFS_SCHEME +
                    "://[name]/[optional_path], actual=" + name + ']');

            uri = name;

            uriAuthority = uri.getAuthority();

            user = getFsHadoopUser();

            // Override sequential reads before prefetch if needed.
            seqReadsBeforePrefetch = parameter(cfg, PARAM_IGFS_SEQ_READS_BEFORE_PREFETCH, uriAuthority, 0);

            if (seqReadsBeforePrefetch > 0)
                seqReadsBeforePrefetchOverride = true;

            // In Ignite replication factor is controlled by data cache affinity.
            // We use replication factor to force the whole file to be stored on local node.
            dfltReplication = (short)cfg.getInt("dfs.replication", 3);

            // Get file colocation control flag.
            colocateFileWrites = parameter(cfg, PARAM_IGFS_COLOCATED_WRITES, uriAuthority, false);
            preferLocFileWrites = cfg.getBoolean(PARAM_IGFS_PREFER_LOCAL_WRITES, false);

            // Get log directory.
            String logDirCfg = parameter(cfg, PARAM_IGFS_LOG_DIR, uriAuthority, DFLT_IGFS_LOG_DIR);

            File logDirFile = U.resolveIgnitePath(logDirCfg);

            String logDir = logDirFile != null ? logDirFile.getAbsolutePath() : null;

            rmtClient = new HadoopIgfsWrapper(uriAuthority, logDir, cfg, LOG, user);

            // Handshake.
            IgfsHandshakeResponse handshake = rmtClient.handshake(logDir);

            igfsGrpBlockSize = handshake.blockSize();

            // Initialize client logger. Server-side sampling flag, when set,
            // takes precedence over the client-side configuration value.
            Boolean logEnabled = parameter(cfg, PARAM_IGFS_LOG_ENABLED, uriAuthority, false);

            if (handshake.sampling() != null ? handshake.sampling() : logEnabled) {
                // Initiate client logger.
                if (logDir == null)
                    throw new IOException("Failed to resolve log directory: " + logDirCfg);

                Integer batchSize = parameter(cfg, PARAM_IGFS_LOG_BATCH_SIZE, uriAuthority, DFLT_IGFS_LOG_BATCH_SIZE);

                clientLog = IgfsLogger.logger(uriAuthority, handshake.igfsName(), logDir, batchSize);
            }
            else
                clientLog = IgfsLogger.disabledLogger();

            // Set working directory to the home directory of the current Fs user:
            setWorkingDirectory(null);
        }
        finally {
            leaveBusy();
        }
    }

    /** {@inheritDoc} */
    @Override protected void checkPath(Path path) {
        URI uri = path.toUri();

        if (uri.isAbsolute()) {
            // FIX: the scheme-mismatch message previously reported
            // uri.getAuthority() as the actual value; report the scheme.
            if (!F.eq(uri.getScheme(), IGFS_SCHEME))
                throw new InvalidPathException("Wrong path scheme [expected=" + IGFS_SCHEME +
                    ", actual=" + uri.getScheme() + ']');

            if (!F.eq(uri.getAuthority(), uriAuthority))
                throw new InvalidPathException("Wrong path authority [expected=" + uriAuthority +
                    ", actual=" + uri.getAuthority() + ']');
        }
    }

    /** {@inheritDoc} */
    @SuppressWarnings("deprecation")
    @Override public short getDefaultReplication() {
        return dfltReplication;
    }

    /** {@inheritDoc} */
    @Override protected void finalize() throws Throwable {
        // NOTE(review): super.finalize() is invoked before close(); the
        // conventional order is close() first, super.finalize() in a
        // finally block — confirm intent before changing.
        super.finalize();

        close();
    }

    /** {@inheritDoc} */
    @Override public void close() throws IOException {
        if (closeGuard.compareAndSet(false, true))
            close0();
    }

    /**
     * Closes file system.
     *
     * @throws IOException If failed.
     */
    private void close0() throws IOException {
        if (LOG.isDebugEnabled())
            LOG.debug("File system closed [uri=" + uri + ", endpoint=" + uriAuthority + ']');

        // NOTE(review): when rmtClient is null (never initialized) this
        // early return also skips super.close() — verify that is intended.
        if (rmtClient == null)
            return;

        super.close();

        rmtClient.close(false);

        if (clientLog.isLogEnabled())
            clientLog.close();

        // Reset initialized resources.
        uri = null;
        rmtClient = null;
    }

    /** {@inheritDoc} */
    @Override public void setTimes(Path p, long mtime, long atime) throws IOException {
        enterBusy();

        try {
            A.notNull(p, "p");

            IgfsPath path = convert(p);

            rmtClient.setTimes(path, atime, mtime);
        }
        finally {
            leaveBusy();
        }
    }

    /** {@inheritDoc} */
    @Override public void setPermission(Path p, FsPermission perm) throws IOException {
        enterBusy();

        try {
            A.notNull(p, "p");

            if (rmtClient.update(convert(p), permission(perm)) == null) {
                throw new IOException("Failed to set file permission (file not found?)" +
                    " [path=" + p + ", perm=" + perm + ']');
            }
        }
        finally {
            leaveBusy();
        }
    }

    /** {@inheritDoc} */
    @Override public void setOwner(Path p, String username, String grpName) throws IOException {
        A.notNull(p, "p");
        A.notNull(username, "username");
        A.notNull(grpName, "grpName");

        enterBusy();

        try {
            if (rmtClient.update(convert(p), F.asMap(IgfsUtils.PROP_USER_NAME, username,
                IgfsUtils.PROP_GROUP_NAME, grpName)) == null) {
                throw new IOException("Failed to set file permission (file not found?)" +
                    " [path=" + p + ", userName=" + username + ", groupName=" + grpName + ']');
            }
        }
        finally {
            leaveBusy();
        }
    }

    /** {@inheritDoc} */
    @Override public FSDataInputStream open(Path f, int bufSize) throws IOException {
        A.notNull(f, "f");

        enterBusy();

        try {
            IgfsPath path = convert(f);

            HadoopIgfsStreamDelegate stream = seqReadsBeforePrefetchOverride ?
                rmtClient.open(path, seqReadsBeforePrefetch) : rmtClient.open(path);

            long logId = -1;

            if (clientLog.isLogEnabled()) {
                logId = IgfsLogger.nextId();

                clientLog.logOpen(logId, path, bufSize, stream.length());
            }

            if (LOG.isDebugEnabled())
                LOG.debug("Opening input stream [thread=" + Thread.currentThread().getName() + ", path=" + path +
                    ", bufSize=" + bufSize + ']');

            HadoopIgfsInputStream igfsIn = new HadoopIgfsInputStream(stream, stream.length(),
                bufSize, LOG, clientLog, logId);

            if (LOG.isDebugEnabled())
                LOG.debug("Opened input stream [path=" + path + ", delegate=" + stream + ']');

            return new FSDataInputStream(igfsIn);
        }
        finally {
            leaveBusy();
        }
    }

    /** {@inheritDoc} */
    @SuppressWarnings("deprecation")
    @Override public FSDataOutputStream create(Path f, final FsPermission perm, boolean overwrite, int bufSize,
        short replication, long blockSize, Progressable progress) throws IOException {
        A.notNull(f, "f");

        enterBusy();

        OutputStream out = null;

        try {
            IgfsPath path = convert(f);

            if (LOG.isDebugEnabled())
                // FIX: the log line was missing the ", " separator before "path=".
                LOG.debug("Opening output stream in create [thread=" + Thread.currentThread().getName() +
                    ", path=" + path + ", overwrite=" + overwrite + ", bufSize=" + bufSize + ']');

            Map<String, String> propMap = permission(perm);

            propMap.put(IgfsUtils.PROP_PREFER_LOCAL_WRITES, Boolean.toString(preferLocFileWrites));

            // Create stream and close it in the 'finally' section if any sequential operation failed.
            HadoopIgfsStreamDelegate stream = rmtClient.create(path, overwrite, colocateFileWrites,
                replication, blockSize, propMap);

            assert stream != null;

            long logId = -1;

            if (clientLog.isLogEnabled()) {
                logId = IgfsLogger.nextId();

                clientLog.logCreate(logId, path, overwrite, bufSize, replication, blockSize);
            }

            if (LOG.isDebugEnabled())
                LOG.debug("Opened output stream in create [path=" + path + ", delegate=" + stream + ']');

            HadoopIgfsOutputStream igfsOut = new HadoopIgfsOutputStream(stream, LOG,
                clientLog, logId);

            bufSize = Math.max(64 * 1024, bufSize);

            out = new BufferedOutputStream(igfsOut, bufSize);

            FSDataOutputStream res = new FSDataOutputStream(out, null, 0);

            // Mark stream created successfully.
            out = null;

            return res;
        }
        finally {
            // Close if failed during stream creation.
            if (out != null)
                U.closeQuiet(out);

            leaveBusy();
        }
    }

    /** {@inheritDoc} */
    @SuppressWarnings("deprecation")
    @Override public FSDataOutputStream append(Path f, int bufSize, Progressable progress) throws IOException {
        A.notNull(f, "f");

        enterBusy();

        try {
            IgfsPath path = convert(f);

            if (LOG.isDebugEnabled())
                LOG.debug("Opening output stream in append [thread=" + Thread.currentThread().getName() +
                    ", path=" + path + ", bufSize=" + bufSize + ']');

            HadoopIgfsStreamDelegate stream = rmtClient.append(path, false, null);

            assert stream != null;

            long logId = -1;

            if (clientLog.isLogEnabled()) {
                logId = IgfsLogger.nextId();

                clientLog.logAppend(logId, path, bufSize);
            }

            if (LOG.isDebugEnabled())
                LOG.debug("Opened output stream in append [path=" + path + ", delegate=" + stream + ']');

            HadoopIgfsOutputStream igfsOut = new HadoopIgfsOutputStream(stream, LOG,
                clientLog, logId);

            bufSize = Math.max(64 * 1024, bufSize);

            BufferedOutputStream out = new BufferedOutputStream(igfsOut, bufSize);

            return new FSDataOutputStream(out, null, 0);
        }
        finally {
            leaveBusy();
        }
    }

    /** {@inheritDoc} */
    @SuppressWarnings("unchecked")
    @Override public boolean rename(Path src, Path dst) throws IOException {
        A.notNull(src, "src");
        A.notNull(dst, "dst");

        enterBusy();

        try {
            IgfsPath srcPath = convert(src);
            IgfsPath dstPath = convert(dst);

            if (clientLog.isLogEnabled())
                clientLog.logRename(srcPath, dstPath);

            try {
                rmtClient.rename(srcPath, dstPath);
            }
            catch (IOException ioe) {
                // Log the exception before rethrowing since it may be ignored:
                LOG.warn("Failed to rename [srcPath=" + srcPath + ", dstPath=" + dstPath + ']', ioe);

                throw ioe;
            }

            return true;
        }
        catch (IOException e) {
            // Intentionally ignore IGFS exceptions here to follow Hadoop contract.
            if (F.eq(IOException.class, e.getClass()) && (e.getCause() == null ||
                !X.hasCause(e.getCause(), IgfsException.class)))
                throw e;
            else
                return false;
        }
        finally {
            leaveBusy();
        }
    }

    /** {@inheritDoc} */
    @SuppressWarnings("deprecation")
    @Override public boolean delete(Path f) throws IOException {
        return delete(f, false);
    }

    /** {@inheritDoc} */
    @SuppressWarnings("unchecked")
    @Override public boolean delete(Path f, boolean recursive) throws IOException {
        A.notNull(f, "f");

        enterBusy();

        try {
            IgfsPath path = convert(f);

            // Will throw exception if delete failed.
            boolean res = rmtClient.delete(path, recursive);

            if (clientLog.isLogEnabled())
                clientLog.logDelete(path, recursive);

            return res;
        }
        catch (IOException e) {
            // Intentionally ignore IGFS exceptions here to follow Hadoop contract.
            if (F.eq(IOException.class, e.getClass()) && (e.getCause() == null ||
                !X.hasCause(e.getCause(), IgfsException.class)))
                throw e;
            else
                return false;
        }
        finally {
            leaveBusy();
        }
    }

    /** {@inheritDoc} */
    @Override public FileStatus[] listStatus(Path f) throws IOException {
        A.notNull(f, "f");

        enterBusy();

        try {
            IgfsPath path = convert(f);

            Collection<IgfsFile> list = rmtClient.listFiles(path);

            if (list == null)
                throw new FileNotFoundException("File " + f + " does not exist.");

            List<IgfsFile> files = new ArrayList<>(list);

            FileStatus[] arr = new FileStatus[files.size()];

            for (int i = 0; i < arr.length; i++)
                arr[i] = convert(files.get(i));

            if (clientLog.isLogEnabled()) {
                String[] fileArr = new String[arr.length];

                for (int i = 0; i < arr.length; i++)
                    fileArr[i] = arr[i].getPath().toString();

                clientLog.logListDirectory(path, fileArr);
            }

            return arr;
        }
        finally {
            leaveBusy();
        }
    }

    /** {@inheritDoc} */
    @Override public Path getHomeDirectory() {
        Path path = new Path("/user/" + user);

        return path.makeQualified(getUri(), null);
    }

    /** {@inheritDoc} */
    @Override public void setWorkingDirectory(Path newPath) {
        // A null argument resets the working directory to the user's home.
        if (newPath == null)
            workingDir = getHomeDirectory();
        else {
            Path fixedNewPath = fixRelativePart(newPath);

            String res = fixedNewPath.toUri().getPath();

            if (!DFSUtil.isValidName(res))
                throw new IllegalArgumentException("Invalid DFS directory name " + res);

            workingDir = fixedNewPath;
        }
    }

    /** {@inheritDoc} */
    @Override public Path getWorkingDirectory() {
        return workingDir;
    }

    /** {@inheritDoc} */
    @SuppressWarnings("unchecked")
    @Override public boolean mkdirs(Path f, FsPermission perm) throws IOException {
        A.notNull(f, "f");

        enterBusy();

        try {
            IgfsPath path = convert(f);

            boolean mkdirRes = rmtClient.mkdirs(path, permission(perm));

            if (clientLog.isLogEnabled())
                clientLog.logMakeDirectory(path);

            return mkdirRes;
        }
        catch (IOException e) {
            // Intentionally ignore IGFS exceptions here to follow Hadoop contract.
            if (F.eq(IOException.class, e.getClass()) && (e.getCause() == null ||
                !X.hasCause(e.getCause(), IgfsException.class)))
                throw e;
            else
                return false;
        }
        finally {
            leaveBusy();
        }
    }

    /** {@inheritDoc} */
    @Override public FileStatus getFileStatus(Path f) throws IOException {
        A.notNull(f, "f");

        enterBusy();

        try {
            IgfsFile info = rmtClient.info(convert(f));

            if (info == null)
                throw new FileNotFoundException("File not found: " + f);

            return convert(info);
        }
        finally {
            leaveBusy();
        }
    }

    /** {@inheritDoc} */
    @Override public ContentSummary getContentSummary(Path f) throws IOException {
        A.notNull(f, "f");

        enterBusy();

        try {
            IgfsPathSummary sum = rmtClient.contentSummary(convert(f));

            return new ContentSummary(sum.totalLength(), sum.filesCount(), sum.directoriesCount(),
                -1, sum.totalLength(), rmtClient.fsStatus().spaceTotal());
        }
        finally {
            leaveBusy();
        }
    }

    /** {@inheritDoc} */
    @Override public BlockLocation[] getFileBlockLocations(FileStatus status, long start, long len) throws IOException {
        A.notNull(status, "status");

        enterBusy();

        try {
            IgfsPath path = convert(status.getPath());

            long now = System.currentTimeMillis();

            List<IgfsBlockLocation> affinity = new ArrayList<>(rmtClient.affinity(path, start, len));

            BlockLocation[] arr = new BlockLocation[affinity.size()];

            for (int i = 0; i < arr.length; i++)
                arr[i] = convert(affinity.get(i));

            if (LOG.isDebugEnabled())
                LOG.debug("Fetched file locations [path=" + path + ", fetchTime=" +
                    (System.currentTimeMillis() - now) + ", locations=" + Arrays.asList(arr) + ']');

            return arr;
        }
        catch (FileNotFoundException ignored) {
            // Follow the Hadoop contract: no locations for a missing file.
            return EMPTY_BLOCK_LOCATIONS;
        }
        finally {
            leaveBusy();
        }
    }

    /** {@inheritDoc} */
    @SuppressWarnings("deprecation")
    @Override public long getDefaultBlockSize() {
        return igfsGrpBlockSize;
    }

    /**
     * @return Mode resolver.
     * @throws IOException On error.
     */
    public IgfsModeResolver getModeResolver() throws IOException {
        enterBusy();

        try {
            return rmtClient.modeResolver();
        }
        finally {
            leaveBusy();
        }
    }

    /**
     * Convert IGFS path into Hadoop path.
     *
     * @param path IGFS path.
     * @return Hadoop path.
     */
    private Path convert(IgfsPath path) {
        return new Path(IGFS_SCHEME, uriAuthority, path.toString());
    }

    /**
     * Convert Hadoop path into IGFS path.
     *
     * @param path Hadoop path.
     * @return IGFS path.
     */
    @Nullable private IgfsPath convert(@Nullable Path path) {
        if (path == null)
            return null;

        return path.isAbsolute() ? new IgfsPath(path.toUri().getPath()) :
            new IgfsPath(convert(workingDir), path.toUri().getPath());
    }

    /**
     * Convert IGFS affinity block location into Hadoop affinity block location.
     *
     * @param block IGFS affinity block location.
     * @return Hadoop affinity block location.
     */
    private BlockLocation convert(IgfsBlockLocation block) {
        Collection<String> names = block.names();
        Collection<String> hosts = block.hosts();

        return new BlockLocation(
            names.toArray(new String[names.size()]) /* hostname:portNumber of data nodes */,
            hosts.toArray(new String[hosts.size()]) /* hostnames of data nodes */,
            block.start(), block.length()
        ) {
            @Override public String toString() {
                try {
                    return "BlockLocation [offset=" + getOffset() + ", length=" + getLength() +
                        ", hosts=" + Arrays.asList(getHosts()) + ", names=" + Arrays.asList(getNames()) + ']';
                }
                catch (IOException e) {
                    throw new RuntimeException(e);
                }
            }
        };
    }

    /**
     * Convert IGFS file information into Hadoop file status.
     *
     * @param file IGFS file information.
     * @return Hadoop file status.
     */
    @SuppressWarnings("deprecation")
    private FileStatus convert(IgfsFile file) {
        return new FileStatus(
            file.length(),
            file.isDirectory(),
            getDefaultReplication(),
            file.groupBlockSize(),
            file.modificationTime(),
            file.accessTime(),
            permission(file),
            file.property(IgfsUtils.PROP_USER_NAME, user),
            file.property(IgfsUtils.PROP_GROUP_NAME, "users"),
            convert(file.path())) {
            @Override public String toString() {
                return "FileStatus [path=" + getPath() + ", isDir=" + isDir() + ", len=" + getLen() +
                    ", mtime=" + getModificationTime() + ", atime=" + getAccessTime() + ']';
            }
        };
    }

    /**
     * Convert Hadoop permission into IGFS file attribute.
     *
     * @param perm Hadoop permission.
     * @return IGFS attributes.
     */
    private Map<String, String> permission(FsPermission perm) {
        if (perm == null)
            perm = FsPermission.getDefault();

        return F.asMap(IgfsUtils.PROP_PERMISSION, toString(perm));
    }

    /**
     * @param perm Permission.
     * @return String.
     */
    private static String toString(FsPermission perm) {
        return String.format("%04o", perm.toShort());
    }

    /**
     * Convert IGFS file attributes into Hadoop permission.
     *
     * @param file File info.
     * @return Hadoop permission.
     */
    private FsPermission permission(IgfsFile file) {
        String perm = file.property(IgfsUtils.PROP_PERMISSION, null);

        if (perm == null)
            return FsPermission.getDefault();

        try {
            return new FsPermission((short)Integer.parseInt(perm, 8));
        }
        catch (NumberFormatException ignore) {
            // Malformed permission property; fall back to the default.
            return FsPermission.getDefault();
        }
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(IgniteHadoopFileSystem.class, this);
    }

    /**
     * Returns the user name this File System is created on behalf of.
     * @return the user name
     */
    public String user() {
        return user;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.taverna.wsdl.soap;

import java.io.BufferedInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import javax.activation.DataHandler;
import javax.wsdl.WSDLException;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.soap.AttachmentPart;
import javax.xml.soap.MessageFactory;
import javax.xml.soap.SOAPConstants;
import javax.xml.soap.SOAPElement;
import javax.xml.soap.SOAPEnvelope;
import javax.xml.soap.SOAPException;
import javax.xml.soap.SOAPMessage;

import org.apache.taverna.wsdl.parser.UnknownOperationException;
import org.apache.taverna.wsdl.parser.WSDLParser;
import org.apache.log4j.Logger;
import org.xml.sax.SAXException;

/**
 * Invoke SOAP based webservices.
 *
 * <p>Builds a SOAP 1.1 request envelope from a map of named inputs (via
 * {@link BodyBuilderFactory}), delegates the actual wire call to a
 * {@link JaxWSInvoker}, and parses the response body into a map of named
 * outputs (via {@link SOAPResponseParserFactory}). Any attachments found are
 * returned under the {@code "attachmentList"} key.
 *
 * @author Stuart Owen
 *
 */
@SuppressWarnings("unchecked")
public class WSDLSOAPInvoker {

    /** Reserved output name under which extracted attachments are returned. */
    private static final String ATTACHMENT_LIST = "attachmentList";

    private static Logger logger = Logger.getLogger(WSDLSOAPInvoker.class);

    private BodyBuilderFactory bodyBuilderFactory = BodyBuilderFactory.instance();

    private WSDLParser parser;
    private String operationName;
    private List<String> outputNames;

    // All endpoint/SOAPAction/auth handling is delegated to this invoker.
    private JaxWSInvoker invoker;

    /**
     * @param parser parsed WSDL describing the service
     * @param operationName WSDL operation to invoke
     * @param outputNames expected output part names, used to interpret the response
     */
    public WSDLSOAPInvoker(WSDLParser parser, String operationName,
            List<String> outputNames) {
        this.parser = parser;
        this.operationName = operationName;
        this.outputNames = outputNames;

        invoker = new JaxWSInvoker(parser, null, operationName);
        invoker.setTimeout(getTimeout());
    }

    /** Sets HTTP basic-auth credentials on the underlying invoker. */
    public void setCredentials(String username, String password) {
        invoker.setCredentials(username, password);
    }

    /** Sets a WS-Security token profile on the underlying invoker. */
    public void setWSSSecurity(WSSTokenProfile token) {
        invoker.setWSSSecurity(token);
    }

    protected String getOperationName() {
        return operationName;
    }

    protected WSDLParser getParser() {
        return parser;
    }

    protected List<String> getOutputNames() {
        return outputNames;
    }

    /**
     * Invokes the webservice with the supplied input Map, and returns a Map
     * containing the outputs, mapped against their output names.
     *
     * @param inputMap input values keyed by WSDL input part name
     * @return outputs keyed by output name (plus {@code "attachmentList"})
     * @throws Exception on any build, transport or parse failure
     */
    public Map<String, Object> invoke(Map inputMap) throws Exception {
        SOAPMessage message = makeRequestEnvelope(inputMap);
        return invoke(message);
    }

    /**
     * Performs the wire call for an already-built request message.
     *
     * <p>(A previous direct SAAJ {@code SOAPConnection} implementation, which
     * also set the SOAPAction MIME header here, was removed in favour of
     * delegating to {@link JaxWSInvoker}.)
     */
    public SOAPMessage call(SOAPMessage message) throws Exception {
        return invoker.call(message);
    }

    /**
     * Invokes the webservice with the supplied request message and returns a
     * Map containing the outputs, mapped against their output names.
     *
     * @param message fully-built SOAP request
     * @return outputs keyed by output name (plus {@code "attachmentList"})
     */
    public Map<String, Object> invoke(SOAPMessage message) throws Exception {

        SOAPMessage response = call(message);

        List<SOAPElement> responseElements = new ArrayList();
        for (Iterator<SOAPElement> iter = response.getSOAPBody().getChildElements(); iter.hasNext();) {
            responseElements.add(iter.next());
        }

        logger.info("Received SOAP response:\n"+response);

        Map<String, Object> result;
        if (responseElements.isEmpty()) {
            if (outputNames.size() == 1
                    && outputNames.get(0).equals(ATTACHMENT_LIST)) {
                // Could be axis 2 service with no output (TAV-617)
                result = new HashMap<String, Object>();
            } else {
                throw new IllegalStateException(
                        "Missing expected outputs from service");
            }
        } else {
            logger.info("SOAP response was:" + response);
            SOAPResponseParser responseParser = SOAPResponseParserFactory
                    .instance()
                    .create(responseElements, getUse(), getStyle(),
                            parser.getOperationOutputParameters(operationName));
            result = responseParser.parse(responseElements);
        }

        // NOTE(review): attachments are extracted from the *request* message,
        // not from 'response'. Service-produced attachments would live on the
        // response -- confirm against upstream Taverna whether this should be
        // extractAttachments(response).
        result.put(ATTACHMENT_LIST, extractAttachments(message));

        return result;
    }

    /**
     * Builds the SOAP 1.1 request envelope: optional header (hook for
     * subclasses) plus the operation body built from {@code inputMap}.
     *
     * <p>(Legacy commented-out code that set SOAPAction and Basic-auth MIME
     * headers directly on the message was removed; that responsibility now
     * sits with {@link JaxWSInvoker}.)
     */
    protected SOAPMessage makeRequestEnvelope(Map inputMap)
            throws UnknownOperationException, IOException, WSDLException,
            ParserConfigurationException, SOAPException, SAXException {

        MessageFactory factory = MessageFactory.newInstance(SOAPConstants.SOAP_1_1_PROTOCOL);
        // TODO: SOAP version
        SOAPMessage message = factory.createMessage();

        SOAPEnvelope requestEnv = message.getSOAPPart().getEnvelope();
        addSoapHeader(requestEnv);
        requestEnv.getBody().addChildElement(makeSoapBody(inputMap));

        return message;
    }

    /** Subclass hook: default implementation adds no SOAP header. */
    protected void addSoapHeader(SOAPEnvelope envelope) throws SOAPException {
    }

    /** Builds the operation-specific body element from the input map. */
    protected SOAPElement makeSoapBody(Map inputMap)
            throws UnknownOperationException, IOException, WSDLException,
            ParserConfigurationException, SOAPException, SAXException {
        BodyBuilder builder = bodyBuilderFactory.create(parser, operationName,
                parser.getOperationInputParameters(operationName));
        return builder.build(inputMap);
    }

    /**
     * Reads the property taverna.wsdl.timeout, default to 5 minutes if missing.
     *
     * <p>Note the unit mismatch by design: the property value is interpreted
     * as whole MINUTES, but the returned value is MILLISECONDS.
     *
     * @return timeout in milliseconds
     */
    protected Integer getTimeout() {
        int result = 300000;
        String minutesStr = System.getProperty("taverna.wsdl.timeout");

        if (minutesStr == null) {
            // using default of 5 minutes
            return result;
        }
        try {
            int minutes = Integer.parseInt(minutesStr.trim());
            result = minutes * 1000 * 60;
        } catch (NumberFormatException e) {
            logger.error("Non-integer timeout", e);
            return result;
        }
        return result;
    }

    /** @return the WSDL binding style ("document"/"rpc") for the operation. */
    protected String getStyle() throws UnknownOperationException {
        return parser.getStyle(operationName);
    }

    /** @return the WSDL use ("literal"/"encoded") for the operation. */
    protected String getUse() throws UnknownOperationException {
        return parser.getUse(operationName);
    }

    /**
     * Exctracts any attachments that result from invoking the service, and
     * returns them as a List wrapped within a DataThing.
     *
     * <p>Image/audio/zip/octet-stream attachments are kept as raw
     * {@code byte[]}; everything else is converted to a {@code String}
     * (NOTE(review): using the platform default charset -- confirm this is
     * intended).
     *
     * @param message message whose MIME attachments are read
     * @return list of byte[] or String attachment payloads
     * @throws SOAPException
     * @throws IOException
     */
    protected List extractAttachments(SOAPMessage message)
            throws SOAPException, IOException {
        List attachmentList = new ArrayList();
        if (message.countAttachments() > 0) {
            for (Iterator i = message.getAttachments(); i.hasNext();) {
                AttachmentPart ap = (AttachmentPart) i.next();
                DataHandler dh = ap.getDataHandler();
                // Byte-at-a-time copy of the attachment into memory.
                BufferedInputStream bis = new BufferedInputStream(dh
                        .getInputStream());
                ByteArrayOutputStream bos = new ByteArrayOutputStream();
                int c;
                while ((c = bis.read()) != -1) {
                    bos.write(c);
                }
                bis.close();
                bos.close();
                String mimeType = dh.getContentType();
                if (mimeType.matches(".*image.*")
                        || mimeType.matches(".*octet.*")
                        || mimeType.matches(".*audio.*")
                        || mimeType.matches(".*application/zip.*")) {
                    attachmentList.add(bos.toByteArray());
                } else {
                    attachmentList.add(new String(bos.toByteArray()));
                }
            }
        }

        return attachmentList;
    }

}
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.11
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2015.08.28 at 12:35:27 AM EDT
//
// NOTE(review): despite the header above, this class appears to have been
// hand-modified after generation (the @XmlRootElement annotation and the
// BasePropertyUseType superclass are not vanilla JAXB output) -- regenerating
// from the schema would silently drop those edits. Confirm before regenerating.

package com.es.manager.property.design;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
import com.es.manager.common.LogType;
import com.es.manager.property.use.UseDecimalType;
import com.es.manager.property.use.UseIntegerType;
import com.es.manager.property.use.UseYesNoType;


/**
 * Java class for the {@code multifamilyHousingType} complex type.
 *
 * <p>Schema shape (xs:all, i.e. order-insensitive):
 * required {@code name} (propertyUseNameType), required {@code useDetails}
 * (anonymous type, see {@link UseDetails}), optional {@code audit} (logType).
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "multifamilyHousingType", propOrder = {

})
@XmlRootElement(name="multifamilyHousing")
public class MultifamilyHousingType extends BasePropertyUseType{

    @XmlElement(required = true)
    protected String name;
    @XmlElement(required = true)
    protected MultifamilyHousingType.UseDetails useDetails;
    protected LogType audit;

    /** @return the property-use name, possibly {@code null} if unmarshalled without one. */
    public String getName() {
        return name;
    }

    /** @param value allowed object is {@link String } */
    public void setName(String value) {
        this.name = value;
    }

    /** @return the use-details group, possibly {@code null}. */
    public MultifamilyHousingType.UseDetails getUseDetails() {
        return useDetails;
    }

    /** @param value allowed object is {@link MultifamilyHousingType.UseDetails } */
    public void setUseDetails(MultifamilyHousingType.UseDetails value) {
        this.useDetails = value;
    }

    /** @return the optional audit log entry, or {@code null}. */
    public LogType getAudit() {
        return audit;
    }

    /** @param value allowed object is {@link LogType } */
    public void setAudit(LogType value) {
        this.audit = value;
    }


    /**
     * Java class for the anonymous {@code useDetails} complex type.
     *
     * <p>Schema shape (xs:all): required {@code totalGrossFloorArea}; all other
     * elements optional -- residential-unit counts (total/low/mid/high-rise),
     * bedrooms, laundry hookups (in-unit / common area), percent heated/cooled,
     * resident population, and government-subsidized-housing flag.
     */
    @XmlAccessorType(XmlAccessType.FIELD)
    @XmlType(name = "", propOrder = {

    })
    public static class UseDetails {

        @XmlElement(required = true)
        protected GrossFloorAreaType totalGrossFloorArea;
        protected UseDecimalType numberOfResidentialLivingUnits;
        protected UseDecimalType numberOfBedrooms;
        protected UseDecimalType numberOfResidentialLivingUnitsMidRiseSetting;
        protected UseIntegerType numberOfLaundryHookupsInAllUnits;
        protected UseIntegerType numberOfLaundryHookupsInCommonArea;
        protected UseDecimalType numberOfResidentialLivingUnitsLowRiseSetting;
        protected PercentHeatedType percentHeated;
        protected PercentCooledType percentCooled;
        protected UseDecimalType numberOfResidentialLivingUnitsHighRiseSetting;
        protected ResidentPopulationType residentPopulation;
        protected UseYesNoType governmentSubsidizedHousing;

        /** @return possible object is {@link GrossFloorAreaType } */
        public GrossFloorAreaType getTotalGrossFloorArea() {
            return totalGrossFloorArea;
        }

        /** @param value allowed object is {@link GrossFloorAreaType } */
        public void setTotalGrossFloorArea(GrossFloorAreaType value) {
            this.totalGrossFloorArea = value;
        }

        /** @return possible object is {@link UseDecimalType } */
        public UseDecimalType getNumberOfResidentialLivingUnits() {
            return numberOfResidentialLivingUnits;
        }

        /** @param value allowed object is {@link UseDecimalType } */
        public void setNumberOfResidentialLivingUnits(UseDecimalType value) {
            this.numberOfResidentialLivingUnits = value;
        }

        /** @return possible object is {@link UseDecimalType } */
        public UseDecimalType getNumberOfBedrooms() {
            return numberOfBedrooms;
        }

        /** @param value allowed object is {@link UseDecimalType } */
        public void setNumberOfBedrooms(UseDecimalType value) {
            this.numberOfBedrooms = value;
        }

        /** @return possible object is {@link UseDecimalType } */
        public UseDecimalType getNumberOfResidentialLivingUnitsMidRiseSetting() {
            return numberOfResidentialLivingUnitsMidRiseSetting;
        }

        /** @param value allowed object is {@link UseDecimalType } */
        public void setNumberOfResidentialLivingUnitsMidRiseSetting(UseDecimalType value) {
            this.numberOfResidentialLivingUnitsMidRiseSetting = value;
        }

        /** @return possible object is {@link UseIntegerType } */
        public UseIntegerType getNumberOfLaundryHookupsInAllUnits() {
            return numberOfLaundryHookupsInAllUnits;
        }

        /** @param value allowed object is {@link UseIntegerType } */
        public void setNumberOfLaundryHookupsInAllUnits(UseIntegerType value) {
            this.numberOfLaundryHookupsInAllUnits = value;
        }

        /** @return possible object is {@link UseIntegerType } */
        public UseIntegerType getNumberOfLaundryHookupsInCommonArea() {
            return numberOfLaundryHookupsInCommonArea;
        }

        /** @param value allowed object is {@link UseIntegerType } */
        public void setNumberOfLaundryHookupsInCommonArea(UseIntegerType value) {
            this.numberOfLaundryHookupsInCommonArea = value;
        }

        /** @return possible object is {@link UseDecimalType } */
        public UseDecimalType getNumberOfResidentialLivingUnitsLowRiseSetting() {
            return numberOfResidentialLivingUnitsLowRiseSetting;
        }

        /** @param value allowed object is {@link UseDecimalType } */
        public void setNumberOfResidentialLivingUnitsLowRiseSetting(UseDecimalType value) {
            this.numberOfResidentialLivingUnitsLowRiseSetting = value;
        }

        /** @return possible object is {@link PercentHeatedType } */
        public PercentHeatedType getPercentHeated() {
            return percentHeated;
        }

        /** @param value allowed object is {@link PercentHeatedType } */
        public void setPercentHeated(PercentHeatedType value) {
            this.percentHeated = value;
        }

        /** @return possible object is {@link PercentCooledType } */
        public PercentCooledType getPercentCooled() {
            return percentCooled;
        }

        /** @param value allowed object is {@link PercentCooledType } */
        public void setPercentCooled(PercentCooledType value) {
            this.percentCooled = value;
        }

        /** @return possible object is {@link UseDecimalType } */
        public UseDecimalType getNumberOfResidentialLivingUnitsHighRiseSetting() {
            return numberOfResidentialLivingUnitsHighRiseSetting;
        }

        /** @param value allowed object is {@link UseDecimalType } */
        public void setNumberOfResidentialLivingUnitsHighRiseSetting(UseDecimalType value) {
            this.numberOfResidentialLivingUnitsHighRiseSetting = value;
        }

        /** @return possible object is {@link ResidentPopulationType } */
        public ResidentPopulationType getResidentPopulation() {
            return residentPopulation;
        }

        /** @param value allowed object is {@link ResidentPopulationType } */
        public void setResidentPopulation(ResidentPopulationType value) {
            this.residentPopulation = value;
        }

        /** @return possible object is {@link UseYesNoType } */
        public UseYesNoType getGovernmentSubsidizedHousing() {
            return governmentSubsidizedHousing;
        }

        /** @param value allowed object is {@link UseYesNoType } */
        public void setGovernmentSubsidizedHousing(UseYesNoType value) {
            this.governmentSubsidizedHousing = value;
        }

    }

}
/* * Copyright (c) 2014 Hewlett-Packard Development Company, L.P. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package monasca.common.util; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertNotNull; import static org.testng.Assert.assertTrue; import javax.annotation.Nullable; import javax.inject.Inject; import net.sf.cglib.proxy.Enhancer; import net.sf.cglib.proxy.NoOp; import org.testng.annotations.Test; import com.codahale.metrics.annotation.Timed; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonRootName; import com.fasterxml.jackson.databind.JsonNode; import com.google.common.base.Preconditions; import com.google.inject.AbstractModule; import com.palominolabs.metrics.guice.InstrumentationModule; @Test public class SerializationTest { @JsonRootName("instance-id") public static class AnnotatedCommand { public String value = "test"; } public static class TestCommand { public String uuid; public int type; public TestCommand() { } public TestCommand(String uuid, int type) { this.uuid = uuid; this.type = type; } @Timed public void handleIt() { } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; TestCommand other = (TestCommand) obj; if (type != other.type) return false; if (uuid == null) { if (other.uuid != null) return false; } else if (!uuid.equals(other.uuid)) return false; return true; } } public static class TestInjectedCommand { 
transient @Inject String value; } @SuppressWarnings("unchecked") private static <T> T proxyFor(Class<T> type) throws Exception { Enhancer enhancer = new Enhancer(); enhancer.setSuperclass(TestCommand.class); enhancer.setCallbackType(NoOp.class); Class<T> enhanced = enhancer.createClass(); return enhanced.newInstance(); } @JsonRootName(value = "create-instance") @JsonIgnoreProperties(ignoreUnknown = true) static class CreateInstanceCommand { public String uuid; public String remoteUuid; public String remoteHostName; public String tenantId = "default_tenant"; public String userData; public Duration pollingFrequency; public Duration waitDuration; public CreateInstanceCommand() { } public CreateInstanceCommand(String uuid, @Nullable String remoteUuid, String remoteHostName, @Nullable String tenantId, @Nullable String userData, @Nullable Duration pollingFrequency, @Nullable Duration waitDuraion) { this.uuid = Preconditions.checkNotNull(uuid); this.remoteUuid = remoteUuid; this.remoteHostName = Preconditions.checkNotNull(remoteHostName); if (tenantId != null) this.tenantId = tenantId; this.userData = userData; this.pollingFrequency = pollingFrequency; this.waitDuration = waitDuraion; } } @Test(enabled = false) public void shouldDeserializeFromJsonElement() { JsonNode node = Serialization.toJsonNode("{\"uuid\":\"123\",\"type\":1}"); TestCommand command = Serialization.fromJson(node, TestCommand.class); assertEquals(command, new TestCommand("123", 1)); } public void shouldDeserializeFromWrappedJsonElement() { TestCommand command = new TestCommand("123", 1); JsonNode node = Serialization.toJsonNode(command); Serialization.registerTarget(TestCommand.class); TestCommand command2 = Serialization.fromJson(node); assertEquals(command2, command); } public void shouldDeserialize() { TestCommand command = new TestCommand("123", 1); String json = "{\"TestCommand\":{\"uuid\":\"123\",\"type\":1}}"; Serialization.registerTarget(TestCommand.class); TestCommand command2 = 
Serialization.fromJson(json); assertEquals(command2, command); } @Test(expectedExceptions = IllegalStateException.class) public void shouldFailToDeserializeOnUnknownTargetType() { String json = "{\"Blah\":{\"uuid\":\"123\",\"type\":1}}"; Serialization.fromJson(json); } public void shouldDeserializeWithRootType() { TestCommand command = new TestCommand("123", 1); String json = "{\"TestCommand\":{\"uuid\":\"123\",\"type\":1}}"; TestCommand command2 = Serialization.fromJson(json, TestCommand.class); assertEquals(command2, command); } public void shouldRegisterTargetUsingAnnotatedName() { Serialization.registerTarget(AnnotatedCommand.class); assertTrue(Serialization.targetTypes.containsKey("instance-id")); } public void shouldSerializeWrappedValueFromJsonToNode() { String json = "{\"TestCommand\":{\"uuid\":\"123\",\"type\":1}}"; JsonNode node = Serialization.toJsonNode(json); TestCommand cmd = Serialization.fromJson(node, TestCommand.class); assertEquals(cmd, new TestCommand("123", 1)); } public void shouldSerialize() { TestCommand command = new TestCommand("123", 1); String json = "{\"TestCommand\":{\"uuid\":\"123\",\"type\":1}}"; String ser = Serialization.toJson(command); assertEquals(ser, json); } // TODO enable after https://github.com/FasterXML/jackson-databind/issues/412 is fixed @Test(enabled = false) public void shouldSerializeAndDeserializeProxies() throws Throwable { TestCommand cmd = proxyFor(TestCommand.class); cmd.uuid = "1234"; cmd.type = 1; String json = Serialization.toJson(cmd); TestCommand cmd1 = Serialization.fromJson(json); assertEquals(cmd.uuid, cmd1.uuid); assertEquals(cmd.type, cmd1.type); } public void shouldSerializeAndDeserializeToRegisteredType() { TestCommand command = new TestCommand("123", 1); String json = Serialization.toJson(command); assertEquals(Serialization.fromJson(json), command); } public void toJsonShouldUseAnnotatedRootName() { String json = Serialization.toJson(new AnnotatedCommand()); String expected = 
"{\"instance-id\":{\"value\":\"test\"}}"; assertEquals(json, expected); } // TODO enable after https://github.com/FasterXML/jackson-databind/issues/412 is fixed @Test(enabled = false) public void shouldSerializeGuiceInstrumentedProxies() { Injector.reset(); Injector.registerModules(new InstrumentationModule()); TestCommand cmd = Injector.getInstance(TestCommand.class); cmd.uuid = "1234"; cmd.type = 5; String json = Serialization.toJson(cmd); TestCommand cmd1 = Serialization.fromJson(json); assertEquals(cmd, cmd1); } public void shouldProduceFullyInjectedInstances() { Injector.reset(); Injector.registerModules(new AbstractModule() { protected void configure() { bind(String.class).toInstance("test"); } }); String json = Serialization.toJson(new TestInjectedCommand()); TestInjectedCommand cmd = Serialization.fromJson(json); assertEquals(cmd.value, "test"); } public void shouldParseSingleQuotedJson() { String json = "{'args': {'volume': {'df': '/dev/vdb 505G 8.7G 471G 2% /mnt'}, 'state': 1, 'hostname': '3b2350a6-c28e-4d01-8a8d-c7c9b512c0f0'}, 'method': 'update_instance_state'}"; assertNotNull(Serialization.toJsonNode(json)); } public void shouldSupportSuperflousElements() { Serialization.registerTarget(TestCommand.class); String json = "{\"TestCommand\":{\"uuid\":\"123\",\"type\":1,\"foo\":\"bar\"}}"; TestCommand cmd = Serialization.fromJson(json); assertEquals(cmd.uuid, "123"); } @Test(enabled = false) public void shouldSerializeAndDeserializeNullValues() { } public void shouldSerializeAndDeserializePrimitives() { int i = 1; String json = Serialization.toJson(i); int ii = Serialization.<Integer>fromJson(json); assertEquals(i, ii); Integer j = 1; json = Serialization.toJson(i); Integer jj = Serialization.<Integer>fromJson(json); assertEquals(j, jj); } }
/* * Copyright 2015 Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.security.impl; import com.google.common.collect.Lists; import org.apache.felix.scr.annotations.Component; import org.apache.felix.scr.annotations.Reference; import org.apache.felix.scr.annotations.ReferenceCardinality; import org.apache.felix.scr.annotations.Activate; import org.apache.felix.scr.annotations.Deactivate; import org.apache.felix.scr.annotations.Service; import org.onosproject.app.ApplicationAdminService; import org.onosproject.app.ApplicationState; import org.onosproject.core.Application; import org.onosproject.core.ApplicationId; import org.onosproject.event.EventDeliveryService; import org.onosproject.event.ListenerRegistry; import org.onosproject.security.AppPermission; import org.onosproject.security.SecurityAdminService; import org.onosproject.security.store.SecurityModeEvent; import org.onosproject.security.store.SecurityModeListener; import org.onosproject.security.store.SecurityModeStore; import org.onosproject.security.store.SecurityModeStoreDelegate; import org.osgi.framework.BundleContext; import org.osgi.framework.FrameworkUtil; import org.osgi.framework.ServicePermission; import org.osgi.service.log.LogEntry; import org.osgi.service.log.LogListener; import org.osgi.service.log.LogReaderService; import org.osgi.service.permissionadmin.PermissionInfo; import java.security.AccessControlException; import java.security.Permission; import 
java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import org.osgi.service.permissionadmin.PermissionAdmin; import org.slf4j.Logger; import static org.slf4j.LoggerFactory.getLogger; /** * Security-Mode ONOS management implementation. * * Note: Activating Security-Mode ONOS has significant performance implications in Drake. * See the wiki for instructions on how to activate it. */ @Component(immediate = true) @Service public class SecurityModeManager implements SecurityAdminService { @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) protected SecurityModeStore store; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) protected ApplicationAdminService appAdminService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) protected LogReaderService logReaderService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) protected EventDeliveryService eventDispatcher; private final Logger log = getLogger(getClass()); protected final ListenerRegistry<SecurityModeEvent, SecurityModeListener> listenerRegistry = new ListenerRegistry<>(); private final SecurityModeStoreDelegate delegate = new InternalStoreDelegate(); private SecurityLogListener securityLogListener = new SecurityLogListener(); private PermissionAdmin permissionAdmin = getPermissionAdmin(); @Activate public void activate() { eventDispatcher.addSink(SecurityModeEvent.class, listenerRegistry); logReaderService.addLogListener(securityLogListener); if (System.getSecurityManager() == null) { log.warn("J2EE security manager is disabled."); deactivate(); return; } if (permissionAdmin == null) { log.warn("Permission Admin not found."); deactivate(); return; } store.setDelegate(delegate); log.info("Security-Mode Started"); } @Deactivate public void deactivate() { eventDispatcher.removeSink(SecurityModeEvent.class); logReaderService.removeLogListener(securityLogListener); 
// NOTE(review): this chunk begins mid-class — the class declaration, its fields
// (store, log, appAdminService, permissionAdmin, eventDispatcher, delegate) and the
// opening of the method below live outside the visible fragment.
store.unsetDelegate(delegate);
log.info("Stopped");
}

/**
 * Returns whether the given application runs under security-mode enforcement,
 * registering it with the store first if it has no recorded state yet.
 */
@Override
public boolean isSecured(ApplicationId appId) {
    if (store.getState(appId) == null) {
        store.registerApplication(appId);
    }
    return store.isSecured(appId);
}

/** Marks the application's policy as under review (registering it on first sight). */
@Override
public void review(ApplicationId appId) {
    if (store.getState(appId) == null) {
        store.registerApplication(appId);
    }
    store.reviewPolicy(appId);
}

/**
 * Accepts the application's security policy, granting it the maximum permission
 * set derived from its role and declared permissions.
 */
@Override
public void acceptPolicy(ApplicationId appId) {
    if (store.getState(appId) == null) {
        store.registerApplication(appId);
    }
    store.acceptPolicy(appId,
            DefaultPolicyBuilder.convertToOnosPermissions(getMaximumPermissions(appId)));
}

/** Registers the application with the security-mode store. */
@Override
public void register(ApplicationId appId) {
    store.registerApplication(appId);
}

/** Returns the maximum (declared/specified) permissions, grouped for display. */
@Override
public Map<Integer, List<Permission>> getPrintableSpecifiedPermissions(ApplicationId appId) {
    return getPrintablePermissionMap(getMaximumPermissions(appId));
}

/** Returns the permissions currently granted to the application, grouped for display. */
@Override
public Map<Integer, List<Permission>> getPrintableGrantedPermissions(ApplicationId appId) {
    return getPrintablePermissionMap(
            DefaultPolicyBuilder.convertToJavaPermissions(store.getGrantedPermissions(appId)));
}

/** Returns the permissions the application has requested at runtime, grouped for display. */
@Override
public Map<Integer, List<Permission>> getPrintableRequestedPermissions(ApplicationId appId) {
    return getPrintablePermissionMap(
            DefaultPolicyBuilder.convertToJavaPermissions(store.getRequestedPermissions(appId)));
}

/**
 * OSGi log listener that watches for AccessControlExceptions raised by secured,
 * ACTIVE applications and records each denied permission as a policy violation.
 */
private class SecurityLogListener implements LogListener {

    @Override
    public void logged(LogEntry entry) {
        if (entry.getException() != null
                && entry.getException() instanceof AccessControlException) {
            String location = entry.getBundle().getLocation();
            Permission javaPerm =
                    ((AccessControlException) entry.getException()).getPermission();
            // Translate the java.security.Permission into the ONOS permission model;
            // permissions the model does not know about are only logged, not recorded.
            org.onosproject.security.Permission permission =
                    DefaultPolicyBuilder.getOnosPermission(javaPerm);
            if (permission == null) {
                log.warn("Unsupported permission requested.");
                return;
            }
            // Only secured applications that are currently ACTIVE produce violation records.
            store.getApplicationIds(location).stream().filter(
                    appId -> store.isSecured(appId)
                            && appAdminService.getState(appId) == ApplicationState.ACTIVE)
                    .forEach(appId -> {
                        store.requestPermission(appId, permission);
                        print("[POLICY VIOLATION] APP: %s / Bundle: %s / Permission: %s ",
                                appId.name(), location, permission.toString());
                    });
        }
    }
}

/** Relays security-mode store events to the local log and the event dispatcher. */
private class InternalStoreDelegate implements SecurityModeStoreDelegate {

    @Override
    public void notify(SecurityModeEvent event) {
        if (event.type() == SecurityModeEvent.Type.POLICY_ACCEPTED) {
            // Acceptance is the only event that changes local enforcement state.
            setLocalPermissions(event.subject());
            log.info("{} POLICY ACCEPTED and ENFORCED", event.subject().name());
        } else if (event.type() == SecurityModeEvent.Type.POLICY_VIOLATED) {
            log.info("{} POLICY VIOLATED", event.subject().name());
        } else if (event.type() == SecurityModeEvent.Type.POLICY_REVIEWED) {
            log.info("{} POLICY REVIEWED", event.subject().name());
        }
        eventDispatcher.post(event);
    }
}

/**
 * TYPES.
 * 0 - APP_PERM
 * 1 - ADMIN SERVICE
 * 2 - NB_SERVICE
 * 3 - ETC_SERVICE
 * 4 - ETC
 * @param perms permissions to sort into the display buckets above
 */
private Map<Integer, List<Permission>> getPrintablePermissionMap(List<Permission> perms) {
    // NOTE(review): raw ArrayList construction below produces unchecked warnings;
    // new ArrayList<>() would be preferable — left unchanged here.
    ConcurrentHashMap<Integer, List<Permission>> sortedMap = new ConcurrentHashMap<>();
    sortedMap.put(0, new ArrayList());
    sortedMap.put(1, new ArrayList());
    sortedMap.put(2, new ArrayList());
    sortedMap.put(3, new ArrayList());
    sortedMap.put(4, new ArrayList());
    for (Permission perm : perms) {
        if (perm instanceof ServicePermission) {
            // Northbound services split into admin vs. plain; everything else is "etc".
            if (DefaultPolicyBuilder.getNBServiceList().contains(perm.getName())) {
                if (perm.getName().contains("Admin")) {
                    sortedMap.get(1).add(perm);
                } else {
                    sortedMap.get(2).add(perm);
                }
            } else {
                sortedMap.get(3).add(perm);
            }
        } else if (perm instanceof AppPermission) {
            sortedMap.get(0).add(perm);
        } else {
            sortedMap.get(4).add(perm);
        }
    }
    return sortedMap;
}

/** Pushes the granted permission set to the OSGi permission admin for each bundle. */
private void setLocalPermissions(ApplicationId applicationId) {
    for (String location : store.getBundleLocations(applicationId)) {
        permissionAdmin.setPermissions(location,
                permissionsToInfo(store.getGrantedPermissions(applicationId)));
    }
}

/** Converts ONOS permissions to the OSGi PermissionInfo array form. */
private PermissionInfo[] permissionsToInfo(Set<org.onosproject.security.Permission> permissions) {
    List<PermissionInfo> result = Lists.newArrayList();
    for (org.onosproject.security.Permission perm : permissions) {
        result.add(new PermissionInfo(perm.getClassName(), perm.getName(), perm.getActions()));
    }
    PermissionInfo[] permissionInfos = new PermissionInfo[result.size()];
    return result.toArray(permissionInfos);
}

/**
 * Computes the largest permission set the application may be granted, based on
 * its declared role (ADMIN / USER / UNSPECIFIED).
 * Returns null when the application is unknown to the admin service.
 */
private List<Permission> getMaximumPermissions(ApplicationId appId) {
    Application app = appAdminService.getApplication(appId);
    if (app == null) {
        print("Unknown application.");
        return null;
    }
    List<Permission> appPerms;
    switch (app.role()) {
        case ADMIN:
            appPerms = DefaultPolicyBuilder.getAdminApplicationPermissions(app.permissions());
            break;
        case USER:
            appPerms = DefaultPolicyBuilder.getUserApplicationPermissions(app.permissions());
            break;
        case UNSPECIFIED:
        default:
            appPerms = DefaultPolicyBuilder.getDefaultPerms();
            break;
    }
    return appPerms;
}

/** Writes a formatted message to stdout (prefixed "SM-ONOS: ") and to the warn log. */
private void print(String format, Object... args) {
    System.out.println(String.format("SM-ONOS: " + format, args));
    log.warn(String.format(format, args));
}

/** Looks up the OSGi PermissionAdmin service from the bundle context. */
private PermissionAdmin getPermissionAdmin() {
    BundleContext context = getBundleContext();
    return (PermissionAdmin) context.getService(
            context.getServiceReference(PermissionAdmin.class.getName()));
}

/** Returns the bundle context of the bundle that loaded this class. */
private BundleContext getBundleContext() {
    return FrameworkUtil.getBundle(this.getClass()).getBundleContext();
}
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by http://code.google.com/p/protostuff/ ... DO NOT EDIT!
// Generated from protobuf

package org.apache.drill.exec.proto.beans;

import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;

import com.dyuproject.protostuff.GraphIOUtil;
import com.dyuproject.protostuff.Input;
import com.dyuproject.protostuff.Message;
import com.dyuproject.protostuff.Output;
import com.dyuproject.protostuff.Schema;

/**
 * Protostuff-generated bean mirroring the DrillbitEndpoint protobuf message:
 * a Drillbit's network address plus its user/control/data ports and Roles.
 * NOTE(review): generated code — regenerate from the .proto rather than hand-editing.
 */
public final class DrillbitEndpoint implements Externalizable, Message<DrillbitEndpoint>, Schema<DrillbitEndpoint>
{
    public static Schema<DrillbitEndpoint> getSchema()
    {
        return DEFAULT_INSTANCE;
    }

    public static DrillbitEndpoint getDefaultInstance()
    {
        return DEFAULT_INSTANCE;
    }

    static final DrillbitEndpoint DEFAULT_INSTANCE = new DrillbitEndpoint();

    // Message fields; protobuf field numbers 1-5 (see mergeFrom/writeTo below).
    private String address;
    private int userPort;
    private int controlPort;
    private int dataPort;
    private Roles roles;

    public DrillbitEndpoint()
    {
    }

    // getters and setters

    // address
    public String getAddress()
    {
        return address;
    }

    public DrillbitEndpoint setAddress(String address)
    {
        this.address = address;
        return this;
    }

    // userPort
    public int getUserPort()
    {
        return userPort;
    }

    public DrillbitEndpoint setUserPort(int userPort)
    {
        this.userPort = userPort;
        return this;
    }

    // controlPort
    public int getControlPort()
    {
        return controlPort;
    }

    public DrillbitEndpoint setControlPort(int controlPort)
    {
        this.controlPort = controlPort;
        return this;
    }

    // dataPort
    public int getDataPort()
    {
        return dataPort;
    }

    public DrillbitEndpoint setDataPort(int dataPort)
    {
        this.dataPort = dataPort;
        return this;
    }

    // roles
    public Roles getRoles()
    {
        return roles;
    }

    public DrillbitEndpoint setRoles(Roles roles)
    {
        this.roles = roles;
        return this;
    }

    // java serialization

    public void readExternal(ObjectInput in) throws IOException
    {
        GraphIOUtil.mergeDelimitedFrom(in, this, this);
    }

    public void writeExternal(ObjectOutput out) throws IOException
    {
        GraphIOUtil.writeDelimitedTo(out, this, this);
    }

    // message method

    public Schema<DrillbitEndpoint> cachedSchema()
    {
        return DEFAULT_INSTANCE;
    }

    // schema methods

    public DrillbitEndpoint newMessage()
    {
        return new DrillbitEndpoint();
    }

    public Class<DrillbitEndpoint> typeClass()
    {
        return DrillbitEndpoint.class;
    }

    public String messageName()
    {
        return DrillbitEndpoint.class.getSimpleName();
    }

    public String messageFullName()
    {
        return DrillbitEndpoint.class.getName();
    }

    // Every field is optional, so any instance counts as initialized.
    public boolean isInitialized(DrillbitEndpoint message)
    {
        return true;
    }

    public void mergeFrom(Input input, DrillbitEndpoint message) throws IOException
    {
        // Read fields until the end-of-message marker (field number 0).
        for(int number = input.readFieldNumber(this);; number = input.readFieldNumber(this))
        {
            switch(number)
            {
                case 0:
                    return;
                case 1:
                    message.address = input.readString();
                    break;
                case 2:
                    message.userPort = input.readInt32();
                    break;
                case 3:
                    message.controlPort = input.readInt32();
                    break;
                case 4:
                    message.dataPort = input.readInt32();
                    break;
                case 5:
                    message.roles = input.mergeObject(message.roles, Roles.getSchema());
                    break;
                default:
                    input.handleUnknownField(number, this);
            }
        }
    }

    public void writeTo(Output output, DrillbitEndpoint message) throws IOException
    {
        // Only non-default values are serialized.
        if(message.address != null)
            output.writeString(1, message.address, false);

        if(message.userPort != 0)
            output.writeInt32(2, message.userPort, false);

        if(message.controlPort != 0)
            output.writeInt32(3, message.controlPort, false);

        if(message.dataPort != 0)
            output.writeInt32(4, message.dataPort, false);

        if(message.roles != null)
            output.writeObject(5, message.roles, Roles.getSchema(), false);
    }

    public String getFieldName(int number)
    {
        switch(number)
        {
            case 1: return "address";
            case 2: return "userPort";
            case 3: return "controlPort";
            case 4: return "dataPort";
            case 5: return "roles";
            default: return null;
        }
    }

    public int getFieldNumber(String name)
    {
        final Integer number = __fieldMap.get(name);
        return number == null ? 0 : number.intValue();
    }

    // Reverse lookup: field name -> protobuf field number.
    private static final java.util.HashMap<String,Integer> __fieldMap = new java.util.HashMap<String,Integer>();
    static
    {
        __fieldMap.put("address", 1);
        __fieldMap.put("userPort", 2);
        __fieldMap.put("controlPort", 3);
        __fieldMap.put("dataPort", 4);
        __fieldMap.put("roles", 5);
    }

}
/* Copyright (c) 2012 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.atc.utilidad;

import android.app.Activity;
import android.app.PendingIntent;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.IntentSender.SendIntentException;
import android.content.ServiceConnection;
import android.os.Bundle;
import android.os.Handler;
import android.os.IBinder;
import android.os.RemoteException;
import android.text.TextUtils;
import android.util.Log;

import com.android.vending.billing.IInAppBillingService;

import org.json.JSONException;

import java.util.ArrayList;
import java.util.List;

/**
 * Provides convenience methods for in-app billing. You can create one instance of this
 * class for your application and use it to process in-app billing operations.
 * It provides synchronous (blocking) and asynchronous (non-blocking) methods for
 * many common in-app billing operations, as well as automatic signature
 * verification.
 *
 * After instantiating, you must perform setup in order to start using the object.
 * To perform setup, call the {@link #startSetup} method and provide a listener;
 * that listener will be notified when setup is complete, after which (and not before)
 * you may call other methods.
 *
 * After setup is complete, you will typically want to request an inventory of owned
 * items and subscriptions. See {@link #queryInventory}, {@link #queryInventoryAsync}
 * and related methods.
 *
 * When you are done with this object, don't forget to call {@link #dispose}
 * to ensure proper cleanup. This object holds a binding to the in-app billing
 * service, which will leak unless you dispose of it correctly. If you created
 * the object on an Activity's onCreate method, then the recommended
 * place to dispose of it is the Activity's onDestroy method.
 *
 * A note about threading: When using this object from a background thread, you may
 * call the blocking versions of methods; when using from a UI thread, call
 * only the asynchronous versions and handle the results via callbacks.
 * Also, notice that you can only call one asynchronous operation at a time;
 * attempting to start a second asynchronous operation while the first one
 * has not yet completed will result in an exception being thrown.
 *
 * @author Bruno Oliveira (Google)
 *
 */
public class IabHelper {
    // Is debug logging enabled?
    boolean mDebugLog = false;
    String mDebugTag = "IabHelper";

    // Is setup done?
    boolean mSetupDone = false;

    // Has this object been disposed of? (If so, we should ignore callbacks, etc)
    boolean mDisposed = false;

    // Are subscriptions supported?
    boolean mSubscriptionsSupported = false;

    // Is an asynchronous operation in progress?
    // (only one at a time can be in progress)
    boolean mAsyncInProgress = false;

    // (for logging/debugging)
    // if mAsyncInProgress == true, what asynchronous operation is in progress?
String mAsyncOperation = "";

    // Context we were passed during initialization
    Context mContext;

    // Connection to the service
    IInAppBillingService mService;
    ServiceConnection mServiceConn;

    // The request code used to launch purchase flow
    int mRequestCode;

    // The item type of the current purchase flow
    String mPurchasingItemType;

    // Public key for verifying signature, in base64 encoding
    String mSignatureBase64 = null;

    // Billing response codes
    public static final int BILLING_RESPONSE_RESULT_OK = 0;
    public static final int BILLING_RESPONSE_RESULT_USER_CANCELED = 1;
    public static final int BILLING_RESPONSE_RESULT_BILLING_UNAVAILABLE = 3;
    public static final int BILLING_RESPONSE_RESULT_ITEM_UNAVAILABLE = 4;
    public static final int BILLING_RESPONSE_RESULT_DEVELOPER_ERROR = 5;
    public static final int BILLING_RESPONSE_RESULT_ERROR = 6;
    public static final int BILLING_RESPONSE_RESULT_ITEM_ALREADY_OWNED = 7;
    public static final int BILLING_RESPONSE_RESULT_ITEM_NOT_OWNED = 8;

    // IAB Helper error codes (negative, so they never collide with billing responses)
    public static final int IABHELPER_ERROR_BASE = -1000;
    public static final int IABHELPER_REMOTE_EXCEPTION = -1001;
    public static final int IABHELPER_BAD_RESPONSE = -1002;
    public static final int IABHELPER_VERIFICATION_FAILED = -1003;
    public static final int IABHELPER_SEND_INTENT_FAILED = -1004;
    public static final int IABHELPER_USER_CANCELLED = -1005;
    public static final int IABHELPER_UNKNOWN_PURCHASE_RESPONSE = -1006;
    public static final int IABHELPER_MISSING_TOKEN = -1007;
    public static final int IABHELPER_UNKNOWN_ERROR = -1008;
    public static final int IABHELPER_SUBSCRIPTIONS_NOT_AVAILABLE = -1009;
    public static final int IABHELPER_INVALID_CONSUMPTION = -1010;

    // Keys for the responses from InAppBillingService
    public static final String RESPONSE_CODE = "RESPONSE_CODE";
    public static final String RESPONSE_GET_SKU_DETAILS_LIST = "DETAILS_LIST";
    public static final String RESPONSE_BUY_INTENT = "BUY_INTENT";
    public static final String RESPONSE_INAPP_PURCHASE_DATA = "INAPP_PURCHASE_DATA";
    public static final String RESPONSE_INAPP_SIGNATURE = "INAPP_DATA_SIGNATURE";
    public static final String RESPONSE_INAPP_ITEM_LIST = "INAPP_PURCHASE_ITEM_LIST";
    public static final String RESPONSE_INAPP_PURCHASE_DATA_LIST = "INAPP_PURCHASE_DATA_LIST";
    public static final String RESPONSE_INAPP_SIGNATURE_LIST = "INAPP_DATA_SIGNATURE_LIST";
    public static final String INAPP_CONTINUATION_TOKEN = "INAPP_CONTINUATION_TOKEN";

    // Item types
    public static final String ITEM_TYPE_INAPP = "inapp";
    public static final String ITEM_TYPE_SUBS = "subs";

    // some fields on the getSkuDetails response bundle
    public static final String GET_SKU_DETAILS_ITEM_LIST = "ITEM_ID_LIST";
    public static final String GET_SKU_DETAILS_ITEM_TYPE_LIST = "ITEM_TYPE_LIST";

    /**
     * Creates an instance. After creation, it will not yet be ready to use. You must perform
     * setup by calling {@link #startSetup} and wait for setup to complete. This constructor does not
     * block and is safe to call from a UI thread.
     *
     * @param ctx Your application or Activity context. Needed to bind to the in-app billing service.
     * @param base64PublicKey Your application's public key, encoded in base64.
     *     This is used for verification of purchase signatures. You can find your app's base64-encoded
     *     public key in your application's page on Google Play Developer Console. Note that this
     *     is NOT your "developer public key".
     */
    public IabHelper(Context ctx, String base64PublicKey) {
        // Keep the application context (not the Activity) so the helper does not leak the Activity.
        mContext = ctx.getApplicationContext();
        mSignatureBase64 = base64PublicKey;
        logDebug("IAB helper created.");
    }

    /**
     * Enables or disable debug logging through LogCat.
     */
    public void enableDebugLogging(boolean enable, String tag) {
        checkNotDisposed();
        mDebugLog = enable;
        mDebugTag = tag;
    }

    public void enableDebugLogging(boolean enable) {
        checkNotDisposed();
        mDebugLog = enable;
    }

    /**
     * Callback for setup process. This listener's {@link #onIabSetupFinished} method is called
     * when the setup process is complete.
*/
    public interface OnIabSetupFinishedListener {
        /**
         * Called to notify that setup is complete.
         *
         * @param result The result of the setup process.
         */
        public void onIabSetupFinished(IabResult result);
    }

    /**
     * Starts the setup process. This will start up the setup process asynchronously.
     * You will be notified through the listener when the setup process is complete.
     * This method is safe to call from a UI thread.
     *
     * @param listener The listener to notify when the setup process is complete.
     */
    public void startSetup(final OnIabSetupFinishedListener listener) {
        // If already set up, can't do it again.
        checkNotDisposed();
        if (mSetupDone) throw new IllegalStateException("IAB helper is already set up.");

        // Connection to IAB service
        logDebug("Starting in-app billing setup.");
        mServiceConn = new ServiceConnection() {
            @Override
            public void onServiceDisconnected(ComponentName name) {
                logDebug("Billing service disconnected.");
                mService = null;
            }

            @Override
            public void onServiceConnected(ComponentName name, IBinder service) {
                // The helper may have been disposed while the bind was in flight.
                if (mDisposed) return;
                logDebug("Billing service connected.");
                mService = IInAppBillingService.Stub.asInterface(service);
                String packageName = mContext.getPackageName();
                try {
                    logDebug("Checking for in-app billing 3 support.");

                    // check for in-app billing v3 support
                    int response = mService.isBillingSupported(3, packageName, ITEM_TYPE_INAPP);
                    if (response != BILLING_RESPONSE_RESULT_OK) {
                        if (listener != null) listener.onIabSetupFinished(
                                new IabResult(response,
                                        "Error checking for billing v3 support."));

                        // if in-app purchases aren't supported, neither are subscriptions.
                        mSubscriptionsSupported = false;
                        return;
                    }
                    logDebug("In-app billing version 3 supported for " + packageName);

                    // check for v3 subscriptions support
                    response = mService.isBillingSupported(3, packageName, ITEM_TYPE_SUBS);
                    if (response == BILLING_RESPONSE_RESULT_OK) {
                        logDebug("Subscriptions AVAILABLE.");
                        mSubscriptionsSupported = true;
                    } else {
                        logDebug("Subscriptions NOT AVAILABLE. Response: " + response);
                    }

                    mSetupDone = true;
                } catch (RemoteException e) {
                    if (listener != null) {
                        listener.onIabSetupFinished(
                                new IabResult(IABHELPER_REMOTE_EXCEPTION,
                                        "RemoteException while setting up in-app billing."));
                    }
                    e.printStackTrace();
                    return;
                }

                if (listener != null) {
                    listener.onIabSetupFinished(
                            new IabResult(BILLING_RESPONSE_RESULT_OK, "Setup successful."));
                }
            }
        };

        Intent serviceIntent = new Intent("com.android.vending.billing.InAppBillingService.BIND");
        serviceIntent.setPackage("com.android.vending");
        if (!mContext.getPackageManager().queryIntentServices(serviceIntent, 0).isEmpty()) {
            // service available to handle that Intent
            mContext.bindService(serviceIntent, mServiceConn, Context.BIND_AUTO_CREATE);
        } else {
            // no service available to handle that Intent
            if (listener != null) {
                listener.onIabSetupFinished(
                        new IabResult(BILLING_RESPONSE_RESULT_BILLING_UNAVAILABLE,
                                "Billing service unavailable on device."));
            }
        }
    }

    /**
     * Dispose of object, releasing resources. It's very important to call this
     * method when you are done with this object. It will release any resources
     * used by it such as service connections. Naturally, once the object is
     * disposed of, it can't be used again.
     */
    public void dispose() {
        logDebug("Disposing.");
        mSetupDone = false;
        if (mServiceConn != null) {
            logDebug("Unbinding from service.");
            if (mContext != null) mContext.unbindService(mServiceConn);
        }
        mDisposed = true;
        mContext = null;
        mServiceConn = null;
        mService = null;
        mPurchaseListener = null;
    }

    // Guard used by every public entry point: using a disposed helper is a programming error.
    private void checkNotDisposed() {
        if (mDisposed) throw new IllegalStateException("IabHelper was disposed of, so it cannot be used.");
    }

    /** Returns whether subscriptions are supported. */
    public boolean subscriptionsSupported() {
        checkNotDisposed();
        return mSubscriptionsSupported;
    }

    /**
     * Callback that notifies when a purchase is finished.
     */
    public interface OnIabPurchaseFinishedListener {
        /**
         * Called to notify that an in-app purchase finished.
If the purchase was successful,
         * then the sku parameter specifies which item was purchased. If the purchase failed,
         * the sku and extraData parameters may or may not be null, depending on how far the purchase
         * process went.
         *
         * @param result The result of the purchase.
         * @param info The purchase information (null if purchase failed)
         */
        public void onIabPurchaseFinished(IabResult result, Purchase info);
    }

    // The listener registered on launchPurchaseFlow, which we have to call back when
    // the purchase finishes
    OnIabPurchaseFinishedListener mPurchaseListener;

    public void launchPurchaseFlow(Activity act, String sku, int requestCode,
            OnIabPurchaseFinishedListener listener) {
        launchPurchaseFlow(act, sku, requestCode, listener, "");
    }

    public void launchPurchaseFlow(Activity act, String sku, int requestCode,
            OnIabPurchaseFinishedListener listener, String extraData) {
        launchPurchaseFlow(act, sku, ITEM_TYPE_INAPP, requestCode, listener, extraData);
    }

    public void launchSubscriptionPurchaseFlow(Activity act, String sku, int requestCode,
            OnIabPurchaseFinishedListener listener) {
        launchSubscriptionPurchaseFlow(act, sku, requestCode, listener, "");
    }

    public void launchSubscriptionPurchaseFlow(Activity act, String sku, int requestCode,
            OnIabPurchaseFinishedListener listener, String extraData) {
        launchPurchaseFlow(act, sku, ITEM_TYPE_SUBS, requestCode, listener, extraData);
    }

    /**
     * Initiate the UI flow for an in-app purchase. Call this method to initiate an in-app purchase,
     * which will involve bringing up the Google Play screen. The calling activity will be paused while
     * the user interacts with Google Play, and the result will be delivered via the activity's
     * {@link Activity#onActivityResult} method, at which point you must call
     * this object's {@link #handleActivityResult} method to continue the purchase flow. This method
     * MUST be called from the UI thread of the Activity.
     *
     * @param act The calling activity.
     * @param sku The sku of the item to purchase.
     * @param itemType indicates if it's a product or a subscription (ITEM_TYPE_INAPP or ITEM_TYPE_SUBS)
     * @param requestCode A request code (to differentiate from other responses --
     *     as in {@link Activity#startActivityForResult}).
     * @param listener The listener to notify when the purchase process finishes
     * @param extraData Extra data (developer payload), which will be returned with the purchase data
     *     when the purchase completes. This extra data will be permanently bound to that purchase
     *     and will always be returned when the purchase is queried.
     */
    public void launchPurchaseFlow(Activity act, String sku, String itemType, int requestCode,
            OnIabPurchaseFinishedListener listener, String extraData) {
        checkNotDisposed();
        checkSetupDone("launchPurchaseFlow");
        flagStartAsync("launchPurchaseFlow");
        IabResult result;

        if (itemType.equals(ITEM_TYPE_SUBS) && !mSubscriptionsSupported) {
            IabResult r = new IabResult(IABHELPER_SUBSCRIPTIONS_NOT_AVAILABLE,
                    "Subscriptions are not available.");
            // Release the async flag before reporting the failure.
            flagEndAsync();
            if (listener != null) listener.onIabPurchaseFinished(r, null);
            return;
        }

        try {
            logDebug("Constructing buy intent for " + sku + ", item type: " + itemType);
            Bundle buyIntentBundle = mService.getBuyIntent(3, mContext.getPackageName(),
                    sku, itemType, extraData);
            int response = getResponseCodeFromBundle(buyIntentBundle);
            if (response != BILLING_RESPONSE_RESULT_OK) {
                logError("Unable to buy item, Error response: " + getResponseDesc(response));
                flagEndAsync();
                result = new IabResult(response, "Unable to buy item");
                if (listener != null) listener.onIabPurchaseFinished(result, null);
                return;
            }

            PendingIntent pendingIntent = buyIntentBundle.getParcelable(RESPONSE_BUY_INTENT);
            logDebug("Launching buy intent for " + sku + ". Request code: " + requestCode);
            // Remember the in-flight purchase so handleActivityResult can complete it.
            mRequestCode = requestCode;
            mPurchaseListener = listener;
            mPurchasingItemType = itemType;
            act.startIntentSenderForResult(pendingIntent.getIntentSender(),
                    requestCode, new Intent(),
                    Integer.valueOf(0), Integer.valueOf(0), Integer.valueOf(0));
        } catch (SendIntentException e) {
            logError("SendIntentException while launching purchase flow for sku " + sku);
            e.printStackTrace();
            flagEndAsync();

            result = new IabResult(IABHELPER_SEND_INTENT_FAILED, "Failed to send intent.");
            if (listener != null) listener.onIabPurchaseFinished(result, null);
        } catch (RemoteException e) {
            logError("RemoteException while launching purchase flow for sku " + sku);
            e.printStackTrace();
            flagEndAsync();

            result = new IabResult(IABHELPER_REMOTE_EXCEPTION,
                    "Remote exception while starting purchase flow");
            if (listener != null) listener.onIabPurchaseFinished(result, null);
        }
    }

    /**
     * Handles an activity result that's part of the purchase flow in in-app billing. If you
     * are calling {@link #launchPurchaseFlow}, then you must call this method from your
     * Activity's {@link Activity#onActivityResult} method. This method
     * MUST be called from the UI thread of the Activity.
     *
     * @param requestCode The requestCode as you received it.
     * @param resultCode The resultCode as you received it.
     * @param data The data (Intent) as you received it.
     * @return Returns true if the result was related to a purchase flow and was handled;
     *     false if the result was not related to a purchase, in which case you should
     *     handle it normally.
*/
    public boolean handleActivityResult(int requestCode, int resultCode, Intent data) {
        IabResult result;
        // Not our request code: let the caller handle the result normally.
        if (requestCode != mRequestCode) return false;

        checkNotDisposed();
        checkSetupDone("handleActivityResult");

        // end of async purchase operation that started on launchPurchaseFlow
        flagEndAsync();

        if (data == null) {
            logError("Null data in IAB activity result.");
            result = new IabResult(IABHELPER_BAD_RESPONSE, "Null data in IAB result");
            if (mPurchaseListener != null) mPurchaseListener.onIabPurchaseFinished(result, null);
            return true;
        }

        int responseCode = getResponseCodeFromIntent(data);
        String purchaseData = data.getStringExtra(RESPONSE_INAPP_PURCHASE_DATA);
        String dataSignature = data.getStringExtra(RESPONSE_INAPP_SIGNATURE);

        if (resultCode == Activity.RESULT_OK && responseCode == BILLING_RESPONSE_RESULT_OK) {
            logDebug("Successful resultcode from purchase activity.");
            logDebug("Purchase data: " + purchaseData);
            logDebug("Data signature: " + dataSignature);
            logDebug("Extras: " + data.getExtras());
            logDebug("Expected item type: " + mPurchasingItemType);

            if (purchaseData == null || dataSignature == null) {
                logError("BUG: either purchaseData or dataSignature is null.");
                logDebug("Extras: " + data.getExtras().toString());
                result = new IabResult(IABHELPER_UNKNOWN_ERROR,
                        "IAB returned null purchaseData or dataSignature");
                if (mPurchaseListener != null) mPurchaseListener.onIabPurchaseFinished(result, null);
                return true;
            }

            Purchase purchase = null;
            try {
                purchase = new Purchase(mPurchasingItemType, purchaseData, dataSignature);
                String sku = purchase.getSku();

                // Verify signature
                if (!Security.verifyPurchase(mSignatureBase64, purchaseData, dataSignature)) {
                    logError("Purchase signature verification FAILED for sku " + sku);
                    result = new IabResult(IABHELPER_VERIFICATION_FAILED,
                            "Signature verification failed for sku " + sku);
                    if (mPurchaseListener != null) {
                        mPurchaseListener.onIabPurchaseFinished(result, purchase);
                    }
                    return true;
                }
                logDebug("Purchase signature successfully verified.");
            } catch (JSONException e) {
                logError("Failed to parse purchase data.");
                e.printStackTrace();
                result = new IabResult(IABHELPER_BAD_RESPONSE, "Failed to parse purchase data.");
                if (mPurchaseListener != null) mPurchaseListener.onIabPurchaseFinished(result, null);
                return true;
            }

            if (mPurchaseListener != null) {
                mPurchaseListener.onIabPurchaseFinished(
                        new IabResult(BILLING_RESPONSE_RESULT_OK, "Success"), purchase);
            }
        } else if (resultCode == Activity.RESULT_OK) {
            // result code was OK, but in-app billing response was not OK.
            logDebug("Result code was OK but in-app billing response was not OK: "
                    + getResponseDesc(responseCode));
            if (mPurchaseListener != null) {
                // NOTE(review): "purchashing" typo retained — this message may reach users.
                result = new IabResult(responseCode, "Problem purchashing item.");
                mPurchaseListener.onIabPurchaseFinished(result, null);
            }
        } else if (resultCode == Activity.RESULT_CANCELED) {
            logDebug("Purchase canceled - Response: " + getResponseDesc(responseCode));
            result = new IabResult(IABHELPER_USER_CANCELLED, "User canceled.");
            if (mPurchaseListener != null) mPurchaseListener.onIabPurchaseFinished(result, null);
        } else {
            logError("Purchase failed. Result code: " + Integer.toString(resultCode)
                    + ". Response: " + getResponseDesc(responseCode));
            result = new IabResult(IABHELPER_UNKNOWN_PURCHASE_RESPONSE, "Unknown purchase response.");
            if (mPurchaseListener != null) mPurchaseListener.onIabPurchaseFinished(result, null);
        }
        return true;
    }

    public Inventory queryInventory(boolean querySkuDetails, List<String> moreSkus)
            throws IabException {
        return queryInventory(querySkuDetails, moreSkus, null);
    }

    /**
     * Queries the inventory. This will query all owned items from the server, as well as
     * information on additional skus, if specified. This method may block or take long to execute.
     * Do not call from a UI thread. For that, use the non-blocking version {@link #queryInventoryAsync}.
     *
     * @param querySkuDetails if true, SKU details (price, description, etc) will be queried as well
     *     as purchase information.
* @param moreItemSkus additional PRODUCT skus to query information on, regardless of ownership. * Ignored if null or if querySkuDetails is false. * @param moreSubsSkus additional SUBSCRIPTIONS skus to query information on, regardless of ownership. * Ignored if null or if querySkuDetails is false. * @throws IabException if a problem occurs while refreshing the inventory. */ public Inventory queryInventory(boolean querySkuDetails, List<String> moreItemSkus, List<String> moreSubsSkus) throws IabException { checkNotDisposed(); checkSetupDone("queryInventory"); try { Inventory inv = new Inventory(); int r = queryPurchases(inv, ITEM_TYPE_INAPP); if (r != BILLING_RESPONSE_RESULT_OK) { throw new IabException(r, "Error refreshing inventory (querying owned items)."); } if (querySkuDetails) { r = querySkuDetails(ITEM_TYPE_INAPP, inv, moreItemSkus); if (r != BILLING_RESPONSE_RESULT_OK) { throw new IabException(r, "Error refreshing inventory (querying prices of items)."); } } // if subscriptions are supported, then also query for subscriptions if (mSubscriptionsSupported) { r = queryPurchases(inv, ITEM_TYPE_SUBS); if (r != BILLING_RESPONSE_RESULT_OK) { throw new IabException(r, "Error refreshing inventory (querying owned subscriptions)."); } if (querySkuDetails) { r = querySkuDetails(ITEM_TYPE_SUBS, inv, moreItemSkus); if (r != BILLING_RESPONSE_RESULT_OK) { throw new IabException(r, "Error refreshing inventory (querying prices of subscriptions)."); } } } return inv; } catch (RemoteException e) { throw new IabException(IABHELPER_REMOTE_EXCEPTION, "Remote exception while refreshing inventory.", e); } catch (JSONException e) { throw new IabException(IABHELPER_BAD_RESPONSE, "Error parsing JSON response while refreshing inventory.", e); } } /** * Listener that notifies when an inventory query operation completes. */ public interface QueryInventoryFinishedListener { /** * Called to notify that an inventory query operation completed. * * @param result The result of the operation. 
* @param inv The inventory. */ public void onQueryInventoryFinished(IabResult result, Inventory inv); } /** * Asynchronous wrapper for inventory query. This will perform an inventory * query as described in {@link #queryInventory}, but will do so asynchronously * and call back the specified listener upon completion. This method is safe to * call from a UI thread. * * @param querySkuDetails as in {@link #queryInventory} * @param moreSkus as in {@link #queryInventory} * @param listener The listener to notify when the refresh operation completes. */ public void queryInventoryAsync(final boolean querySkuDetails, final List<String> moreSkus, final QueryInventoryFinishedListener listener) { final Handler handler = new Handler(); checkNotDisposed(); checkSetupDone("queryInventory"); flagStartAsync("refresh inventory"); (new Thread(new Runnable() { public void run() { IabResult result = new IabResult(BILLING_RESPONSE_RESULT_OK, "Inventory refresh successful."); Inventory inv = null; try { inv = queryInventory(querySkuDetails, moreSkus); } catch (IabException ex) { result = ex.getResult(); } flagEndAsync(); final IabResult result_f = result; final Inventory inv_f = inv; if (!mDisposed && listener != null) { handler.post(new Runnable() { public void run() { listener.onQueryInventoryFinished(result_f, inv_f); } }); } } })).start(); } public void queryInventoryAsync(QueryInventoryFinishedListener listener) { queryInventoryAsync(true, null, listener); } public void queryInventoryAsync(boolean querySkuDetails, QueryInventoryFinishedListener listener) { queryInventoryAsync(querySkuDetails, null, listener); } /** * Consumes a given in-app product. Consuming can only be done on an item * that's owned, and as a result of consumption, the user will no longer own it. * This method may block or take long to return. Do not call from the UI thread. * For that, see {@link #consumeAsync}. * * @param itemInfo The PurchaseInfo that represents the item to consume. 
* @throws IabException if there is a problem during consumption. */ void consume(Purchase itemInfo) throws IabException { checkNotDisposed(); checkSetupDone("consume"); if (!itemInfo.mItemType.equals(ITEM_TYPE_INAPP)) { throw new IabException(IABHELPER_INVALID_CONSUMPTION, "Items of type '" + itemInfo.mItemType + "' can't be consumed."); } try { String token = itemInfo.getToken(); String sku = itemInfo.getSku(); if (token == null || token.equals("")) { logError("Can't consume "+ sku + ". No token."); throw new IabException(IABHELPER_MISSING_TOKEN, "PurchaseInfo is missing token for sku: " + sku + " " + itemInfo); } logDebug("Consuming sku: " + sku + ", token: " + token); int response = mService.consumePurchase(3, mContext.getPackageName(), token); if (response == BILLING_RESPONSE_RESULT_OK) { logDebug("Successfully consumed sku: " + sku); } else { logDebug("Error consuming consuming sku " + sku + ". " + getResponseDesc(response)); throw new IabException(response, "Error consuming sku " + sku); } } catch (RemoteException e) { throw new IabException(IABHELPER_REMOTE_EXCEPTION, "Remote exception while consuming. PurchaseInfo: " + itemInfo, e); } } /** * Callback that notifies when a consumption operation finishes. */ public interface OnConsumeFinishedListener { /** * Called to notify that a consumption has finished. * * @param purchase The purchase that was (or was to be) consumed. * @param result The result of the consumption operation. */ public void onConsumeFinished(Purchase purchase, IabResult result); } /** * Callback that notifies when a multi-item consumption operation finishes. */ public interface OnConsumeMultiFinishedListener { /** * Called to notify that a consumption of multiple items has finished. * * @param purchases The purchases that were (or were to be) consumed. * @param results The results of each consumption operation, corresponding to each * sku. 
*/ public void onConsumeMultiFinished(List<Purchase> purchases, List<IabResult> results); } /** * Asynchronous wrapper to item consumption. Works like {@link #consume}, but * performs the consumption in the background and notifies completion through * the provided listener. This method is safe to call from a UI thread. * * @param purchase The purchase to be consumed. * @param listener The listener to notify when the consumption operation finishes. */ public void consumeAsync(Purchase purchase, OnConsumeFinishedListener listener) { checkNotDisposed(); checkSetupDone("consume"); List<Purchase> purchases = new ArrayList<Purchase>(); purchases.add(purchase); consumeAsyncInternal(purchases, listener, null); } /** * Same as {@link consumeAsync}, but for multiple items at once. * @param purchases The list of PurchaseInfo objects representing the purchases to consume. * @param listener The listener to notify when the consumption operation finishes. */ public void consumeAsync(List<Purchase> purchases, OnConsumeMultiFinishedListener listener) { checkNotDisposed(); checkSetupDone("consume"); consumeAsyncInternal(purchases, null, listener); } /** * Returns a human-readable description for the given response code. * * @param code The response code * @return A human-readable string explaining the result code. * It also includes the result code numerically. 
*/ public static String getResponseDesc(int code) { String[] iab_msgs = ("0:OK/1:User Canceled/2:Unknown/" + "3:Billing Unavailable/4:Item unavailable/" + "5:Developer Error/6:Error/7:Item Already Owned/" + "8:Item not owned").split("/"); String[] iabhelper_msgs = ("0:OK/-1001:Remote exception during initialization/" + "-1002:Bad response received/" + "-1003:Purchase signature verification failed/" + "-1004:Send intent failed/" + "-1005:User cancelled/" + "-1006:Unknown purchase response/" + "-1007:Missing token/" + "-1008:Unknown error/" + "-1009:Subscriptions not available/" + "-1010:Invalid consumption attempt").split("/"); if (code <= IABHELPER_ERROR_BASE) { int index = IABHELPER_ERROR_BASE - code; if (index >= 0 && index < iabhelper_msgs.length) return iabhelper_msgs[index]; else return String.valueOf(code) + ":Unknown IAB Helper Error"; } else if (code < 0 || code >= iab_msgs.length) return String.valueOf(code) + ":Unknown"; else return iab_msgs[code]; } // Checks that setup was done; if not, throws an exception. void checkSetupDone(String operation) { if (!mSetupDone) { logError("Illegal state for operation (" + operation + "): IAB helper is not set up."); throw new IllegalStateException("IAB helper is not set up. 
Can't perform operation: " + operation); } } // Workaround to bug where sometimes response codes come as Long instead of Integer int getResponseCodeFromBundle(Bundle b) { Object o = b.get(RESPONSE_CODE); if (o == null) { logDebug("Bundle with null response code, assuming OK (known issue)"); return BILLING_RESPONSE_RESULT_OK; } else if (o instanceof Integer) return ((Integer)o).intValue(); else if (o instanceof Long) return (int)((Long)o).longValue(); else { logError("Unexpected type for bundle response code."); logError(o.getClass().getName()); throw new RuntimeException("Unexpected type for bundle response code: " + o.getClass().getName()); } } // Workaround to bug where sometimes response codes come as Long instead of Integer int getResponseCodeFromIntent(Intent i) { Object o = i.getExtras().get(RESPONSE_CODE); if (o == null) { logError("Intent with no response code, assuming OK (known issue)"); return BILLING_RESPONSE_RESULT_OK; } else if (o instanceof Integer) return ((Integer)o).intValue(); else if (o instanceof Long) return (int)((Long)o).longValue(); else { logError("Unexpected type for intent response code."); logError(o.getClass().getName()); throw new RuntimeException("Unexpected type for intent response code: " + o.getClass().getName()); } } void flagStartAsync(String operation) { if (mAsyncInProgress) throw new IllegalStateException("Can't start async operation (" + operation + ") because another async operation(" + mAsyncOperation + ") is in progress."); mAsyncOperation = operation; mAsyncInProgress = true; logDebug("Starting async operation: " + operation); } void flagEndAsync() { logDebug("Ending async operation: " + mAsyncOperation); mAsyncOperation = ""; mAsyncInProgress = false; } int queryPurchases(Inventory inv, String itemType) throws JSONException, RemoteException { // Query purchases logDebug("Querying owned items, item type: " + itemType); logDebug("Package name: " + mContext.getPackageName()); boolean verificationFailed = false; String 
continueToken = null;

        do {
            logDebug("Calling getPurchases with continuation token: " + continueToken);
            Bundle ownedItems = mService.getPurchases(3, mContext.getPackageName(), itemType, continueToken);

            int response = getResponseCodeFromBundle(ownedItems);
            logDebug("Owned items response: " + String.valueOf(response));
            if (response != BILLING_RESPONSE_RESULT_OK) {
                logDebug("getPurchases() failed: " + getResponseDesc(response));
                return response;
            }
            // All three parallel lists must be present for the response to be usable.
            if (!ownedItems.containsKey(RESPONSE_INAPP_ITEM_LIST)
                    || !ownedItems.containsKey(RESPONSE_INAPP_PURCHASE_DATA_LIST)
                    || !ownedItems.containsKey(RESPONSE_INAPP_SIGNATURE_LIST)) {
                logError("Bundle returned from getPurchases() doesn't contain required fields.");
                return IABHELPER_BAD_RESPONSE;
            }

            // Parallel lists: skus[i], purchase data[i] and signature[i] describe one item.
            ArrayList<String> ownedSkus = ownedItems.getStringArrayList(RESPONSE_INAPP_ITEM_LIST);
            ArrayList<String> purchaseDataList = ownedItems.getStringArrayList(RESPONSE_INAPP_PURCHASE_DATA_LIST);
            ArrayList<String> signatureList = ownedItems.getStringArrayList(RESPONSE_INAPP_SIGNATURE_LIST);

            for (int i = 0; i < purchaseDataList.size(); ++i) {
                String purchaseData = purchaseDataList.get(i);
                String signature = signatureList.get(i);
                String sku = ownedSkus.get(i);
                if (Security.verifyPurchase(mSignatureBase64, purchaseData, signature)) {
                    logDebug("Sku is owned: " + sku);
                    Purchase purchase = new Purchase(itemType, purchaseData, signature);

                    if (TextUtils.isEmpty(purchase.getToken())) {
                        logWarn("BUG: empty/null token!");
                        logDebug("Purchase data: " + purchaseData);
                    }

                    // Record ownership and token
                    inv.addPurchase(purchase);
                }
                else {
                    // Signature mismatch: skip the item but remember it so the whole
                    // query is reported as a verification failure at the end.
                    logWarn("Purchase signature verification **FAILED**. Not adding item.");
                    logDebug("   Purchase data: " + purchaseData);
                    logDebug("   Signature: " + signature);
                    verificationFailed = true;
                }
            }

            // A non-empty continuation token means there are more pages of purchases.
            continueToken = ownedItems.getString(INAPP_CONTINUATION_TOKEN);
            logDebug("Continuation token: " + continueToken);
        } while (!TextUtils.isEmpty(continueToken));

        return verificationFailed ?
IABHELPER_VERIFICATION_FAILED : BILLING_RESPONSE_RESULT_OK;
    }

    /**
     * Fetches SKU details for all owned skus of the given type, plus any extra skus
     * requested, and records them in the given inventory.
     *
     * @param itemType item type (in-app or subscription) to query.
     * @param inv inventory to populate; also supplies the owned skus to look up.
     * @param moreSkus additional skus to query; may be null.
     * @return a billing response code.
     */
    int querySkuDetails(String itemType, Inventory inv, List<String> moreSkus)
            throws RemoteException, JSONException {
        logDebug("Querying SKU details.");
        // Start from the owned skus and append any extra requested skus, de-duplicated.
        ArrayList<String> skuList = new ArrayList<String>();
        skuList.addAll(inv.getAllOwnedSkus(itemType));
        if (moreSkus != null) {
            for (String sku : moreSkus) {
                if (!skuList.contains(sku)) {
                    skuList.add(sku);
                }
            }
        }

        if (skuList.size() == 0) {
            logDebug("queryPrices: nothing to do because there are no SKUs.");
            return BILLING_RESPONSE_RESULT_OK;
        }

        Bundle querySkus = new Bundle();
        querySkus.putStringArrayList(GET_SKU_DETAILS_ITEM_LIST, skuList);
        Bundle skuDetails = mService.getSkuDetails(3, mContext.getPackageName(), itemType, querySkus);

        if (!skuDetails.containsKey(RESPONSE_GET_SKU_DETAILS_LIST)) {
            // No detail list: either the service reported an error code, or the
            // response is malformed (neither an error nor a list).
            int response = getResponseCodeFromBundle(skuDetails);
            if (response != BILLING_RESPONSE_RESULT_OK) {
                logDebug("getSkuDetails() failed: " + getResponseDesc(response));
                return response;
            }
            else {
                logError("getSkuDetails() returned a bundle with neither an error nor a detail list.");
                return IABHELPER_BAD_RESPONSE;
            }
        }

        ArrayList<String> responseList = skuDetails.getStringArrayList(RESPONSE_GET_SKU_DETAILS_LIST);

        // Each entry is a JSON string describing one sku.
        for (String thisResponse : responseList) {
            SkuDetails d = new SkuDetails(itemType, thisResponse);
            logDebug("Got sku details: " + d);
            inv.addSkuDetails(d);
        }
        return BILLING_RESPONSE_RESULT_OK;
    }

    /**
     * Consumes the given purchases on a worker thread, then posts each result to
     * either the single-item or the multi-item listener via the caller's handler.
     */
    void consumeAsyncInternal(final List<Purchase> purchases,
            final OnConsumeFinishedListener singleListener,
            final OnConsumeMultiFinishedListener multiListener) {
        // Handler created on the calling thread so callbacks run on it.
        final Handler handler = new Handler();
        flagStartAsync("consume");
        (new Thread(new Runnable() {
            public void run() {
                final List<IabResult> results = new ArrayList<IabResult>();
                for (Purchase purchase : purchases) {
                    try {
                        consume(purchase);
                        results.add(new IabResult(BILLING_RESPONSE_RESULT_OK, "Successful consume of sku " + purchase.getSku()));
                    }
                    catch (IabException ex) {
                        // Record the failure for this item and carry on with the rest.
                        results.add(ex.getResult());
                    }
                }

                flagEndAsync();
                if
(!mDisposed && singleListener != null) {
                    // Single-item mode: report the first (only) purchase/result pair
                    // back on the originating thread.
                    handler.post(new Runnable() {
                        public void run() {
                            singleListener.onConsumeFinished(purchases.get(0), results.get(0));
                        }
                    });
                }
                if (!mDisposed && multiListener != null) {
                    // Multi-item mode: report all purchases with their per-item results.
                    handler.post(new Runnable() {
                        public void run() {
                            multiListener.onConsumeMultiFinished(purchases, results);
                        }
                    });
                }
            }
        })).start();
    }

    // Logs a debug message, but only when debug logging has been enabled.
    void logDebug(String msg) {
        if (mDebugLog) Log.d(mDebugTag, msg);
    }

    // Errors are always logged, regardless of the debug-logging flag.
    void logError(String msg) {
        Log.e(mDebugTag, "In-app billing error: " + msg);
    }

    // Warnings are always logged, regardless of the debug-logging flag.
    void logWarn(String msg) {
        Log.w(mDebugTag, "In-app billing warning: " + msg);
    }
}
package com.fivium.scriptrunner2;

import com.fivium.scriptrunner2.ex.ExParser;
import com.fivium.scriptrunner2.script.ScriptExecutable;
import com.fivium.scriptrunner2.script.ScriptExecutableParser;

import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.commons.io.FileUtils;

/**
 * A PatchScript represents a parsed patch script file, which is used to promote DDL and DML changes to the database.
 * They may also contain instructions to ScriptRunner to control how to connect to the database and when to commit or
 * rollback transactions.<br/><br/>
 *
 * PatchScripts are composed of one or more {@link ScriptExecutable}s which should be executed in order. A PatchScript
 * is uniquely identified by a faceted filename, which is parsed when this object is created.
 */
public class PatchScript {
  // Patch label (leading upper-case part of the filename), e.g. "PATCHCORE".
  private final String mPatchLabel;
  // Numeric sequence parsed from the filename.
  private final int mPatchNumber;
  // Description parsed from the parenthesised part of the filename.
  private final String mDescription;
  // Ordered executable statements parsed from the file contents.
  private final List<ScriptExecutable> mExecutableList;
  private final String mPatchFileHash;
  private final int mPromotionSequencePosition;
  // Raw, unparsed file contents.
  private final String mOriginalPatchString;
  private final String mFileVersion;

  // Filename facets: LABEL (>=5 upper-case letters), number (>=4 digits), "(description)", ".sql".
  // Pattern.CASE_INSENSITIVE removed: labels must be upper-case to match.
  private static final Pattern FILENAME_PATTERN = Pattern.compile("^([A-Z]{5,})([0-9]{4,}) *\\((.+)\\) *\\.sql$");
  private static final int FILENAME_PATTERN_TYPE_GROUP = 1;
  private static final int FILENAME_PATTERN_NUMBER_GROUP = 2;
  private static final int FILENAME_PATTERN_DESCRIPTION_GROUP = 3;

  private static final String PRINT_STATEMENT_DIVIDER = "\n========================================\n";

  /**
   * Prints the parsed contents of each PatchScript in the given path list to standard out. Each statement in the PatchScript
   * is separated by an obvious string delimiter. Any errors encountered during the parse are also printed to standard out
   * and are not re-thrown.
This output is for debugging purposes only and should not be processed programmatically.
   * @param pBaseDirectory Base directory for relative path evaluation.
   * @param pScriptPathList List of paths to all required PatchScripts.
   * @return True if parsing was successful, false otherwise.
   */
  public static boolean printScriptsToStandardOut(File pBaseDirectory, List<String> pScriptPathList){
    boolean lSuccess = true;
    for(String lPath : pScriptPathList){
      String lFileContents;
      try {
        File lPatchFile = new File(lPath);
        if(!lPatchFile.isAbsolute()){
          //If not absolute, evaluate from the base directory
          lPatchFile = new File(pBaseDirectory, lPath);
        }
        System.out.println("\n********** " + lPatchFile.getName() + " **********\n");
        lFileContents = FileUtils.readFileToString(lPatchFile);
        PatchScript lPatchScript = createFromString(lPatchFile.getName(), lFileContents);
        System.out.println("Patch label: " + lPatchScript.getPatchLabel());
        System.out.println("Patch number: " + lPatchScript.getPatchNumber());
        System.out.println("Patch description: " + lPatchScript.getDescription());
        //Print each parsed statement between obvious dividers
        for(ScriptExecutable lExec : lPatchScript.getExecutableList()){
          System.out.println(PRINT_STATEMENT_DIVIDER);
          System.out.println(lExec.getDisplayString());
        }
        System.out.println(PRINT_STATEMENT_DIVIDER);
      }
      catch (IOException e) {
        //Report the failure but carry on with the remaining scripts
        System.out.println("ERROR: Could not read PatchScript file");
        System.out.println("Reason (see log for details): " + e.getMessage());
        Logger.logError(e);
        lSuccess = false;
      }
      catch (ExParser e) {
        System.out.println("ERROR: PATCHSCRIPT COULD NOT BE PARSED");
        System.out.println("Reason (see log for details): " + e.getMessage());
        Logger.logError(e);
        lSuccess = false;
      }
    }

    return lSuccess;
  }

  /**
   * Constructs a new PatchScript by parsing the given file contents.
   * @param pFileName File name of the PatchScript.
   * @param pPatchContents File contents.
   * @return The new PatchScript.
   * @throws ExParser If the file contents or file name cannot be parsed.
   */
  public static PatchScript createFromString(String pFileName, String pPatchContents) throws ExParser {
    //Hash, version and sequence position are unknown in this context
    return createFromString(pFileName, pPatchContents, "unavailable", "unavailable");
  }

  /**
   * Constructs a new PatchScript by parsing the given file contents.
   * @param pFileName File name of the PatchScript.
   * @param pPatchContents File contents.
   * @param pFileHash File hash of the patch script.
   * @param pFileVersion Version of the patch script.
   * @return The new PatchScript.
   * @throws ExParser If the file contents or file name cannot be parsed.
   */
  public static PatchScript createFromString(String pFileName, String pPatchContents, String pFileHash, String pFileVersion) throws ExParser {
    //Sequence position defaults to 0 when created outside a promotion
    return new PatchScript(pFileName, pPatchContents, pFileHash, 0, pFileVersion);
  }

  /**
   * Constructs a new PatchScript by reading the contents of a PromotionFile.
   * @param pResolver Resolver for finding the file.
   * @param pPromotionFile File to be parsed.
   * @return The new PatchScript.
   * @throws IOException If the file cannot be read.
   * @throws ExParser If the file contents or file name cannot be parsed.
   */
  public static PatchScript createFromPromotionFile(FileResolver pResolver, PromotionFile pPromotionFile) throws IOException, ExParser {
    File lFile = pResolver.resolveFile(pPromotionFile.getFilePath());
    String lFileContents = FileUtils.readFileToString(lFile);
    return new PatchScript(lFile.getName(), lFileContents, pPromotionFile.getFileHash(), pPromotionFile.getSequencePosition(), pPromotionFile.getFileVersion());
  }

  /**
   * Constructs a new PatchScript.
   * @param pFileName Patch file name.
   * @param pFileContents Contents of the file.
   * @param pPatchFileHash Hash of the file.
   * @param pPromotionSequencePosition Position within the overall promotion.
   * @param pFileVersion VCS version of the file.
   * @throws ExParser If the contents or filename cannot be parsed.
   */
  private PatchScript(String pFileName, String pFileContents, String pPatchFileHash, int pPromotionSequencePosition, String pFileVersion)
  throws ExParser {
    //Use regex to split the filename into its component parts
    Matcher lMatcher = FILENAME_PATTERN.matcher(pFileName);
    if(lMatcher.matches()){
      mPatchLabel = lMatcher.group(FILENAME_PATTERN_TYPE_GROUP);
      mPatchNumber = Integer.parseInt(lMatcher.group(FILENAME_PATTERN_NUMBER_GROUP));
      mDescription = lMatcher.group(FILENAME_PATTERN_DESCRIPTION_GROUP);
      Logger.logDebug("Parsed patch filename " + pFileName + ": Patch Label = " + mPatchLabel + " Number = "+ mPatchNumber + " Description = " + mDescription);
    }
    else {
      //Filename does not match the faceted PATCHLABEL##### (description).sql format
      throw new ExParser("Invalid patch filename '" + pFileName + "'. Expected format is 'PATCHLABEL##### (description).sql'");
    }

    //Split the nested scripts into individual executable scripts
    mExecutableList = ScriptExecutableParser.parseScriptExecutables(pFileContents, false);

    mPatchFileHash = pPatchFileHash;
    mPromotionSequencePosition = pPromotionSequencePosition;
    mOriginalPatchString = pFileContents;
    mFileVersion = pFileVersion;
  }

  /**
   * Gets the original contents of the file used to create this PatchScript, before it was parsed.
   * @return Original file contents.
   */
  public String getOriginalPatchString(){
    return mOriginalPatchString;
  }

  /**
   * Gets this PatchScript's list of ScriptExecutables.
   * @return Executable list.
   */
  public List<ScriptExecutable> getExecutableList(){
    return mExecutableList;
  }

  /**
   * Gets the unique display name of this PatchScript. This is the label concatenated with the number.
   * @return Display name.
   */
  public String getDisplayName(){
    return mPatchLabel + " " + mPatchNumber;
  }

  /**
   * Gets the patch label, e.g. PATCHCORE, POSTPATCHCORE, etc.
   * @return Patch label.
   */
  public String getPatchLabel() {
    return mPatchLabel;
  }

  /**
   * Gets the number sequence of this PatchScript.
   * @return Patch number.
   */
  public int getPatchNumber() {
    return mPatchNumber;
  }

  /**
   * Gets the position of this PatchScript within its overall promotion label.
   * @return Promotion position.
   */
  public int getPromotionSequencePosition(){
    return mPromotionSequencePosition;
  }

  /**
   * Gets the file hash of this patch's original file.
   * @return File hash.
   */
  public String getPatchFileHash(){
    return mPatchFileHash;
  }

  /**
   * Gets the description of this PatchScript as specified in the parenthesised part of the file name.
   * @return Patch description.
   */
  public String getDescription() {
    return mDescription;
  }

  /**
   * Gets the VCS version string for the file which created this PatchScript.
   * @return Version number.
   */
  public String getFileVersion() {
    return mFileVersion;
  }
}