gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/* Copyright 2012 - 2015 pac4j organization Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.pac4j.play; import java.util.HashMap; import java.util.Map; import org.pac4j.core.context.BaseResponseContext; import org.pac4j.play.store.DataStore; import play.api.mvc.RequestHeader; import play.core.j.JavaHelpers$; import play.mvc.Http; import play.mvc.Http.Request; import play.mvc.Http.Response; import play.mvc.Http.Session; import play.mvc.Http.Context; /** * <p>This class is the web context for Play (used both for Java and Scala).</p> * <p>"Session objects" are managed by the defined {@link DataStore}.</p> * <p>"Request attributes" are saved/restored to/from the context.</p> * * @author Jerome Leleu * @since 1.1.0 */ public class PlayWebContext extends BaseResponseContext { protected final Context context; protected final Request request; protected final Response response; protected final Session session; protected final DataStore dataStore; public PlayWebContext(final Context context, final DataStore dataStore) { this.context = context; this.request = context.request(); this.response = context.response(); this.session = context.session(); this.dataStore = dataStore; } public PlayWebContext(final RequestHeader requestHeader, final DataStore dataStore) { this(JavaHelpers$.MODULE$.createJavaContext(requestHeader), dataStore); } /** * Get the Java session. * * @return the Java session */ public Session getJavaSession() { return session; } /** * Get the Java request. 
* * @return the Java request */ public Request getJavaRequest() { return request; } /** * Get the Java context. * * @return the Java context. */ public Context getJavaContext() { return this.context; } /** * Return the session storage. * * @return the session storage */ public DataStore getDataStore() { return this.dataStore; } /** * {@inheritDoc} */ @Override public String getRequestHeader(final String name) { return request.getHeader(name); } /** * {@inheritDoc} */ @Override public String getRequestMethod() { return request.method(); } /** * {@inheritDoc} */ @Override public String getRequestParameter(final String name) { final Map<String, String[]> parameters = getRequestParameters(); final String[] values = parameters.get(name); if (values != null && values.length > 0) { return values[0]; } return null; } /** * {@inheritDoc} */ @Override public Map<String, String[]> getRequestParameters() { final Http.RequestBody body = request.body(); final Map<String, String[]> formParameters; if (body != null) { formParameters = body.asFormUrlEncoded(); } else { formParameters = new HashMap<String, String[]>(); } final Map<String, String[]> urlParameters = request.queryString(); final Map<String, String[]> parameters = new HashMap<String, String[]>(); if (formParameters != null) { parameters.putAll(formParameters); } if (urlParameters != null) { parameters.putAll(urlParameters); } return parameters; } /** * {@inheritDoc} */ @Override public Object getSessionAttribute(final String key) { return dataStore.get(this, key); } /** * {@inheritDoc} */ @Override public void setSessionAttribute(final String key, final Object value) { dataStore.set(this, key, value); } /** * {@inheritDoc} */ @Override public void setResponseHeader(final String name, final String value) { response.setHeader(name, value); } /** * {@inheritDoc} */ @Override public String getServerName() { String[] split = request.host().split(":"); return split[0]; } /** * {@inheritDoc} */ @Override public int 
getServerPort() { String[] split = request.host().split(":"); String portStr = (split.length > 1) ? split[1] : "80"; return Integer.valueOf(portStr); } /** * {@inheritDoc} */ @Override public String getScheme() { if (request.secure()) { return "https"; } else { return "http"; } } /** * {@inheritDoc} */ @Override public String getFullRequestURL() { return getScheme() + "://" + request.host() + request.uri(); } /** * {@inheritDoc} */ public String getRemoteAddr() { return request.remoteAddress(); } /** * {@inheritDoc} */ public Object getRequestAttribute(String name) { return context.args.get(name); } /** * {@inheritDoc} */ public void setRequestAttribute(String name, Object value) { context.args.put(name, value); } /** * {@inheritDoc} */ public void invalidateSession() { dataStore.invalidate(this); } }
/* * Copyright (c) 2012 Joe Rowley * * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/
package com.mobileobservinglog;

import java.util.ArrayList;
import java.util.List;

import com.mobileobservinglog.R;
import com.mobileobservinglog.support.BackupRestoreUtil;
import com.mobileobservinglog.support.HtmlExporter;
import com.mobileobservinglog.support.database.CatalogsDAO;

import android.content.Context;
import android.database.Cursor;
import android.os.Bundle;
import android.util.Log;
import android.view.Display;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.RelativeLayout;
import android.widget.TextView;

/**
 * Screen offering three actions on the observing log data: export selected
 * catalogs to HTML/PDF, back up the database, and restore from a backup
 * (.maol) file. A checkable list of installed catalogs drives the export;
 * modal overlays built into the layout are reused for confirmation dialogs,
 * file pickers, and progress display.
 *
 * NOTE(review): relies on list helpers (getListView/setListAdapter/
 * onListItemClick) and fields such as settingsRef/customizeBrightness that
 * are presumably provided by ActivityBase — confirm against that class.
 */
public class BackupRestoreScreen extends ActivityBase{
    //gather resources
    FrameLayout body;
    Button export;
    Button backup;
    Button restore;
    List<String> catalogList;       // names of installed catalogs shown in the list
    List<String> selectedItems;     // catalogs currently checked for export
    RelativeLayout alertModal;
    LinearLayout alertSelectors;
    LinearLayout alertListOne;
    LinearLayout alertListTwo;
    TextView alertText;
    Button alertOk;
    Button alertCancel;
    RelativeLayout progressLayout;
    ImageView progressImage;
    TextView progressMessage;
    List<String> restorableFiles;   // backup files found on disk, shown in the restore picker
    BackupRestoreUtil util;         // performs the actual backup/restore work
    boolean asyncTaskRunning = false; // true while a background export/backup/restore is in flight

    /**
     * Initialize layout and grab references to the modal widgets. Most widget
     * lookups are repeated in setLayout()/findModalElements() because the
     * content view is re-set when the session mode changes.
     */
    @Override
    public void onCreate(Bundle icicle) {
        Log.d("JoeDebug", "BackupRestore onCreate. Current session mode is " + settingsRef.getSessionMode());
        super.onCreate(icicle);
        customizeBrightness.setDimButtons(settingsRef.getButtonBrightness());
        selectedItems = new ArrayList<String>();
        //setup the layout
        setContentView(settingsRef.getBackupRestoreLayout());
        body = (FrameLayout)findViewById(R.id.backup_restore_root);
        alertText = (TextView)findViewById(R.id.alert_main_text);
        alertOk = (Button)findViewById(R.id.alert_ok_button);
        alertCancel = (Button)findViewById(R.id.alert_cancel_button);
        alertModal = (RelativeLayout)findViewById(R.id.alert_modal);
        progressLayout = (RelativeLayout)findViewById(R.id.progress_modal);
        progressImage = (ImageView)findViewById(R.id.progress_image);
        progressMessage = (TextView)findViewById(R.id.progress_text);
    }

    @Override
    public void onPause() {
        super.onPause();
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
    }

    //When we resume, we need to make sure we have the right layout set, in case the user has changed the session mode.
    @Override
    public void onResume() {
        Log.d("JoeDebug", "BackupRestore onResume. Current session mode is " + settingsRef.getSessionMode());
        super.onResume();
        setLayout();
        // Lazily create the worker util; it needs the progress widgets that
        // setLayout() has just (re)resolved.
        if(util == null) {
            util = new BackupRestoreUtil(progressMessage, progressImage, BackupRestoreScreen.this);
        }
    }

    //Used by the Toggle Mode menu item method in ActivityBase. Reset the layout and force the redraw
    @Override
    public void setLayout(){
        setContentView(settingsRef.getBackupRestoreLayout());
        super.setLayout();
        findButtonsAddListeners();
        findModalElements();
        prepareListView();
        body.postInvalidate();
        // Re-show the progress modal if a background task was running when the
        // layout was rebuilt (e.g. on a session-mode toggle).
        if(asyncTaskRunning) {
            prepProgressModal();
        }
    }

    /** Resolve the three action buttons and wire up their click handlers. */
    protected void findButtonsAddListeners() {
        export = (Button)findViewById(R.id.export_pdf_button);
        backup = (Button)findViewById(R.id.backup_data_button);
        restore = (Button)findViewById(R.id.restore_data_button);
        export.setOnClickListener(exportPdfs);
        backup.setOnClickListener(backupData);
        restore.setOnClickListener(restoreData);
    }

    /** Re-resolve every widget that belongs to the alert/progress modals. */
    private void findModalElements() {
        alertModal = (RelativeLayout)findViewById(R.id.alert_modal);
        alertText = (TextView)findViewById(R.id.modal_header);
        alertOk = (Button)findViewById(R.id.alert_ok_button);
        alertCancel = (Button)findViewById(R.id.alert_cancel_button);
        alertSelectors = (LinearLayout)findViewById(R.id.selectors_container);
        alertListOne = (LinearLayout)findViewById(R.id.object_selector_modal_list_layout_one);
        alertListTwo = (LinearLayout)findViewById(R.id.object_selector_modal_list_layout_two);
        progressLayout = (RelativeLayout)findViewById(R.id.progress_modal);
        progressImage = (ImageView)findViewById(R.id.progress_image);
        progressMessage = (TextView)findViewById(R.id.progress_text);
    }

    // Export: hand the checked catalogs to the HtmlExporter, or nag the user
    // if nothing is selected.
    private final Button.OnClickListener exportPdfs = new Button.OnClickListener() {
        public void onClick(View view) {
            if(selectedItems.size() > 0) {
                HtmlExporter exporter = new HtmlExporter(progressMessage, progressImage, BackupRestoreScreen.this);
                exporter.exportData(selectedItems);
            } else {
                prepForModal();
                alertText.setVisibility(View.VISIBLE);
                alertOk.setVisibility(View.VISIBLE);
                alertModal.setVisibility(View.VISIBLE);
                alertText.setText("Please select at least one catalog to export.");
                alertOk.setOnClickListener(dismissModal);; // NOTE(review): stray second ';' — harmless empty statement
            }
        }
    };

    // Backup: delegate straight to the util (it drives the progress UI itself).
    private final Button.OnClickListener backupData = new Button.OnClickListener() {
        public void onClick(View view) {
            util.backupData();
        }
    };

    // Restore: warn the user first since a restore overwrites all current data.
    private final Button.OnClickListener restoreData = new Button.OnClickListener() {
        public void onClick(View view) {
            prepForModal();
            alertText.setVisibility(View.VISIBLE);
            alertOk.setVisibility(View.VISIBLE);
            alertCancel.setVisibility(View.VISIBLE);
            alertModal.setVisibility(View.VISIBLE);
            alertText.setText("WARNING: This action will over-write all existing log data.\n\nContinue?\n");
            alertOk.setOnClickListener(warningAccepted);
            alertCancel.setOnClickListener(dismissModal);
        }
    };

    // After the overwrite warning is accepted: list the restorable files in a
    // picker modal (capped to 60% of the window height), or report that none
    // were found in the default directory.
    private final Button.OnClickListener warningAccepted = new Button.OnClickListener() {
        public void onClick(View view) {
            restorableFiles = util.findRestorableFiles();
            prepForModal();
            alertText.setVisibility(View.VISIBLE);
            alertModal.setVisibility(View.VISIBLE);
            if(restorableFiles.size() > 0) {
                alertCancel.setVisibility(View.VISIBLE);
                alertCancel.setOnClickListener(dismissModal);
                // NOTE(review): this flow restores data, but the prompt says
                // "backup from" — wording looks wrong; confirm intent before changing.
                alertText.setText("Which file would you like to backup from?");
                alertListOne.setVisibility(View.VISIBLE);
                findViewById(R.id.object_selector_modal_list_one_header).setVisibility(View.GONE);
                ListView modalList = (ListView)findViewById(R.id.modal_list_one);
                modalList.setAdapter(new ArrayAdapter<String>(BackupRestoreScreen.this, settingsRef.getSearchModalListLayout(), R.id.filter_option, restorableFiles));
                modalList.setOnItemClickListener(fileSelected);
                Display display = ((WindowManager)getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
                int windowHeight = display.getHeight();
                RelativeLayout.LayoutParams listOneParams = (RelativeLayout.LayoutParams)alertListOne.getLayoutParams();
                // Clamp the picker list to 60% of the window height.
                if(listOneParams.height > (int) windowHeight * 0.6f) {
                    listOneParams.height = (int) (windowHeight * 0.6f);
                    alertListOne.setLayoutParams(listOneParams);
                }
            } else {
                alertOk.setVisibility(View.VISIBLE);
                String defaultDirectory = util.getDefaultDirectoryPath();
                alertText.setText("There were no restorable .maol files found in the default directory. \n\n" +
                        "If you have a backup (.maol) file please place it in the directory " + defaultDirectory);
                alertOk.setOnClickListener(dismissModal);
            }
        }
    };

    // A file was picked from the restore list: kick off the restore.
    protected final AdapterView.OnItemClickListener fileSelected = new AdapterView.OnItemClickListener() {
        public void onItemClick(AdapterView<?> adapter, View view, int position, long id) {
            util.restoreData(restorableFiles.get(position));
        }
    };

    /**
     * Internal method to handle preparation of the list view upon creation or to be called by setLayout when session mode changes or onResume.
     * Builds the list of installed catalogs from the database; if none are
     * installed, shows the "nothing here" message and disables export/backup.
     */
    protected void prepareListView() {
        catalogList = new ArrayList<String>();
        //Get the list of saved telescopes and populate the list
        CatalogsDAO db = new CatalogsDAO(this);
        Cursor catalogs = db.getAvailableCatalogs();
        catalogs.moveToFirst();
        Log.d("JoeDebug", "cursor size is " + catalogs.getCount());
        // Column 0 is assumed to be the catalog name and column 1 the
        // installed flag ("Yes"/...) — TODO confirm against CatalogsDAO.
        for (int i = 0; i < catalogs.getCount(); i++) {
            String installed = catalogs.getString(1);
            if (installed.equals("Yes")){
                String name = catalogs.getString(0);
                catalogList.add(name);
            }
            catalogs.moveToNext();
        }
        catalogs.close();
        db.close();
        if (catalogList.size() == 0){
            TextView nothingLeft = (TextView)findViewById(R.id.nothing_here);
            nothingLeft.setVisibility(View.VISIBLE);
            export.setEnabled(false);
            backup.setEnabled(false);
        }
        else{
            Log.d("JoeTest", "List size is " + catalogList.size());
            setListAdapter(new CatalogAdapter(this, settingsRef.getBackupRestoreListLayout(), catalogList));
        }
    }

    /**
     * Take action on each of the list items when clicked. We need to swap out the image and add/remove it from the selected list
     */
    @Override
    protected void onListItemClick(ListView l, View v, int position, long id) {
        TextView name = (TextView) v.findViewById(R.id.catalog_name);
        String catalog = name.getText().toString();
        ImageView checked = (ImageView) v.findViewById(R.id.checkbox);
        if (!selectedItems.contains(catalog)){
            //This item is not currently checked
            selectedItems.add(catalog);
            checked.setImageResource(settingsRef.getCheckbox_Selected());
        }
        else{
            selectedItems.remove(catalog);
            checked.setImageResource(settingsRef.getCheckbox_Unselected());
        }
    }

    /**
     * Helper method to dim out the background and make the list view unclickable in preparation to display a modal.
     * Also resets every modal sub-element to GONE so callers only need to
     * un-hide the pieces they use.
     */
    protected void prepForModal() {
        RelativeLayout blackOutLayer = (RelativeLayout)findViewById(R.id.settings_fog);
        RelativeLayout mainBackLayer = (RelativeLayout)findViewById(R.id.backup_restore_main);
        ListView listView = getListView();
        mainBackLayer.setEnabled(false);
        listView.setEnabled(false);
        export.setEnabled(false);
        backup.setEnabled(false);
        restore.setEnabled(false);
        blackOutLayer.setVisibility(View.VISIBLE);
        progressLayout.setVisibility(View.GONE);
        alertSelectors.setVisibility(View.GONE);
        alertListOne.setVisibility(View.GONE);
        alertListTwo.setVisibility(View.GONE);
        alertModal.setVisibility(View.GONE);
        alertText.setVisibility(View.GONE);
        alertOk.setVisibility(View.GONE);
        alertCancel.setVisibility(View.GONE);
    }

    /** Undo prepForModal(): re-enable the screen and hide the modal layers. */
    protected void tearDownModal(){
        RelativeLayout blackOutLayer = (RelativeLayout)findViewById(R.id.settings_fog);
        RelativeLayout mainBackLayer = (RelativeLayout)findViewById(R.id.backup_restore_main);
        ListView listView = getListView();
        mainBackLayer.setEnabled(true);
        listView.setEnabled(true);
        export.setEnabled(true);
        backup.setEnabled(true);
        restore.setEnabled(true);
        blackOutLayer.setVisibility(View.INVISIBLE);
        alertModal.setVisibility(View.INVISIBLE);
        progressLayout.setVisibility(View.INVISIBLE);
    }

    // Shared "dismiss" handler for OK/Cancel buttons that just close a modal.
    protected final Button.OnClickListener dismissModal = new Button.OnClickListener() {
        public void onClick(View view){
            tearDownModal();
        }
    };

    /**
     * Take our existing alert modal and modify the layout to provide a progress indicator
     */
    public void prepProgressModal(){
        Log.d("JoeTest", "prepProgressModal called");
        prepForModal();
        progressImage.setVisibility(View.VISIBLE);
        progressMessage.setVisibility(View.VISIBLE);
        progressLayout.setVisibility(View.VISIBLE);
    }

    /** Callback for the background task: show a failure message in the alert modal. */
    public void showFailureMessage(String message){
        asyncTaskRunning = false;
        prepForModal();
        alertModal.setVisibility(View.VISIBLE);
        alertText.setText(message);
        alertOk.setOnClickListener(dismissModal);
        alertText.setVisibility(View.VISIBLE);
        alertOk.setVisibility(View.VISIBLE);
    }

    /** Callback for the background task: show a success message in the alert modal. */
    public void showSuccessMessage(String message){
        asyncTaskRunning = false;
        prepForModal();
        alertModal.setVisibility(View.VISIBLE);
        alertText.setText(message);
        alertOk.setOnClickListener(dismissModal);
        alertText.setVisibility(View.VISIBLE);
        alertOk.setVisibility(View.VISIBLE);
    }

    /** Lets the background task flag itself so setLayout() can restore the progress modal. */
    public void setAsyncRunning(boolean running) {
        asyncTaskRunning = running;
    }

    //////////////////////////////////////
    // Catalog List Inflation Utilities //
    //////////////////////////////////////

    /** List adapter that renders one checkable catalog row per installed catalog. */
    class CatalogAdapter extends ArrayAdapter<String>{
        int listLayout;

        CatalogAdapter(Context context, int listLayout, List<String> list){
            super(context, listLayout, list);
            this.listLayout = listLayout;
        }

        @Override
        public View getView(int position, View convertView, ViewGroup parent){
            // Standard view-holder recycling: reuse convertView's wrapper when present.
            CatalogWrapper wrapper = null;
            if (convertView == null){
                convertView = getLayoutInflater().inflate(listLayout, null);
                wrapper = new CatalogWrapper(convertView);
                convertView.setTag(wrapper);
            }
            else{
                wrapper = (CatalogWrapper)convertView.getTag();
            }
            wrapper.populateFrom(getItem(position));
            return convertView;
        }
    }

    /** View-holder for a catalog row: caches the name label and checkbox icon lookups. */
    class CatalogWrapper{
        private TextView name = null;
        private ImageView icon = null;
        private View row = null;

        CatalogWrapper(View row){
            this.row = row;
        }

        TextView getName(){
            if (name == null){
                name = (TextView)row.findViewById(R.id.catalog_name);
            }
            return name;
        }

        ImageView getIcon(){
            if (icon == null){
                icon = (ImageView)row.findViewById(R.id.checkbox);
            }
            return icon;
        }

        // Rows always start unchecked; onListItemClick toggles the icon afterwards.
        void populateFrom(String catalog){
            getName().setText(catalog);
            getIcon().setImageResource(settingsRef.getCheckbox_Unselected());
        }
    }
}
/* * Copyright 2000-2017 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.codeInsight.daemon.impl.quickfix; import com.intellij.codeInsight.*; import com.intellij.codeInsight.completion.proc.VariablesProcessor; import com.intellij.codeInsight.daemon.JavaErrorMessages; import com.intellij.codeInsight.daemon.QuickFixBundle; import com.intellij.codeInsight.generation.OverrideImplementUtil; import com.intellij.codeInsight.generation.PsiGenerationInfo; import com.intellij.codeInsight.intention.CreateFromUsage; import com.intellij.codeInsight.intention.IntentionAction; import com.intellij.codeInsight.intention.impl.CreateClassDialog; import com.intellij.codeInsight.lookup.LookupElement; import com.intellij.codeInsight.lookup.LookupElementBuilder; import com.intellij.codeInsight.template.*; import com.intellij.ide.fileTemplates.FileTemplate; import com.intellij.ide.fileTemplates.FileTemplateManager; import com.intellij.ide.fileTemplates.FileTemplateUtil; import com.intellij.ide.fileTemplates.JavaTemplateUtil; import com.intellij.ide.util.EditorHelper; import com.intellij.lang.java.JavaLanguage; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.ReadAction; import com.intellij.openapi.application.WriteAction; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.editor.EditorModificationUtil; import com.intellij.openapi.editor.ScrollType; 
import com.intellij.openapi.fileTypes.FileType; import com.intellij.openapi.fileTypes.FileTypeManager; import com.intellij.openapi.module.Module; import com.intellij.openapi.module.ModuleUtilCore; import com.intellij.openapi.progress.ProcessCanceledException; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.DialogWrapper; import com.intellij.openapi.ui.Messages; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.TextRange; import com.intellij.openapi.util.text.StringUtil; import com.intellij.psi.*; import com.intellij.psi.codeStyle.CodeStyleManager; import com.intellij.psi.codeStyle.JavaCodeStyleManager; import com.intellij.psi.codeStyle.SuggestedNameInfo; import com.intellij.psi.codeStyle.VariableKind; import com.intellij.psi.scope.util.PsiScopesUtil; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.psi.search.PsiShortNamesCache; import com.intellij.psi.search.searches.ClassInheritorsSearch; import com.intellij.psi.statistics.JavaStatisticsManager; import com.intellij.psi.util.ProximityLocation; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.psi.util.PsiTypesUtil; import com.intellij.psi.util.PsiUtil; import com.intellij.psi.util.proximity.PsiProximityComparator; import com.intellij.refactoring.util.RefactoringUtil; import com.intellij.util.ArrayUtil; import com.intellij.util.IncorrectOperationException; import com.intellij.util.ObjectUtils; import com.intellij.util.containers.ContainerUtil; import kotlin.collections.ArraysKt; import kotlin.collections.CollectionsKt; import kotlin.jvm.functions.Function0; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.*; /** * @author mike */ public class CreateFromUsageUtils { private static final Logger LOG = Logger.getInstance( 
    "#com.intellij.codeInsight.daemon.impl.quickfix.CreateFromUsageUtils");

  private static final int MAX_GUESSED_MEMBERS_COUNT = 10;
  private static final int MAX_RAW_GUESSED_MEMBERS_COUNT = 2 * MAX_GUESSED_MEMBERS_COUNT;

  /**
   * A reference is "valid" for create-from-usage purposes when it is a Java
   * reference with at least one resolve candidate; unless unresolvedOnly is
   * set, every candidate must also be a valid, non-package result.
   */
  static boolean isValidReference(PsiReference reference, boolean unresolvedOnly) {
    if (!(reference instanceof PsiJavaReference)) return false;
    JavaResolveResult[] results = ((PsiJavaReference)reference).multiResolve(true);
    if(results.length == 0) return false;
    if (!unresolvedOnly) {
      for (JavaResolveResult result : results) {
        if (!result.isValidResult()) return false;
        if (result.getElement() instanceof PsiPackage) return false;
      }
    }
    return true;
  }

  /**
   * True when the reference resolves to a method that is applicable to the
   * given call's argument list.
   */
  static boolean isValidMethodReference(PsiReference reference, PsiMethodCallExpression call) {
    if (!(reference instanceof PsiJavaReference)) return false;
    try {
      JavaResolveResult candidate = ((PsiJavaReference) reference).advancedResolve(true);
      PsiElement result = candidate.getElement();
      return result instanceof PsiMethod && PsiUtil.isApplicable((PsiMethod)result, candidate.getSubstitutor(), call.getArgumentList());
    }
    catch (ClassCastException cce) {
      // rare case
      return false;
    }
  }

  /**
   * Decide whether a "create constructor" fix should be offered: either no
   * candidate constructor exists (and the target class can have one), or the
   * candidate is not applicable to the argument list.
   */
  static boolean shouldCreateConstructor(PsiClass targetClass, PsiExpressionList argList, PsiMethod candidate) {
    if (argList == null) return false;
    if (candidate == null) {
      // No-arg call against a class with no declared constructors already
      // works via the default constructor, so nothing to create then.
      return targetClass != null && !targetClass.isInterface() && !(targetClass instanceof PsiTypeParameter) &&
             !(argList.getExpressions().length == 0 && targetClass.getConstructors().length == 0);
    }
    else {
      return !PsiUtil.isApplicable(candidate, PsiSubstitutor.EMPTY, argList);
    }
  }

  /** Convenience overload: fill the method body using its containing class. */
  public static void setupMethodBody(@NotNull PsiMethod method) throws IncorrectOperationException {
    PsiClass aClass = method.getContainingClass();
    setupMethodBody(method, aClass);
  }

  /** Convenience overload: fill the method body from the "new method body" file template. */
  public static void setupMethodBody(final PsiMethod method, final PsiClass aClass) throws IncorrectOperationException {
    FileTemplate template = FileTemplateManager.getInstance(method.getProject()).getCodeTemplate(JavaTemplateUtil.TEMPLATE_FROM_USAGE_METHOD_BODY);
    setupMethodBody(method, aClass, template);
  }

  /**
   * Generate a method body from the given file template (substituting return
   * type / default return value / class and method name properties), parse it
   * into a PSI method, and install that body on the target method, replacing
   * any existing body or trailing "{ expected" error element.
   */
  public static void setupMethodBody(final PsiMethod method, final PsiClass aClass, final FileTemplate template) throws IncorrectOperationException {
    PsiType returnType = method.getReturnType();
    if (returnType == null) {
      returnType = PsiType.VOID;
    }
    JVMElementFactory factory = JVMElementFactories.getFactory(aClass.getLanguage(), aClass.getProject());
    LOG.assertTrue(!aClass.isInterface() || PsiUtil.isLanguageLevel8OrHigher(method) || method.getLanguage() != JavaLanguage.INSTANCE, "Interface bodies should be already set up");
    FileType fileType = FileTypeManager.getInstance().getFileTypeByExtension(template.getExtension());
    Properties properties = new Properties();
    properties.setProperty(FileTemplate.ATTRIBUTE_RETURN_TYPE, returnType.getPresentableText());
    properties.setProperty(FileTemplate.ATTRIBUTE_DEFAULT_RETURN_VALUE, PsiTypesUtil.getDefaultValueOfType(returnType));
    JavaTemplateUtil.setClassAndMethodNameProperties(properties, aClass, method);

    @NonNls String methodText;
    CodeStyleManager csManager = CodeStyleManager.getInstance(method.getProject());
    try {
      String bodyText = template.getText(properties);
      if (!bodyText.isEmpty()) bodyText += "\n";
      // Wrap the template body in a dummy "foo" method so it can be parsed.
      methodText = returnType.getPresentableText() + " foo () {\n" + bodyText + "}";
      methodText = FileTemplateUtil.indent(methodText, method.getProject(), fileType);
    }
    catch (ProcessCanceledException e) {
      throw e;
    }
    catch (Exception e) {
      throw new IncorrectOperationException("Failed to parse file template", (Throwable)e);
    }
    if (methodText != null) {
      PsiMethod m;
      try {
        m = factory.createMethodFromText(methodText, aClass);
      }
      catch (IncorrectOperationException e) {
        // Broken user template: report asynchronously and leave the method untouched.
        ApplicationManager.getApplication().invokeLater(
          () -> Messages.showErrorDialog(QuickFixBundle.message("new.method.body.template.error.text"),
                                         QuickFixBundle.message("new.method.body.template.error.title")));
        return;
      }
      PsiElement newBody = m.getBody();
      LOG.assertTrue(newBody != null);
      PsiElement oldBody = method.getBody();
      if (oldBody == null) {
        // A method stub like "void foo()" ends in an error element; replace that.
        PsiElement last = method.getLastChild();
        if (last instanceof PsiErrorElement &&
            JavaErrorMessages.message("expected.lbrace.or.semicolon").equals(((PsiErrorElement)last).getErrorDescription())) {
          oldBody = last;
        }
      }
      if (oldBody != null) {
        oldBody.replace(newBody);
      }
      else {
        method.add(newBody);
      }
      csManager.reformat(method);
    }
  }

  /**
   * Position the caret inside the freshly generated method body and scroll to
   * it. When the body is empty (end offset before start), insert an indented
   * blank line for the caret; otherwise delegate caret placement to the
   * generation info (handles both Java and Groovy).
   */
  public static void setupEditor(PsiMethod method, final Editor newEditor) {
    PsiCodeBlock body = method.getBody();
    if (body != null) {
      PsiElement l = PsiTreeUtil.skipWhitespacesForward(body.getLBrace());
      PsiElement r = PsiTreeUtil.skipWhitespacesBackward(body.getRBrace());
      if (l != null && r != null) {
        int start = l.getTextRange().getStartOffset();
        int end = r.getTextRange().getEndOffset();
        newEditor.getCaretModel().moveToOffset(Math.max(start, end));
        if (end < start) {
          // Empty body — put the caret on a properly indented new line.
          newEditor.getCaretModel().moveToOffset(end + 1);
          CodeStyleManager styleManager = CodeStyleManager.getInstance(method.getProject());
          PsiFile containingFile = method.getContainingFile();
          final String lineIndent = styleManager.getLineIndent(containingFile, Math.min(start, end));
          PsiDocumentManager manager = PsiDocumentManager.getInstance(method.getProject());
          manager.doPostponedOperationsAndUnblockDocument(manager.getDocument(containingFile));
          EditorModificationUtil.insertStringAtCaret(newEditor, lineIndent);
          EditorModificationUtil.insertStringAtCaret(newEditor, "\n", false, false);
        }
        else {
          //correct position caret for groovy and java methods
          final PsiGenerationInfo<PsiMethod> info = OverrideImplementUtil.createGenerationInfo(method);
          info.positionCaret(newEditor, true);
        }
        newEditor.getScrollingModel().scrollToCaret(ScrollType.RELATIVE);
      }
    }
  }

  /** Overload taking a PSI argument list; delegates with its expressions. */
  static void setupMethodParameters(PsiMethod method, TemplateBuilder builder, PsiExpressionList argumentList,
                                    PsiSubstitutor substitutor) throws IncorrectOperationException {
    if (argumentList == null)
      return;
    PsiExpression[] args = argumentList.getExpressions();
    setupMethodParameters(method, builder, argumentList, substitutor, args);
  }

  /** Overload taking raw argument expressions (paired with null explicit types). */
  public static void setupMethodParameters(final PsiMethod method, final TemplateBuilder builder, final PsiElement contextElement,
                                           final PsiSubstitutor substitutor, final PsiExpression[] arguments) {
    setupMethodParameters(method, builder, contextElement, substitutor, ContainerUtil.map2List(arguments, Pair.createFunction(null)));
  }

  /**
   * Derive parameter info (expected type constraints plus suggested names)
   * for each argument of the call being fixed. Null / non-inferred / wildcard
   * argument types fall back to java.lang.Object (or the wildcard's bound),
   * and disjunction types collapse to their least upper bound.
   */
  static List<CreateFromUsage.ParameterInfo> getParameterInfos(
    final PsiElement context,
    final List<Pair<PsiExpression, PsiType>> arguments
  ) {
    //255 is the maximum number of method parameters
    int parameterCount = Math.min(arguments.size(), 255);
    ArrayList<CreateFromUsage.ParameterInfo> parameterInfos = new ArrayList<>(parameterCount);
    PsiManager psiManager = context.getManager();
    GlobalSearchScope resolveScope = context.getResolveScope();
    for (int i = 0; i < parameterCount; i++) {
      Pair<PsiExpression, PsiType> arg = arguments.get(i);
      PsiExpression exp = arg.first;
      PsiType argType = exp == null ? arg.second : RefactoringUtil.getTypeByExpression(exp);
      SuggestedNameInfo suggestedInfo = JavaCodeStyleManager.getInstance(psiManager.getProject()).suggestVariableName(
        VariableKind.PARAMETER, null, exp, argType);
      @NonNls String[] names = suggestedInfo.names; //TODO: callback about used name
      if (names.length == 0) {
        // No usable suggestion — fall back to a positional name.
        names = new String[]{"p" + i};
      }
      if (argType == null || PsiType.NULL.equals(argType) || LambdaUtil.notInferredType(argType)) {
        argType = PsiType.getJavaLangObject(psiManager, resolveScope);
      }
      else if (argType instanceof PsiDisjunctionType) {
        argType = ((PsiDisjunctionType)argType).getLeastUpperBound();
      }
      else if (argType instanceof PsiWildcardType) {
        argType = ((PsiWildcardType)argType).isBounded() ? ((PsiWildcardType)argType).getBound() : PsiType.getJavaLangObject(psiManager, resolveScope);
      }
      ExpectedTypeInfo info = ExpectedTypesProvider.createInfo(argType, ExpectedTypeInfo.TYPE_OR_SUPERTYPE, argType, TailType.NONE);
      CreateFromUsage.TypeInfo parameterTypeInfo = new CreateFromUsage.TypeInfo(Collections.singletonList(info));
      CreateFromUsage.ParameterInfo parameterInfo = new CreateFromUsage.ParameterInfo(parameterTypeInfo, ArraysKt.toList(names));
      parameterInfos.add(parameterInfo);
    }
    return parameterInfos;
  }

  /**
   * Materialize parameters on the generated method: create missing PsiParameters
   * (non-final in interfaces), then register template expressions so the user
   * can cycle through the guessed types and names in the live template.
   */
  static void setupMethodParameters(final PsiMethod method, final TemplateBuilder builder, final PsiElement contextElement,
                                    final PsiSubstitutor substitutor, final List<Pair<PsiExpression, PsiType>> arguments)
    throws IncorrectOperationException {
    PsiManager psiManager = method.getManager();
    JVMElementFactory factory = JVMElementFactories.getFactory(method.getLanguage(), method.getProject());
    if (factory == null) return;
    PsiParameterList parameterList = method.getParameterList();
    GuessTypeParameters guesser = new GuessTypeParameters(JavaPsiFacade.getElementFactory(method.getProject()));
    CodeStyleManager codeStyleManager = CodeStyleManager.getInstance(psiManager);
    final PsiClass containingClass = method.getContainingClass();
    final boolean isInterface = containingClass != null && containingClass.isInterface();
    List<CreateFromUsage.ParameterInfo> parameterInfos = getParameterInfos(method, arguments);
    for (int i = 0; i < parameterInfos.size(); i++) {
      CreateFromUsage.ParameterInfo parameterInfo = parameterInfos.get(i);
      ExpectedTypeInfo info = (ExpectedTypeInfo) CollectionsKt.first(parameterInfo.getTypeInfo().getTypeConstraints());
      List<String> suggestedNames = parameterInfo.getSuggestedNames();
      PsiParameter parameter;
      if (parameterList.getParametersCount() <= i) {
        PsiParameter param = factory.createParameter(suggestedNames.get(0), info.getType());
        if (isInterface) {
          PsiUtil.setModifierProperty(param, PsiModifier.FINAL, false);
        }
        // Formatter disabled: the surrounding live template controls formatting.
        parameter = codeStyleManager.performActionWithFormatterDisabled(() -> (PsiParameter) parameterList.add(param));
      }
      else {
        parameter = parameterList.getParameters()[i];
      }
      PsiElement context = PsiTreeUtil.getParentOfType(contextElement, PsiClass.class, PsiMethod.class);
      guesser.setupTypeElement(parameter.getTypeElement(), new ExpectedTypeInfo[]{info}, substitutor, builder, context, containingClass);
      Expression expression = new ParameterNameExpression(ArrayUtil.toStringArray(suggestedNames));
      builder.replaceElement(parameter.getNameIdentifier(), expression);
    }
  }

  /**
   * Create a class/interface/enum/annotation for an unresolved reference.
   * A class-qualified reference creates a nested class in the qualifier;
   * otherwise the target package is derived from the qualifier or the source
   * file, a directory is chosen via dialog (skipped in unit-test mode), and
   * the heavy lifting is delegated to the directory-based createClass().
   * Returns null when the user cancels or no valid target can be found.
   */
  @Nullable
  public static PsiClass createClass(final PsiJavaCodeReferenceElement referenceElement,
                                     final CreateClassKind classKind,
                                     final String superClassName) {
    assert !ApplicationManager.getApplication().isWriteAccessAllowed() : "You must not run createClass() from under write action";
    final String name = referenceElement.getReferenceName();

    String qualifierName;
    final PsiElement qualifierElement;
    PsiElement qualifier = referenceElement.getQualifier();
    if (qualifier instanceof PsiJavaCodeReferenceElement) {
      qualifierName = ((PsiJavaCodeReferenceElement)qualifier).getQualifiedName();
      qualifierElement = ((PsiJavaCodeReferenceElement)qualifier).resolve();
      if (qualifierElement instanceof PsiClass) {
        if (!FileModificationService.getInstance().preparePsiElementForWrite(qualifierElement)) return null;
        return WriteAction.compute(() -> createClassInQualifier((PsiClass)qualifierElement, classKind, name, referenceElement));
      }
    }
    else {
      qualifierName = null;
      qualifierElement = null;
    }

    final PsiManager manager = referenceElement.getManager();
    final PsiFile sourceFile = referenceElement.getContainingFile();
    final Module module = ModuleUtilCore.findModuleForPsiElement(sourceFile);
    if (qualifierName == null) {
      PsiPackage aPackage = findTargetPackage(qualifierElement, manager, sourceFile);
      if (aPackage == null) return null;
      qualifierName = aPackage.getQualifiedName();
    }
    final PsiDirectory targetDirectory;
    if (!ApplicationManager.getApplication().isUnitTestMode()) {
      Project project = manager.getProject();
      String title = QuickFixBundle.message("create.class.title", StringUtil.capitalize(classKind.getDescription()));

      CreateClassDialog dialog = new CreateClassDialog(project, title, name, qualifierName, classKind, false, module){
        @Override
        protected boolean reportBaseInSourceSelectionInTest() {
          return true;
        }
      };
      dialog.show();
      if (dialog.getExitCode() != DialogWrapper.OK_EXIT_CODE) return null;

      targetDirectory = dialog.getTargetDirectory();
      if (targetDirectory == null) return null;
    }
    else {
      targetDirectory = null;
    }
    return createClass(classKind, targetDirectory, name, manager, referenceElement, sourceFile, superClassName);
  }

  /**
   * Resolve the package to create the class in: the qualifier itself when it
   * is a package, otherwise the source file's directory package, finally the
   * default ("") package.
   */
  @Nullable
  private static PsiPackage findTargetPackage(PsiElement qualifierElement, PsiManager manager, PsiFile sourceFile) {
    PsiPackage aPackage = null;
    if (qualifierElement instanceof PsiPackage) {
      aPackage = (PsiPackage)qualifierElement;
    }
    else {
      final PsiDirectory directory = sourceFile.getContainingDirectory();
      if (directory != null) {
        aPackage = JavaDirectoryService.getInstance().getPackage(directory);
      }
      if (aPackage == null) {
        aPackage = JavaPsiFacade.getInstance(manager.getProject()).findPackage("");
      }
    }
    if (aPackage == null) return null;
    return aPackage;
  }

  /**
   * Create the requested kind of class as a member of the (resolved) qualifier
   * class, copying the reference's generic parameters onto it. Must be called
   * inside a write action (see caller).
   */
  private static PsiClass createClassInQualifier(PsiClass psiClass,
                                                 CreateClassKind classKind,
                                                 String name,
                                                 PsiJavaCodeReferenceElement referenceElement) {
    PsiManager manager = psiClass.getManager();
    PsiElementFactory elementFactory = JavaPsiFacade.getInstance(manager.getProject()).getElementFactory();
    PsiClass result = classKind == CreateClassKind.INTERFACE ? elementFactory.createInterface(name) :
                      classKind == CreateClassKind.CLASS ? elementFactory.createClass(name) :
                      classKind == CreateClassKind.ANNOTATION ?
elementFactory.createAnnotationType(name) : elementFactory.createEnum(name); CreateFromUsageBaseFix.setupGenericParameters(result, referenceElement); result = (PsiClass)CodeStyleManager.getInstance(manager.getProject()).reformat(result); return (PsiClass) psiClass.add(result); } public static PsiClass createClass(final CreateClassKind classKind, final PsiDirectory directory, final String name, final PsiManager manager, @NotNull final PsiElement contextElement, final PsiFile sourceFile, final String superClassName) { final JavaPsiFacade facade = JavaPsiFacade.getInstance(manager.getProject()); final PsiElementFactory factory = facade.getElementFactory(); return WriteAction.compute(() -> { try { PsiClass targetClass; if (directory != null) { try { if (classKind == CreateClassKind.INTERFACE) { targetClass = JavaDirectoryService.getInstance().createInterface(directory, name); } else if (classKind == CreateClassKind.CLASS) { targetClass = JavaDirectoryService.getInstance().createClass(directory, name); } else if (classKind == CreateClassKind.ENUM) { targetClass = JavaDirectoryService.getInstance().createEnum(directory, name); } else if (classKind == CreateClassKind.ANNOTATION) { targetClass = JavaDirectoryService.getInstance().createAnnotationType(directory, name); } else { LOG.error("Unknown kind of a class to create"); return null; } } catch (final IncorrectOperationException e) { scheduleFileOrPackageCreationFailedMessageBox(e, name, directory, false); return null; } if (!facade.getResolveHelper().isAccessible(targetClass, contextElement, null)) { PsiUtil.setModifierProperty(targetClass, PsiModifier.PUBLIC, true); } } else { //tests PsiClass aClass; if (classKind == CreateClassKind.INTERFACE) { aClass = factory.createInterface(name); } else if (classKind == CreateClassKind.CLASS) { aClass = factory.createClass(name); } else if (classKind == CreateClassKind.ENUM) { aClass = factory.createEnum(name); } else if (classKind == CreateClassKind.ANNOTATION) { aClass = 
factory.createAnnotationType(name); } else { LOG.error("Unknown kind of a class to create"); return null; } targetClass = (PsiClass)sourceFile.add(aClass); } if (superClassName != null && (classKind != CreateClassKind.ENUM || !superClassName.equals(CommonClassNames.JAVA_LANG_ENUM))) { setupSuperClassReference(targetClass, superClassName); } if (contextElement instanceof PsiJavaCodeReferenceElement) { CreateFromUsageBaseFix.setupGenericParameters(targetClass, (PsiJavaCodeReferenceElement)contextElement); } return targetClass; } catch (IncorrectOperationException e) { LOG.error(e); return null; } }); } public static void setupSuperClassReference(PsiClass targetClass, String superClassName) { JavaPsiFacade facade = JavaPsiFacade.getInstance(targetClass.getProject()); PsiElementFactory factory = facade.getElementFactory(); final PsiClass superClass = facade.findClass(superClassName, targetClass.getResolveScope()); final PsiJavaCodeReferenceElement superClassReference = factory.createReferenceElementByFQClassName(superClassName, targetClass.getResolveScope()); final PsiReferenceList list = targetClass.isInterface() || superClass == null || !superClass.isInterface() ? targetClass.getExtendsList() : targetClass.getImplementsList(); list.add(superClassReference); } public static void scheduleFileOrPackageCreationFailedMessageBox(final IncorrectOperationException e, final String name, final PsiDirectory directory, final boolean isPackage) { ApplicationManager.getApplication().invokeLater(() -> Messages.showErrorDialog(QuickFixBundle.message( isPackage ? "cannot.create.java.package.error.text" : "cannot.create.java.file.error.text", name, directory.getVirtualFile().getName(), e.getLocalizedMessage()), QuickFixBundle.message( isPackage ? "cannot.create.java.package.error.title" : "cannot.create.java.file.error.title"))); } @SafeVarargs @NotNull public static PsiReferenceExpression[] collectExpressions(final PsiExpression expression, @NotNull Class<? extends PsiElement>... 
scopes) { PsiElement parent = PsiTreeUtil.getParentOfType(expression, scopes); final List<PsiReferenceExpression> result = new ArrayList<>(); JavaRecursiveElementWalkingVisitor visitor = new JavaRecursiveElementWalkingVisitor() { @Override public void visitReferenceExpression(PsiReferenceExpression expr) { if (expression instanceof PsiReferenceExpression) { if (expr.textMatches(expression) && !isValidReference(expr, false)) { result.add(expr); } } visitElement(expr); } @Override public void visitMethodCallExpression(PsiMethodCallExpression expr) { if (expression instanceof PsiMethodCallExpression) { PsiReferenceExpression methodExpression = expr.getMethodExpression(); if (methodExpression.textMatches(((PsiMethodCallExpression) expression).getMethodExpression())) { result.add(expr.getMethodExpression()); } } visitElement(expr); } }; if (parent != null) { parent.accept(visitor); } return result.toArray(new PsiReferenceExpression[result.size()]); } @NotNull static PsiVariable[] guessMatchingVariables(final PsiExpression expression) { List<ExpectedTypeInfo[]> typesList = new ArrayList<>(); List<String> expectedMethodNames = new ArrayList<>(); List<String> expectedFieldNames = new ArrayList<>(); getExpectedInformation(expression, typesList, expectedMethodNames, expectedFieldNames); final List<PsiVariable> list = new ArrayList<>(); VariablesProcessor varproc = new VariablesProcessor("", true, list){ @Override public boolean execute(@NotNull PsiElement element, @NotNull ResolveState state) { if(!(element instanceof PsiField) || JavaPsiFacade.getInstance(element.getProject()).getResolveHelper().isAccessible((PsiField)element, expression, null)) { return super.execute(element, state); } return true; } }; PsiScopesUtil.treeWalkUp(varproc, expression, null); PsiVariable[] allVars = varproc.getResultsAsArray(); ExpectedTypeInfo[] infos = ExpectedTypeUtil.intersect(typesList); List<PsiVariable> result = new ArrayList<>(); nextVar: for (PsiVariable variable : allVars) { PsiType 
varType = variable.getType(); boolean matched = infos.length == 0; for (ExpectedTypeInfo info : infos) { if (ExpectedTypeUtil.matches(varType, info)) { matched = true; break; } } if (matched) { if (!expectedFieldNames.isEmpty() && !expectedMethodNames.isEmpty()) { if (!(varType instanceof PsiClassType)) continue; PsiClass aClass = ((PsiClassType)varType).resolve(); if (aClass == null) continue; for (String name : expectedFieldNames) { if (aClass.findFieldByName(name, true) == null) continue nextVar; } for (String name : expectedMethodNames) { PsiMethod[] methods = aClass.findMethodsByName(name, true); if (methods.length == 0) continue nextVar; } } result.add(variable); } } return result.toArray(new PsiVariable[result.size()]); } private static void getExpectedInformation(final PsiExpression expression, List<ExpectedTypeInfo[]> types, List<String> expectedMethodNames, List<String> expectedFieldNames) { Comparator<ExpectedTypeInfo> expectedTypesComparator = (o1, o2) -> compareExpectedTypes(o1, o2, expression); for (PsiExpression expr : collectExpressions(expression, PsiMember.class, PsiFile.class)) { PsiElement parent = expr.getParent(); if (!(parent instanceof PsiReferenceExpression)) { boolean isAssignmentToFunctionalExpression = PsiUtil.isOnAssignmentLeftHand(expr) && ((PsiAssignmentExpression)PsiUtil.skipParenthesizedExprUp(parent)).getRExpression() instanceof PsiFunctionalExpression; PsiExpressionList expressionList = ObjectUtils .tryCast(PsiUtil.skipParenthesizedExprUp(isAssignmentToFunctionalExpression ? 
parent.getParent() : parent), PsiExpressionList.class); boolean forCompletion = expressionList != null || parent.getParent() instanceof PsiPolyadicExpression; ExpectedTypeInfo[] someExpectedTypes = ExpectedTypesProvider.getExpectedTypes(expr, forCompletion); if (someExpectedTypes.length > 0) { Comparator<ExpectedTypeInfo> comparator = expectedTypesComparator; if (expressionList != null) { int argCount = expressionList.getExpressions().length; Comparator<ExpectedTypeInfo> mostSuitableMethodComparator = Comparator.comparingInt(typeInfo -> typeInfo.getCalledMethod().getParameterList().getParametersCount() == argCount ? 0 : 1); comparator = mostSuitableMethodComparator.thenComparing(comparator); } Arrays.sort(someExpectedTypes, comparator); types.add(someExpectedTypes); } continue; } String refName = ((PsiReferenceExpression)parent).getReferenceName(); if (refName == null) { continue; } PsiElement pparent = parent.getParent(); if (pparent instanceof PsiMethodCallExpression) { expectedMethodNames.add(refName); if (refName.equals("equals")) { ExpectedTypeInfo[] someExpectedTypes = equalsExpectedTypes((PsiMethodCallExpression)pparent); if (someExpectedTypes.length > 0) { Arrays.sort(someExpectedTypes, expectedTypesComparator); types.add(someExpectedTypes); } } continue; } if (pparent instanceof PsiReferenceExpression || pparent instanceof PsiVariable || pparent instanceof PsiExpression) { expectedFieldNames.add(refName); } } } private static int compareExpectedTypes(ExpectedTypeInfo o1, ExpectedTypeInfo o2, PsiExpression expression) { PsiClass c1 = PsiUtil.resolveClassInType(o1.getDefaultType()); PsiClass c2 = PsiUtil.resolveClassInType(o2.getDefaultType()); if (c1 == null && c2 == null) return 0; if (c1 == null || c2 == null) return c1 == null ? 
-1 : 1; return compareMembers(c1, c2, expression); } @NotNull private static ExpectedTypeInfo[] equalsExpectedTypes(PsiMethodCallExpression methodCall) { final PsiType[] argumentTypes = methodCall.getArgumentList().getExpressionTypes(); if (argumentTypes.length != 1) { return ExpectedTypeInfo.EMPTY_ARRAY; } PsiType type = argumentTypes[0]; if (type instanceof PsiPrimitiveType) { type = ((PsiPrimitiveType)type).getBoxedType(methodCall); } if (type == null) return ExpectedTypeInfo.EMPTY_ARRAY; return new ExpectedTypeInfo[]{ExpectedTypesProvider.createInfo(type, ExpectedTypeInfo.TYPE_STRICTLY, type, TailType.NONE)}; } @NotNull static ExpectedTypeInfo[] guessExpectedTypes(@NotNull PsiExpression expression, boolean allowVoidType) { PsiManager manager = expression.getManager(); GlobalSearchScope resolveScope = expression.getResolveScope(); List<ExpectedTypeInfo[]> typesList = new ArrayList<>(); List<String> expectedMethodNames = new ArrayList<>(); List<String> expectedFieldNames = new ArrayList<>(); getExpectedInformation(expression, typesList, expectedMethodNames, expectedFieldNames); if (typesList.size() == 1 && (!expectedFieldNames.isEmpty() || !expectedMethodNames.isEmpty())) { ExpectedTypeInfo[] infos = typesList.get(0); if (infos.length == 1 && infos[0].getKind() == ExpectedTypeInfo.TYPE_OR_SUBTYPE && infos[0].getType().equals(PsiType.getJavaLangObject(manager, resolveScope))) { typesList.clear(); } } if (typesList.isEmpty()) { final JavaPsiFacade facade = JavaPsiFacade.getInstance(expression.getProject()); final PsiShortNamesCache cache = PsiShortNamesCache.getInstance(expression.getProject()); PsiElementFactory factory = facade.getElementFactory(); for (String fieldName : expectedFieldNames) { PsiField[] fields = cache.getFieldsByNameIfNotMoreThan(fieldName, resolveScope, MAX_RAW_GUESSED_MEMBERS_COUNT); addMemberInfo(fields, expression, typesList, factory); } for (String methodName : expectedMethodNames) { PsiMethod[] projectMethods = 
cache.getMethodsByNameIfNotMoreThan(methodName, resolveScope.intersectWith(GlobalSearchScope.projectScope(manager.getProject())), MAX_RAW_GUESSED_MEMBERS_COUNT); PsiMethod[] libraryMethods = cache.getMethodsByNameIfNotMoreThan(methodName, resolveScope.intersectWith(GlobalSearchScope.notScope(GlobalSearchScope.projectScope(manager.getProject()))), MAX_RAW_GUESSED_MEMBERS_COUNT); PsiMethod[] methods = ArrayUtil.mergeArrays(projectMethods, libraryMethods); addMemberInfo(methods, expression, typesList, factory); } } ExpectedTypeInfo[] expectedTypes = ExpectedTypeUtil.intersect(typesList); if (expectedTypes.length == 0 && !typesList.isEmpty()) { List<ExpectedTypeInfo> union = new ArrayList<>(); for (ExpectedTypeInfo[] aTypesList : typesList) { ContainerUtil.addAll(union, (ExpectedTypeInfo[])aTypesList); } expectedTypes = union.toArray(new ExpectedTypeInfo[union.size()]); } if (expectedTypes.length == 0) { PsiType t = allowVoidType ? PsiType.VOID : PsiType.getJavaLangObject(manager, resolveScope); expectedTypes = new ExpectedTypeInfo[] {ExpectedTypesProvider.createInfo(t, ExpectedTypeInfo.TYPE_OR_SUBTYPE, t, TailType.NONE)}; } return expectedTypes; } @NotNull static PsiType[] guessType(PsiExpression expression, final boolean allowVoidType) { final PsiManager manager = expression.getManager(); final GlobalSearchScope resolveScope = expression.getResolveScope(); List<ExpectedTypeInfo[]> typesList = new ArrayList<>(); final List<String> expectedMethodNames = new ArrayList<>(); final List<String> expectedFieldNames = new ArrayList<>(); getExpectedInformation(expression, typesList, expectedMethodNames, expectedFieldNames); if (typesList.size() == 1 && (!expectedFieldNames.isEmpty() || !expectedMethodNames.isEmpty())) { ExpectedTypeInfo[] infos = typesList.get(0); if (infos.length == 1 && infos[0].getKind() == ExpectedTypeInfo.TYPE_OR_SUBTYPE && infos[0].getType().equals(PsiType.getJavaLangObject(manager, resolveScope))) { typesList.clear(); } } if (typesList.isEmpty()) { 
final JavaPsiFacade facade = JavaPsiFacade.getInstance(manager.getProject()); final PsiShortNamesCache cache = PsiShortNamesCache.getInstance(expression.getProject()); PsiElementFactory factory = facade.getElementFactory(); for (String fieldName : expectedFieldNames) { PsiField[] fields = cache.getFieldsByNameIfNotMoreThan(fieldName, resolveScope, MAX_RAW_GUESSED_MEMBERS_COUNT); addMemberInfo(fields, expression, typesList, factory); } for (String methodName : expectedMethodNames) { PsiMethod[] methods = cache.getMethodsByNameIfNotMoreThan(methodName, resolveScope, MAX_RAW_GUESSED_MEMBERS_COUNT); addMemberInfo(methods, expression, typesList, factory); } } ExpectedTypeInfo[] expectedTypes = ExpectedTypeUtil.intersect(typesList); if (expectedTypes.length == 0 && !typesList.isEmpty()) { List<ExpectedTypeInfo> union = new ArrayList<>(); for (ExpectedTypeInfo[] aTypesList : typesList) { ContainerUtil.addAll(union, (ExpectedTypeInfo[])aTypesList); } expectedTypes = union.toArray(new ExpectedTypeInfo[union.size()]); } if (expectedTypes.length == 0) { return allowVoidType ? 
new PsiType[]{PsiType.VOID} : new PsiType[]{PsiType.getJavaLangObject(manager, resolveScope)}; } else { //Double check to avoid expensive operations on PsiClassTypes final Set<PsiType> typesSet = new HashSet<>(); PsiTypeVisitor<PsiType> visitor = new PsiTypeVisitor<PsiType>() { @Override @Nullable public PsiType visitType(PsiType type) { if (PsiType.NULL.equals(type) || PsiType.VOID.equals(type) && !allowVoidType) { type = PsiType.getJavaLangObject(manager, resolveScope); } if (!typesSet.contains(type)) { if (type instanceof PsiClassType && (!expectedFieldNames.isEmpty() || !expectedMethodNames.isEmpty())) { PsiClass aClass = ((PsiClassType) type).resolve(); if (aClass != null) { for (String fieldName : expectedFieldNames) { if (aClass.findFieldByName(fieldName, true) == null) return null; } for (String methodName : expectedMethodNames) { PsiMethod[] methods = aClass.findMethodsByName(methodName, true); if (methods.length == 0) return null; } } } typesSet.add(type); return type; } return null; } @Override public PsiType visitCapturedWildcardType(PsiCapturedWildcardType capturedWildcardType) { return capturedWildcardType.getUpperBound().accept(this); } }; PsiType[] types = ExpectedTypesProvider.processExpectedTypes(expectedTypes, visitor, manager.getProject()); if (types.length == 0) { return allowVoidType ? 
new PsiType[]{PsiType.VOID} : new PsiType[]{PsiType.getJavaLangObject(manager, resolveScope)}; } return types; } } private static void addMemberInfo(PsiMember[] members, final PsiExpression expression, List<ExpectedTypeInfo[]> types, PsiElementFactory factory) { Arrays.sort(members, (m1, m2) -> compareMembers(m1, m2, expression)); List<ExpectedTypeInfo> l = new ArrayList<>(); PsiManager manager = expression.getManager(); JavaPsiFacade facade = JavaPsiFacade.getInstance(manager.getProject()); for (PsiMember member : members) { ProgressManager.checkCanceled(); PsiClass aClass = member.getContainingClass(); if (aClass instanceof PsiAnonymousClass || aClass == null) continue; if (facade.getResolveHelper().isAccessible(member, expression, null)) { PsiClassType type; final PsiElement pparent = expression.getParent().getParent(); if (pparent instanceof PsiMethodCallExpression && member instanceof PsiMethod) { PsiSubstitutor substitutor = ExpectedTypeUtil.inferSubstitutor((PsiMethod)member, (PsiMethodCallExpression)pparent, false); if (substitutor == null) { type = factory.createType(aClass); } else { type = factory.createType(aClass, substitutor); } } else { type = factory.createType(aClass); } l.add(ExpectedTypesProvider.createInfo(type, ExpectedTypeInfo.TYPE_OR_SUBTYPE, type, TailType.NONE)); if (l.size() == MAX_GUESSED_MEMBERS_COUNT) break; } } if (!l.isEmpty()) { types.add(l.toArray(new ExpectedTypeInfo[l.size()])); } } private static int compareMembers(PsiMember m1, PsiMember m2, PsiExpression context) { ProgressManager.checkCanceled(); int result = JavaStatisticsManager.createInfo(null, m2).getUseCount() - JavaStatisticsManager.createInfo(null, m1).getUseCount(); if (result != 0) return result; final PsiClass aClass = m1.getContainingClass(); final PsiClass bClass = m2.getContainingClass(); if (aClass != null && bClass != null) { result = JavaStatisticsManager.createInfo(null, bClass).getUseCount() - JavaStatisticsManager.createInfo(null, aClass).getUseCount(); if 
(result != 0) return result; } WeighingComparable<PsiElement,ProximityLocation> proximity1 = PsiProximityComparator.getProximity(m1, context); WeighingComparable<PsiElement,ProximityLocation> proximity2 = PsiProximityComparator.getProximity(m2, context); if (proximity1 != null && proximity2 != null) { result = proximity2.compareTo(proximity1); if (result != 0) return result; } String name1 = PsiUtil.getMemberQualifiedName(m1); String name2 = PsiUtil.getMemberQualifiedName(m2); return Comparing.compare(name1, name2); } public static boolean isAccessedForWriting(final PsiExpression[] expressionOccurences) { for (PsiExpression expression : expressionOccurences) { if(expression.isValid() && PsiUtil.isAccessedForWriting(expression)) return true; } return false; } static boolean shouldShowTag(int offset, PsiElement namedElement, PsiElement element) { if (namedElement == null) return false; TextRange range = namedElement.getTextRange(); if (range.getLength() == 0) return false; boolean isInNamedElement = range.contains(offset); return isInNamedElement || element.getTextRange().contains(offset-1); } public static void addClassesWithMember(final String memberName, final PsiFile file, final Set<String> possibleClassNames, final boolean method, final boolean staticAccess) { addClassesWithMember(memberName, file, possibleClassNames, method, staticAccess, true); } public static void addClassesWithMember(final String memberName, final PsiFile file, final Set<String> possibleClassNames, final boolean method, final boolean staticAccess, final boolean addObjectInheritors) { final Project project = file.getProject(); final Module moduleForFile = ModuleUtilCore.findModuleForPsiElement(file); if (moduleForFile == null) return; final GlobalSearchScope searchScope = ReadAction.compute(file::getResolveScope); GlobalSearchScope descendantsSearchScope = GlobalSearchScope.moduleWithDependenciesScope(moduleForFile); final JavaPsiFacade facade = JavaPsiFacade.getInstance(project); final 
PsiShortNamesCache cache = PsiShortNamesCache.getInstance(project); if (handleObjectMethod(possibleClassNames, facade, searchScope, method, memberName, staticAccess, addObjectInheritors)) { return; } final PsiMember[] members = ReadAction.compute( () -> method ? cache.getMethodsByName(memberName, searchScope) : cache.getFieldsByName(memberName, searchScope)); for (int i = 0; i < members.length; ++i) { final PsiMember member = members[i]; if (hasCorrectModifiers(member, staticAccess)) { final PsiClass containingClass = member.getContainingClass(); if (containingClass != null) { final String qName = getQualifiedName(containingClass); if (qName == null) continue; ClassInheritorsSearch.search(containingClass, descendantsSearchScope, true, true, false).forEach(psiClass -> { ContainerUtil.addIfNotNull(possibleClassNames, getQualifiedName(psiClass)); return true; }); possibleClassNames.add(qName); } } members[i] = null; } } private static boolean handleObjectMethod(Set<String> possibleClassNames, final JavaPsiFacade facade, final GlobalSearchScope searchScope, final boolean method, final String memberName, final boolean staticAccess, boolean addInheritors) { final PsiShortNamesCache cache = PsiShortNamesCache.getInstance(facade.getProject()); final boolean[] allClasses = {false}; ReadAction.run(() -> { final PsiClass objectClass = facade.findClass(CommonClassNames.JAVA_LANG_OBJECT, searchScope); if (objectClass != null) { if (method && objectClass.findMethodsByName(memberName, false).length > 0) { allClasses[0] = true; } else if (!method) { final PsiField field = objectClass.findFieldByName(memberName, false); if (hasCorrectModifiers(field, staticAccess)) { allClasses[0] = true; } } } }); if (allClasses[0]) { possibleClassNames.add(CommonClassNames.JAVA_LANG_OBJECT); if (!addInheritors) { return true; } final String[] strings = ReadAction.compute(cache::getAllClassNames); for (final String className : strings) { final PsiClass[] classes = ReadAction.compute(() -> 
cache.getClassesByName(className, searchScope)); for (final PsiClass aClass : classes) { final String qname = getQualifiedName(aClass); ContainerUtil.addIfNotNull(possibleClassNames, qname); } } return true; } return false; } @Nullable private static String getQualifiedName(final PsiClass aClass) { return ReadAction.compute(aClass::getQualifiedName); } private static boolean hasCorrectModifiers(@Nullable final PsiMember member, final boolean staticAccess) { if (member == null) { return false; } return ReadAction.compute(() -> !member.hasModifierProperty(PsiModifier.PRIVATE) && member.hasModifierProperty(PsiModifier.STATIC) == staticAccess).booleanValue(); } private static class ParameterNameExpression extends Expression { private final String[] myNames; private ParameterNameExpression(String[] names) { myNames = names; } @Override public Result calculateResult(ExpressionContext context) { LookupElement[] lookupItems = calculateLookupItems(context); if (lookupItems.length == 0) return new TextResult(""); return new TextResult(lookupItems[0].getLookupString()); } @Override public Result calculateQuickResult(ExpressionContext context) { return null; } @Override @NotNull public LookupElement[] calculateLookupItems(ExpressionContext context) { Project project = context.getProject(); int offset = context.getStartOffset(); PsiDocumentManager.getInstance(project).commitAllDocuments(); PsiFile file = PsiDocumentManager.getInstance(project).getPsiFile(context.getEditor().getDocument()); assert file != null; PsiElement elementAt = file.findElementAt(offset); PsiParameterList parameterList = PsiTreeUtil.getParentOfType(elementAt, PsiParameterList.class); if (parameterList == null) { if (elementAt == null) return LookupElement.EMPTY_ARRAY; final PsiElement parent = elementAt.getParent(); if (parent instanceof PsiMethod) { parameterList = ((PsiMethod)parent).getParameterList(); } else { return LookupElement.EMPTY_ARRAY; } } PsiParameter parameter = 
PsiTreeUtil.getParentOfType(elementAt, PsiParameter.class); Set<String> parameterNames = new HashSet<>(); for (PsiParameter psiParameter : parameterList.getParameters()) { if (psiParameter == parameter) continue; parameterNames.add(psiParameter.getName()); } Set<LookupElement> set = new LinkedHashSet<>(); for (String name : myNames) { if (parameterNames.contains(name)) { int j = 1; while (parameterNames.contains(name + j)) j++; name += j; } set.add(LookupElementBuilder.create(name)); } String[] suggestedNames = ExpressionUtil.getNames(context); if (suggestedNames != null) { for (String name : suggestedNames) { if (parameterNames.contains(name)) { int j = 1; while (parameterNames.contains(name + j)) j++; name += j; } set.add(LookupElementBuilder.create(name)); } } return set.toArray(new LookupElement[set.size()]); } } public static void invokeActionInTargetEditor(@NotNull PsiElement psiElement, @NotNull Function0<List<IntentionAction>> getActions) { IntentionAction action = CollectionsKt.firstOrNull(getActions.invoke()); if (action == null) return; Editor targetEditor = EditorHelper.openInEditor(psiElement); action.invoke(psiElement.getProject(), targetEditor, psiElement.getContainingFile()); } }
/* * Strongback * Copyright 2015, Strongback and individual contributors by the @authors tag. * See the COPYRIGHT.txt in the distribution for a full listing of individual * contributors. * * Licensed under the MIT License; you may not use this file except in * compliance with the License. You may obtain a copy of the License at * http://opensource.org/licenses/MIT * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.strongback.drive; import org.strongback.annotation.Experimental; import org.strongback.command.Command; import org.strongback.command.Requirable; import org.strongback.components.AngleSensor; import org.strongback.components.Motor; import org.strongback.components.Stoppable; import org.strongback.function.DoubleToDoubleFunction; import org.strongback.util.Values; /** * Control logic for a {@link MecanumDrive mecanum drive system}. This controller provides * {@link #cartesian(double, double, double) cartesian} and {@link #polar(double, double, double) polar} inputs. * <p> * This drive train will work for these configurations: * <ul> * <li>Mecanum - 4 mecanum wheels on the four corners of the robot; or</li> * <li>Holonomic - 4 omni wheels arranged so that the front and back wheels are toed in 45 degrees, which form an X across the * robot when viewed from above.</li> * </ul> * <p> * This drive implements {@link Requirable} so that {@link Command}s can use it directly when {@link Command#execute() * executing}. It is also designed to be driven by joystick axes. 
* * <p> * <em>NOTE: This class is experimental and needs to be thoroughly tested and debugged using actual hardware.</em> * * @author Randall Hauch */ @Experimental public class MecanumDrive implements Stoppable, Requirable { public static final double DEFAULT_MINIMUM_SPEED = 0.02; public static final double DEFAULT_MAXIMUM_SPEED = 1.0; public static final DoubleToDoubleFunction DEFAULT_SPEED_LIMITER = Values.symmetricLimiter(DEFAULT_MINIMUM_SPEED, DEFAULT_MAXIMUM_SPEED); private static final double SQRT_OF_TWO = Math.sqrt(2.0); private static final int NUMBER_OF_MOTORS = 4; private static final int LEFT_FRONT = 0; private static final int RIGHT_FRONT = 1; private static final int LEFT_REAR = 2; private static final int RIGHT_REAR = 3; private static final double OUTPUT_SCALE_FACTOR = 1.0; private final Motor leftFront; private final Motor leftRear; private final Motor rightFront; private final Motor rightRear; private final AngleSensor gyro; private final DoubleToDoubleFunction speedLimiter; /** * Creates a new DriveSystem subsystem that uses the supplied drive train and no shifter. The voltage send to the drive * train is limited to [-1.0,1.0]. * * @param leftFront the left front motor on the drive train for the robot; may not be null * @param leftRear the left rear motor on the drive train for the robot; may not be null * @param rightFront the right front motor on the drive train for the robot; may not be null * @param rightRear the right rear motor on the drive train for the robot; may not be null * @param gyro the gyroscope that will be used to determine the robot's direction for field-orientated controls; may not be * null */ public MecanumDrive(Motor leftFront, Motor leftRear, Motor rightFront, Motor rightRear, AngleSensor gyro) { this(leftFront, leftRear, rightFront, rightRear, gyro, null); } /** * Creates a new DriveSystem subsystem that uses the supplied drive train and optional shifter. 
The voltage send to the * drive train is limited by the given function. * * @param leftFront the left front motor on the drive train for the robot; may not be null * @param leftRear the left rear motor on the drive train for the robot; may not be null * @param rightFront the right front motor on the drive train for the robot; may not be null * @param rightRear the right rear motor on the drive train for the robot; may not be null * @param gyro the gyroscope that will be used to determine the robot's direction for field-orientated controls; may not be * null * @param speedLimiter the function that limits the speed sent to the drive train; if null, then a default clamping function * is used to limit to the range [-1.0,1.0] */ public MecanumDrive(Motor leftFront, Motor leftRear, Motor rightFront, Motor rightRear, AngleSensor gyro, DoubleToDoubleFunction speedLimiter) { this.leftFront = leftFront; this.leftRear = leftRear; this.rightFront = rightFront; this.rightRear = rightRear; this.gyro = gyro; this.speedLimiter = speedLimiter != null ? speedLimiter : DEFAULT_SPEED_LIMITER; } /** * Stop the drive train. This sets all motors to 0. */ @Override public void stop() { leftFront.stop(); rightFront.stop(); leftRear.stop(); rightRear.stop(); } /** * Cartesian drive method that specifies speeds in terms of the field longitudinal and lateral directions, using the drive's * angle sensor to automatically determine the robot's orientation relative to the field. * <p> * Using this method, the robot will move away from the drivers when the joystick is pushed forwards, and towards the * drivers when it is pulled towards them - regardless of what direction the robot is facing. * * @param x The speed that the robot should drive in the X direction. [-1.0..1.0] * @param y The speed that the robot should drive in the Y direction. This input is inverted to match the forward == -1.0 * that joysticks produce. 
[-1.0..1.0] * @param rotation The rate of rotation for the robot that is completely independent of the translation. [-1.0..1.0] */ public void cartesian(double x, double y, double rotation) { double xIn = x; double yIn = y; // Negate y for the joystick. yIn = -yIn; // Compensate for gyro angle. double rotated[] = rotateVector(xIn, yIn, gyro.getAngle()); xIn = rotated[0]; yIn = rotated[1]; double wheelSpeeds[] = new double[NUMBER_OF_MOTORS]; wheelSpeeds[LEFT_FRONT] = xIn + yIn + rotation; wheelSpeeds[RIGHT_FRONT] = -xIn + yIn - rotation; wheelSpeeds[LEFT_REAR] = -xIn + yIn + rotation; wheelSpeeds[RIGHT_REAR] = xIn + yIn - rotation; normalize(wheelSpeeds); scale(wheelSpeeds, OUTPUT_SCALE_FACTOR); leftFront.setSpeed(wheelSpeeds[LEFT_FRONT]); leftRear.setSpeed(wheelSpeeds[LEFT_REAR]); rightFront.setSpeed(wheelSpeeds[RIGHT_FRONT]); rightRear.setSpeed(wheelSpeeds[RIGHT_REAR]); } /** * Polar drive method that specifies speeds in terms of magnitude and direction. This method does not use the drive's angle * sensor. * * @param magnitude The speed that the robot should drive in a given direction. * @param direction The direction the robot should drive in degrees. The direction and magnitude are independent of the * rotation rate. * @param rotation The rate of rotation for the robot that is completely independent of the magnitude or direction. * [-1.0..1.0] */ public void polar(double magnitude, double direction, double rotation) { // Normalized for full power along the Cartesian axes. magnitude = speedLimiter.applyAsDouble(magnitude) * SQRT_OF_TWO; // The rollers are at 45 degree angles. 
double dirInRad = (direction + 45.0) * Math.PI / 180.0; double cosD = Math.cos(dirInRad); double sinD = Math.sin(dirInRad); double wheelSpeeds[] = new double[NUMBER_OF_MOTORS]; wheelSpeeds[LEFT_FRONT] = (sinD * magnitude + rotation); wheelSpeeds[RIGHT_FRONT] = (cosD * magnitude - rotation); wheelSpeeds[LEFT_REAR] = (cosD * magnitude + rotation); wheelSpeeds[RIGHT_REAR] = (sinD * magnitude - rotation); normalize(wheelSpeeds); scale(wheelSpeeds, OUTPUT_SCALE_FACTOR); leftFront.setSpeed(wheelSpeeds[LEFT_FRONT]); leftRear.setSpeed(wheelSpeeds[LEFT_REAR]); rightFront.setSpeed(wheelSpeeds[RIGHT_FRONT]); rightRear.setSpeed(wheelSpeeds[RIGHT_REAR]); } /** * Normalize all wheel speeds if the magnitude of any wheel is greater than 1.0. */ protected static void normalize(double wheelSpeeds[]) { double maxMagnitude = Math.abs(wheelSpeeds[0]); for (int i = 1; i < NUMBER_OF_MOTORS; i++) { double temp = Math.abs(wheelSpeeds[i]); if (maxMagnitude < temp) maxMagnitude = temp; } if (maxMagnitude > 1.0) { for (int i = 0; i < NUMBER_OF_MOTORS; i++) { wheelSpeeds[i] = wheelSpeeds[i] / maxMagnitude; } } } /** * Scale all speeds. */ protected static void scale(double wheelSpeeds[], double scaleFactor) { for (int i = 1; i < NUMBER_OF_MOTORS; i++) { wheelSpeeds[i] = wheelSpeeds[i] * scaleFactor; } } /** * Rotate a vector in Cartesian space. */ protected static double[] rotateVector(double x, double y, double angle) { double angleInRadians = Math.toRadians(angle); double cosA = Math.cos(angleInRadians); double sinA = Math.sin(angleInRadians); double out[] = new double[2]; out[0] = x * cosA - y * sinA; out[1] = x * sinA + y * cosA; return out; } }
/* * Copyright 2014 Google Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.samples.apps.iosched.service; import android.util.Log; import com.google.samples.apps.iosched.Config; import com.google.samples.apps.iosched.R; import com.google.samples.apps.iosched.provider.ScheduleContract; import com.google.samples.apps.iosched.util.AccountUtils; import android.annotation.TargetApi; import android.app.IntentService; import android.content.ContentProviderOperation; import android.content.ContentResolver; import android.content.ContentValues; import android.content.Intent; import android.content.OperationApplicationException; import android.database.Cursor; import android.net.Uri; import android.os.Bundle; import android.os.RemoteException; import android.provider.CalendarContract; import android.text.TextUtils; import com.google.samples.apps.iosched.util.PrefUtils; import java.util.ArrayList; import static com.google.samples.apps.iosched.util.LogUtils.LOGE; import static com.google.samples.apps.iosched.util.LogUtils.LOGW; import static com.google.samples.apps.iosched.util.LogUtils.makeLogTag; /** * Background {@link android.app.Service} that adds or removes session Calendar events through * the {@link CalendarContract} API available in Android 4.0 or above. 
*/ public class SessionCalendarService extends IntentService { private static final String TAG = makeLogTag(SessionCalendarService.class); public static final String ACTION_ADD_SESSION_CALENDAR = "com.google.samples.apps.iosched.action.ADD_SESSION_CALENDAR"; public static final String ACTION_REMOVE_SESSION_CALENDAR = "com.google.samples.apps.iosched.action.REMOVE_SESSION_CALENDAR"; public static final String ACTION_UPDATE_ALL_SESSIONS_CALENDAR = "com.google.samples.apps.iosched.action.UPDATE_ALL_SESSIONS_CALENDAR"; public static final String ACTION_UPDATE_ALL_SESSIONS_CALENDAR_COMPLETED = "com.google.samples.apps.iosched.action.UPDATE_CALENDAR_COMPLETED"; public static final String ACTION_CLEAR_ALL_SESSIONS_CALENDAR = "com.google.samples.apps.iosched.action.CLEAR_ALL_SESSIONS_CALENDAR"; public static final String EXTRA_ACCOUNT_NAME = "com.google.samples.apps.iosched.extra.ACCOUNT_NAME"; public static final String EXTRA_SESSION_START = "com.google.samples.apps.iosched.extra.SESSION_BLOCK_START"; public static final String EXTRA_SESSION_END = "com.google.samples.apps.iosched.extra.SESSION_BLOCK_END"; public static final String EXTRA_SESSION_TITLE = "com.google.samples.apps.iosched.extra.SESSION_TITLE"; public static final String EXTRA_SESSION_ROOM = "com.google.samples.apps.iosched.extra.SESSION_ROOM"; private static final long INVALID_CALENDAR_ID = -1; // TODO: localize private static final String CALENDAR_CLEAR_SEARCH_LIKE_EXPRESSION = "%added by Google I/O Android app%"; public SessionCalendarService() { super(TAG); } @Override protected void onHandleIntent(Intent intent) { final String action = intent.getAction(); Log.d(TAG, "Received intent: " + action); final ContentResolver resolver = getContentResolver(); boolean isAddEvent = false; if (ACTION_ADD_SESSION_CALENDAR.equals(action)) { isAddEvent = true; } else if (ACTION_REMOVE_SESSION_CALENDAR.equals(action)) { isAddEvent = false; } else if (ACTION_UPDATE_ALL_SESSIONS_CALENDAR.equals(action) && 
PrefUtils.shouldSyncCalendar(this)) { try { getContentResolver().applyBatch(CalendarContract.AUTHORITY, processAllSessionsCalendar(resolver, getCalendarId(intent))); sendBroadcast(new Intent( SessionCalendarService.ACTION_UPDATE_ALL_SESSIONS_CALENDAR_COMPLETED)); } catch (RemoteException e) { LOGE(TAG, "Error adding all sessions to Google Calendar", e); } catch (OperationApplicationException e) { LOGE(TAG, "Error adding all sessions to Google Calendar", e); } } else if (ACTION_CLEAR_ALL_SESSIONS_CALENDAR.equals(action)) { try { getContentResolver().applyBatch(CalendarContract.AUTHORITY, processClearAllSessions(resolver, getCalendarId(intent))); } catch (RemoteException e) { LOGE(TAG, "Error clearing all sessions from Google Calendar", e); } catch (OperationApplicationException e) { LOGE(TAG, "Error clearing all sessions from Google Calendar", e); } } else { return; } final Uri uri = intent.getData(); final Bundle extras = intent.getExtras(); if (uri == null || extras == null || !PrefUtils.shouldSyncCalendar(this)) { return; } try { resolver.applyBatch(CalendarContract.AUTHORITY, processSessionCalendar(resolver, getCalendarId(intent), isAddEvent, uri, extras.getLong(EXTRA_SESSION_START), extras.getLong(EXTRA_SESSION_END), extras.getString(EXTRA_SESSION_TITLE), extras.getString(EXTRA_SESSION_ROOM))); } catch (RemoteException e) { LOGE(TAG, "Error adding session to Google Calendar", e); } catch (OperationApplicationException e) { LOGE(TAG, "Error adding session to Google Calendar", e); } } /** * Gets the currently-logged in user's Google Calendar, or the Google Calendar for the user * specified in the given intent's {@link #EXTRA_ACCOUNT_NAME}. 
*/ private long getCalendarId(Intent intent) { final String accountName; if (intent != null && intent.hasExtra(EXTRA_ACCOUNT_NAME)) { accountName = intent.getStringExtra(EXTRA_ACCOUNT_NAME); } else { accountName = AccountUtils.getActiveAccountName(this); } if (TextUtils.isEmpty(accountName)) { return INVALID_CALENDAR_ID; } // TODO: The calendar ID should be stored in shared preferences upon choosing an account. Cursor calendarsCursor = getContentResolver().query( CalendarContract.Calendars.CONTENT_URI, new String[]{"_id"}, // TODO: What if the calendar is not displayed or not sync'd? "account_name = ownerAccount and account_name = ?", new String[]{accountName}, null); long calendarId = INVALID_CALENDAR_ID; if (calendarsCursor != null && calendarsCursor.moveToFirst()) { calendarId = calendarsCursor.getLong(0); calendarsCursor.close(); } return calendarId; } private String makeCalendarEventTitle(String sessionTitle) { return sessionTitle + getResources().getString(R.string.session_calendar_suffix); } /** * Processes all sessions in the * {@link com.google.samples.apps.iosched.provider.ScheduleProvider}, adding or removing * calendar events to/from the specified Google Calendar depending on whether a session is * in the user's schedule or not. */ private ArrayList<ContentProviderOperation> processAllSessionsCalendar(ContentResolver resolver, final long calendarId) { ArrayList<ContentProviderOperation> batch = new ArrayList<ContentProviderOperation>(); // Unable to find the Calendar associated with the user. Stop here. if (calendarId == INVALID_CALENDAR_ID) { return batch; } // Retrieves all sessions. For each session, add to Calendar if starred and attempt to // remove from Calendar if unstarred. 
Cursor cursor = resolver.query( ScheduleContract.Sessions.CONTENT_URI, SessionsQuery.PROJECTION, null, null, null); if (cursor != null) { while (cursor.moveToNext()) { Uri uri = ScheduleContract.Sessions.buildSessionUri( Long.valueOf(cursor.getLong(0)).toString()); boolean isAddEvent = (cursor.getInt(SessionsQuery.SESSION_IN_MY_SCHEDULE) == 1); if (isAddEvent) { batch.addAll(processSessionCalendar(resolver, calendarId, isAddEvent, uri, cursor.getLong(SessionsQuery.SESSION_START), cursor.getLong(SessionsQuery.SESSION_END), cursor.getString(SessionsQuery.SESSION_TITLE), cursor.getString(SessionsQuery.ROOM_NAME))); } } cursor.close(); } return batch; } /** * Adds or removes a single session to/from the specified Google Calendar. */ private ArrayList<ContentProviderOperation> processSessionCalendar( final ContentResolver resolver, final long calendarId, final boolean isAddEvent, final Uri sessionUri, final long sessionBlockStart, final long sessionBlockEnd, final String sessionTitle, final String sessionRoom) { ArrayList<ContentProviderOperation> batch = new ArrayList<ContentProviderOperation>(); // Unable to find the Calendar associated with the user. Stop here. if (calendarId == INVALID_CALENDAR_ID) { return batch; } final String calendarEventTitle = makeCalendarEventTitle(sessionTitle); Cursor cursor; ContentValues values = new ContentValues(); // Add Calendar event. if (isAddEvent) { if (sessionBlockStart == 0L || sessionBlockEnd == 0L || sessionTitle == null) { LOGW(TAG, "Unable to add a Calendar event due to insufficient input parameters."); return batch; } // Check if the calendar event exists first. If it does, we don't want to add a // duplicate one. cursor = resolver.query( CalendarContract.Events.CONTENT_URI, // URI new String[] {CalendarContract.Events._ID}, // Projection CalendarContract.Events.CALENDAR_ID + "=? and " // Selection + CalendarContract.Events.TITLE + "=? and " + CalendarContract.Events.DTSTART + ">=? 
and " + CalendarContract.Events.DTEND + "<=?", new String[]{ // Selection args Long.valueOf(calendarId).toString(), calendarEventTitle, Long.toString(Config.CONFERENCE_START_MILLIS), Long.toString(Config.CONFERENCE_END_MILLIS) }, null); long newEventId = -1; if (cursor != null && cursor.moveToFirst()) { // Calendar event already exists for this session. newEventId = cursor.getLong(0); cursor.close(); // Data fix (workaround): batch.add( ContentProviderOperation.newUpdate(CalendarContract.Events.CONTENT_URI) .withValue(CalendarContract.Events.EVENT_TIMEZONE, Config.CONFERENCE_TIMEZONE.getID()) .withSelection(CalendarContract.Events._ID + "=?", new String[]{Long.valueOf(newEventId).toString()}) .build() ); // End data fix. } else { // Calendar event doesn't exist, create it. // NOTE: we can't use batch processing here because we need the result of // the insert. values.clear(); values.put(CalendarContract.Events.DTSTART, sessionBlockStart); values.put(CalendarContract.Events.DTEND, sessionBlockEnd); values.put(CalendarContract.Events.EVENT_LOCATION, sessionRoom); values.put(CalendarContract.Events.TITLE, calendarEventTitle); values.put(CalendarContract.Events.CALENDAR_ID, calendarId); values.put(CalendarContract.Events.EVENT_TIMEZONE, Config.CONFERENCE_TIMEZONE.getID()); Uri eventUri = resolver.insert(CalendarContract.Events.CONTENT_URI, values); String eventId = eventUri.getLastPathSegment(); if (eventId == null) { return batch; // Should be empty at this point } newEventId = Long.valueOf(eventId); // Since we're adding session reminder to system notification, we're not creating // Calendar event reminders. If we were to create Calendar event reminders, this // is how we would do it. //values.put(CalendarContract.Reminders.EVENT_ID, Integer.valueOf(eventId)); //values.put(CalendarContract.Reminders.MINUTES, 10); //values.put(CalendarContract.Reminders.METHOD, // CalendarContract.Reminders.METHOD_ALERT); // Or default? 
//cr.insert(CalendarContract.Reminders.CONTENT_URI, values); //values.clear(); } // Update the session in our own provider with the newly created calendar event ID. values.clear(); values.put(ScheduleContract.Sessions.SESSION_CAL_EVENT_ID, newEventId); resolver.update(sessionUri, values, null, null); } else { // Remove Calendar event, if exists. // Get the event calendar id. cursor = resolver.query(sessionUri, new String[] {ScheduleContract.Sessions.SESSION_CAL_EVENT_ID}, null, null, null); long calendarEventId = -1; if (cursor != null && cursor.moveToFirst()) { calendarEventId = cursor.getLong(0); cursor.close(); } // Try to remove the Calendar Event based on key. If successful, move on; // otherwise, remove the event based on Event title. int affectedRows = 0; if (calendarEventId != -1) { affectedRows = resolver.delete( CalendarContract.Events.CONTENT_URI, CalendarContract.Events._ID + "=?", new String[]{Long.valueOf(calendarEventId).toString()}); } if (affectedRows == 0) { resolver.delete(CalendarContract.Events.CONTENT_URI, String.format("%s=? and %s=? and %s=? and %s=?", CalendarContract.Events.CALENDAR_ID, CalendarContract.Events.TITLE, CalendarContract.Events.DTSTART, CalendarContract.Events.DTEND), new String[]{Long.valueOf(calendarId).toString(), calendarEventTitle, Long.valueOf(sessionBlockStart).toString(), Long.valueOf(sessionBlockEnd).toString()}); } // Remove the session and calendar event association. values.clear(); values.put(ScheduleContract.Sessions.SESSION_CAL_EVENT_ID, (Long) null); resolver.update(sessionUri, values, null, null); } return batch; } /** * Removes all calendar entries associated with Google I/O 2013. */ private ArrayList<ContentProviderOperation> processClearAllSessions( ContentResolver resolver, long calendarId) { ArrayList<ContentProviderOperation> batch = new ArrayList<ContentProviderOperation>(); // Unable to find the Calendar associated with the user. Stop here. 
if (calendarId == INVALID_CALENDAR_ID) { Log.e(TAG, "Unable to find Calendar for user"); return batch; } // Delete all calendar entries matching the given title within the given time period batch.add(ContentProviderOperation .newDelete(CalendarContract.Events.CONTENT_URI) .withSelection( CalendarContract.Events.CALENDAR_ID + " = ? and " + CalendarContract.Events.TITLE + " LIKE ? and " + CalendarContract.Events.DTSTART + ">= ? and " + CalendarContract.Events.DTEND + "<= ?", new String[]{ Long.toString(calendarId), CALENDAR_CLEAR_SEARCH_LIKE_EXPRESSION, Long.toString(Config.CONFERENCE_START_MILLIS), Long.toString(Config.CONFERENCE_END_MILLIS) } ) .build()); return batch; } private interface SessionsQuery { String[] PROJECTION = { ScheduleContract.Sessions._ID, ScheduleContract.Sessions.SESSION_START, ScheduleContract.Sessions.SESSION_END, ScheduleContract.Sessions.SESSION_TITLE, ScheduleContract.Sessions.ROOM_NAME, ScheduleContract.Sessions.SESSION_IN_MY_SCHEDULE, }; int _ID = 0; int SESSION_START = 1; int SESSION_END = 2; int SESSION_TITLE = 3; int ROOM_NAME = 4; int SESSION_IN_MY_SCHEDULE = 5; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.maven; import java.io.File; import java.util.Collections; import java.util.List; import java.util.regex.Matcher; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugins.annotations.LifecyclePhase; import org.apache.maven.plugins.annotations.Mojo; import org.apache.maven.plugins.annotations.Parameter; import org.apache.maven.plugins.annotations.ResolutionScope; import org.apache.velocity.VelocityContext; /** * Generates Camel Component based on a collection of APIs. */ @Mojo(name = "fromApis", requiresDependencyResolution = ResolutionScope.COMPILE_PLUS_RUNTIME, requiresProject = true, defaultPhase = LifecyclePhase.GENERATE_SOURCES, threadSafe = true) public class ApiComponentGeneratorMojo extends AbstractApiMethodBaseMojo { /** * List of API names, proxies and code generation settings. */ @Parameter(required = true) protected ApiProxy[] apis; /** * Common Javadoc code generation settings. */ @Parameter protected FromJavadoc fromJavadoc = new FromJavadoc(); /** * Names of options that can be set to null value if not specified. */ @Parameter private String[] nullableOptions; /** * Method alias patterns for all APIs. 
*/ @Parameter private List<ApiMethodAlias> aliases = Collections.emptyList(); @Override public void executeInternal() throws MojoExecutionException { if (apis == null || apis.length == 0) { throw new MojoExecutionException("One or more API proxies are required"); } // fix apiName for single API use-case since Maven configurator sets empty parameters as null!!! if (apis.length == 1 && apis[0].getApiName() == null) { apis[0].setApiName(""); } // generate API methods for each API proxy for (ApiProxy api : apis) { // validate API configuration api.validate(); // create the appropriate code generator if signatureFile or fromJavaDoc are specified // this way users can skip generating API classes for duplicate proxy class references final AbstractApiMethodGeneratorMojo apiMethodGenerator = getApiMethodGenerator(api); if (apiMethodGenerator != null) { // configure API method properties and generate Proxy classes configureMethodGenerator(apiMethodGenerator, api); try { apiMethodGenerator.setProjectClassLoader(getProjectClassLoader()); // supply pre-constructed ClassLoader apiMethodGenerator.executeInternal(); // Call internal execute method } catch (Exception e) { final String msg = "Error generating source for " + api.getProxyClass() + ": " + e.getMessage(); throw new MojoExecutionException(msg, e); } } else { // make sure the proxy class is being generated elsewhere final String proxyClass = api.getProxyClass(); boolean found = false; for (ApiProxy other : apis) { if (other != api && proxyClass.equals(other.getProxyClass())) { found = true; break; } } if (!found) { throw new MojoExecutionException("Missing one of fromSignatureFile or fromJavadoc for " + proxyClass); } } // set common aliases if needed if (!aliases.isEmpty() && api.getAliases().isEmpty()) { api.setAliases(aliases); } // set common nullable options if needed if (api.getNullableOptions() == null) { api.setNullableOptions(nullableOptions); } } // generate ApiCollection mergeTemplate(getApiContext(), 
getApiCollectionFile(), "/api-collection.vm"); // generate ApiName mergeTemplate(getApiContext(), getApiNameFile(), "/api-name-enum.vm"); } private void configureMethodGenerator(AbstractApiMethodGeneratorMojo mojo, ApiProxy apiProxy) { // set AbstractGeneratorMojo properties mojo.componentName = componentName; mojo.scheme = scheme; mojo.outPackage = outPackage; mojo.componentPackage = componentPackage; mojo.project = project; // set AbstractSourceGeneratorMojo properties mojo.generatedSrcDir = generatedSrcDir; mojo.generatedTestDir = generatedTestDir; mojo.addCompileSourceRoots = addCompileSourceRoots; // set AbstractAPIMethodBaseMojo properties mojo.substitutions = apiProxy.getSubstitutions().length != 0 ? apiProxy.getSubstitutions() : substitutions; mojo.excludeConfigNames = apiProxy.getExcludeConfigNames() != null ? apiProxy.getExcludeConfigNames() : excludeConfigNames; mojo.excludeConfigTypes = apiProxy.getExcludeConfigTypes() != null ? apiProxy.getExcludeConfigTypes() : excludeConfigTypes; mojo.extraOptions = apiProxy.getExtraOptions() != null ? apiProxy.getExtraOptions() : extraOptions; // set AbstractAPIMethodGeneratorMojo properties mojo.proxyClass = apiProxy.getProxyClass(); } private AbstractApiMethodGeneratorMojo getApiMethodGenerator(ApiProxy api) { AbstractApiMethodGeneratorMojo apiMethodGenerator = null; final File signatureFile = api.getFromSignatureFile(); if (signatureFile != null) { final FileApiMethodGeneratorMojo fileMojo = new FileApiMethodGeneratorMojo(); fileMojo.signatureFile = signatureFile; apiMethodGenerator = fileMojo; } else { final FromJavadoc apiFromJavadoc = api.getFromJavadoc(); if (apiFromJavadoc != null) { final JavadocApiMethodGeneratorMojo javadocMojo = new JavadocApiMethodGeneratorMojo(); javadocMojo.excludePackages = apiFromJavadoc.getExcludePackages() != null ? apiFromJavadoc.getExcludePackages() : fromJavadoc.getExcludePackages(); javadocMojo.excludeClasses = apiFromJavadoc.getExcludeClasses() != null ? 
apiFromJavadoc.getExcludeClasses() : fromJavadoc.getExcludeClasses(); javadocMojo.includeMethods = apiFromJavadoc.getIncludeMethods() != null ? apiFromJavadoc.getIncludeMethods() : fromJavadoc.getIncludeMethods(); javadocMojo.excludeMethods = apiFromJavadoc.getExcludeMethods() != null ? apiFromJavadoc.getExcludeMethods() : fromJavadoc.getExcludeMethods(); javadocMojo.includeStaticMethods = apiFromJavadoc.getIncludeStaticMethods() != null ? apiFromJavadoc.getIncludeStaticMethods() : fromJavadoc.getIncludeStaticMethods(); apiMethodGenerator = javadocMojo; } } return apiMethodGenerator; } private VelocityContext getApiContext() { final VelocityContext context = new VelocityContext(); context.put("componentName", componentName); context.put("componentPackage", componentPackage); context.put("apis", apis); context.put("helper", getClass()); context.put("collectionName", getApiCollectionName()); context.put("apiNameEnum", getApiNameEnum()); return context; } private String getApiCollectionName() { return componentName + "ApiCollection"; } private String getApiNameEnum() { return componentName + "ApiName"; } private File getApiCollectionFile() { final StringBuilder fileName = getFileBuilder(); fileName.append(getApiCollectionName()).append(".java"); return new File(generatedSrcDir, fileName.toString()); } private File getApiNameFile() { final StringBuilder fileName = getFileBuilder(); fileName.append(getApiNameEnum()).append(".java"); return new File(generatedSrcDir, fileName.toString()); } private StringBuilder getFileBuilder() { final StringBuilder fileName = new StringBuilder(); fileName.append(outPackage.replaceAll("\\.", Matcher.quoteReplacement(File.separator))).append(File.separator); return fileName; } public static String getApiMethod(String proxyClass) { String proxyClassWithCanonicalName = getProxyClassWithCanonicalName(proxyClass); return proxyClassWithCanonicalName.substring(proxyClassWithCanonicalName.lastIndexOf('.') + 1) + "ApiMethod"; } public static 
String getEndpointConfig(String proxyClass) { String proxyClassWithCanonicalName = getProxyClassWithCanonicalName(proxyClass); return proxyClassWithCanonicalName.substring(proxyClassWithCanonicalName.lastIndexOf('.') + 1) + "EndpointConfiguration"; } private static String getProxyClassWithCanonicalName(String proxyClass) { return proxyClass.replace("$", ""); } public static String getEnumConstant(String enumValue) { if (enumValue == null || enumValue.isEmpty()) { return "DEFAULT"; } StringBuilder builder = new StringBuilder(); if (!Character.isJavaIdentifierStart(enumValue.charAt(0))) { builder.append('_'); } for (char c : enumValue.toCharArray()) { char upperCase = Character.toUpperCase(c); if (!Character.isJavaIdentifierPart(upperCase)) { builder.append('_'); } else { builder.append(upperCase); } } return builder.toString(); } public static String getNullableOptionValues(String[] nullableOptions) { if (nullableOptions == null || nullableOptions.length == 0) { return ""; } final StringBuilder builder = new StringBuilder(); final int nOptions = nullableOptions.length; int i = 0; for (String option : nullableOptions) { builder.append('"').append(option).append('"'); if (++i < nOptions) { builder.append(", "); } } return builder.toString(); } }
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.location.suplclient.asn1.supl2.lpp;

// Copyright 2008 Google Inc. All Rights Reserved.
/*
 * This class is AUTOMATICALLY GENERATED. Do NOT EDIT.
 */

import com.google.location.suplclient.asn1.base.Asn1Boolean;
import com.google.location.suplclient.asn1.base.Asn1Object;
import com.google.location.suplclient.asn1.base.Asn1Sequence;
import com.google.location.suplclient.asn1.base.Asn1Tag;
import com.google.location.suplclient.asn1.base.BitStream;
import com.google.location.suplclient.asn1.base.BitStreamReader;
import com.google.location.suplclient.asn1.base.SequenceComponent;
import com.google.common.collect.ImmutableList;
import java.util.Collection;
import javax.annotation.Nullable;

/**
 * ASN.1 SEQUENCE binding for the LPP {@code GNSS-DifferentialCorrectionsSupport} type,
 * with two mandatory components: {@code gnss-SignalIDs} and {@code dgnss-ValidityTimeSup}.
 * Encoding/decoding (aligned and unaligned PER) is inherited from {@link Asn1Sequence}.
 */
public  class GNSS_DifferentialCorrectionsSupport extends Asn1Sequence {
  // (-1, -1) means "no tag assigned"; getPossibleFirstTags() falls back to the
  // base sequence tags in that case.
  private static final Asn1Tag TAG_GNSS_DifferentialCorrectionsSupport
      = Asn1Tag.fromClassAndNumber(-1, -1);

  public GNSS_DifferentialCorrectionsSupport() {
    super();
  }

  @Override
  @Nullable
  protected Asn1Tag getTag() {
    return TAG_GNSS_DifferentialCorrectionsSupport;
  }

  @Override
  protected boolean isTagImplicit() {
    return true;
  }

  /** Tags that may legitimately begin an encoding of this type. */
  public static Collection<Asn1Tag> getPossibleFirstTags() {
    if (TAG_GNSS_DifferentialCorrectionsSupport != null) {
      return ImmutableList.of(TAG_GNSS_DifferentialCorrectionsSupport);
    } else {
      return Asn1Sequence.getPossibleFirstTags();
    }
  }

  /**
   * Creates a new GNSS_DifferentialCorrectionsSupport from the unaligned-PER
   * encoded stream.
   */
  public static GNSS_DifferentialCorrectionsSupport fromPerUnaligned(byte[] encodedBytes) {
    GNSS_DifferentialCorrectionsSupport result = new GNSS_DifferentialCorrectionsSupport();
    result.decodePerUnaligned(new BitStreamReader(encodedBytes));
    return result;
  }

  /**
   * Creates a new GNSS_DifferentialCorrectionsSupport from the aligned-PER
   * encoded stream.
   */
  public static GNSS_DifferentialCorrectionsSupport fromPerAligned(byte[] encodedBytes) {
    GNSS_DifferentialCorrectionsSupport result = new GNSS_DifferentialCorrectionsSupport();
    result.decodePerAligned(new BitStreamReader(encodedBytes));
    return result;
  }

  // The ASN.1 definition is extensible ("..."), so decoders must tolerate
  // unknown extension additions.
  @Override protected boolean isExtensible() {
    return true;
  }

  @Override public boolean containsExtensionValues() {
    for (SequenceComponent extensionComponent : getExtensionComponents()) {
      if (extensionComponent.isExplicitlySet()) return true;
    }
    return false;
  }

  // Component: gnss-SignalIDs (mandatory); null until set or decoded.
  private GNSS_SignalIDs gnssSignalIDs_;
  public GNSS_SignalIDs getGnssSignalIDs() {
    return gnssSignalIDs_;
  }
  /**
   * @throws ClassCastException if value is not a GNSS_SignalIDs
   */
  public void setGnssSignalIDs(Asn1Object value) {
    this.gnssSignalIDs_ = (GNSS_SignalIDs) value;
  }
  public GNSS_SignalIDs setGnssSignalIDsToNewInstance() {
    gnssSignalIDs_ = new GNSS_SignalIDs();
    return gnssSignalIDs_;
  }

  // Component: dgnss-ValidityTimeSup (mandatory BOOLEAN); null until set or decoded.
  private GNSS_DifferentialCorrectionsSupport.dgnss_ValidityTimeSupType dgnss_ValidityTimeSup_;
  public GNSS_DifferentialCorrectionsSupport.dgnss_ValidityTimeSupType getDgnss_ValidityTimeSup() {
    return dgnss_ValidityTimeSup_;
  }
  /**
   * @throws ClassCastException if value is not a GNSS_DifferentialCorrectionsSupport.dgnss_ValidityTimeSupType
   */
  public void setDgnss_ValidityTimeSup(Asn1Object value) {
    this.dgnss_ValidityTimeSup_ = (GNSS_DifferentialCorrectionsSupport.dgnss_ValidityTimeSupType) value;
  }
  public GNSS_DifferentialCorrectionsSupport.dgnss_ValidityTimeSupType setDgnss_ValidityTimeSupToNewInstance() {
    dgnss_ValidityTimeSup_ = new GNSS_DifferentialCorrectionsSupport.dgnss_ValidityTimeSupType();
    return dgnss_ValidityTimeSup_;
  }

  /**
   * Describes the two root components, in schema order, for the generic
   * sequence encoder/decoder in {@link Asn1Sequence}.
   */
  @Override public Iterable<? extends SequenceComponent> getComponents() {
    ImmutableList.Builder<SequenceComponent> builder = ImmutableList.builder();

    // Context-specific tag 0: gnss-SignalIDs.
    builder.add(new SequenceComponent() {
          Asn1Tag tag = Asn1Tag.fromClassAndNumber(2, 0);

          @Override public boolean isExplicitlySet() {
            return getGnssSignalIDs() != null;
          }

          @Override public boolean hasDefaultValue() {
            return false;
          }

          @Override public boolean isOptional() {
            return false;
          }

          @Override public Asn1Object getComponentValue() {
            return getGnssSignalIDs();
          }

          @Override public void setToNewInstance() {
            setGnssSignalIDsToNewInstance();
          }

          @Override public Collection<Asn1Tag> getPossibleFirstTags() {
            return tag == null ? GNSS_SignalIDs.getPossibleFirstTags() : ImmutableList.of(tag);
          }

          @Override
          public Asn1Tag getTag() {
            return tag;
          }

          @Override
          public boolean isImplicitTagging() {
            return true;
          }

          @Override public String toIndentedString(String indent) {
            return "gnssSignalIDs : "
                + getGnssSignalIDs().toIndentedString(indent);
          }
        });

    // Context-specific tag 1: dgnss-ValidityTimeSup.
    builder.add(new SequenceComponent() {
          Asn1Tag tag = Asn1Tag.fromClassAndNumber(2, 1);

          @Override public boolean isExplicitlySet() {
            return getDgnss_ValidityTimeSup() != null;
          }

          @Override public boolean hasDefaultValue() {
            return false;
          }

          @Override public boolean isOptional() {
            return false;
          }

          @Override public Asn1Object getComponentValue() {
            return getDgnss_ValidityTimeSup();
          }

          @Override public void setToNewInstance() {
            setDgnss_ValidityTimeSupToNewInstance();
          }

          @Override public Collection<Asn1Tag> getPossibleFirstTags() {
            return tag == null ? GNSS_DifferentialCorrectionsSupport.dgnss_ValidityTimeSupType.getPossibleFirstTags() : ImmutableList.of(tag);
          }

          @Override
          public Asn1Tag getTag() {
            return tag;
          }

          @Override
          public boolean isImplicitTagging() {
            return true;
          }

          @Override public String toIndentedString(String indent) {
            return "dgnss_ValidityTimeSup : "
                + getDgnss_ValidityTimeSup().toIndentedString(indent);
          }
        });

    return builder.build();
  }

  // No extension additions are defined for this type (yet).
  @Override public Iterable<? extends SequenceComponent> getExtensionComponents() {
    ImmutableList.Builder<SequenceComponent> builder = ImmutableList.builder();
    return builder.build();
  }

  // Copyright 2008 Google Inc. All Rights Reserved.
  /*
   * This class is AUTOMATICALLY GENERATED. Do NOT EDIT.
   */

  /**
   * ASN.1 BOOLEAN binding for the {@code dgnss-ValidityTimeSup} component.
   */
  public static class dgnss_ValidityTimeSupType extends Asn1Boolean {
    // (-1, -1) means "no tag assigned" — see outer class.
    private static final Asn1Tag TAG_dgnss_ValidityTimeSupType
        = Asn1Tag.fromClassAndNumber(-1, -1);

    public dgnss_ValidityTimeSupType() {
      super();
    }

    @Override
    @Nullable
    protected Asn1Tag getTag() {
      return TAG_dgnss_ValidityTimeSupType;
    }

    @Override
    protected boolean isTagImplicit() {
      return true;
    }

    /** Tags that may legitimately begin an encoding of this type. */
    public static Collection<Asn1Tag> getPossibleFirstTags() {
      if (TAG_dgnss_ValidityTimeSupType != null) {
        return ImmutableList.of(TAG_dgnss_ValidityTimeSupType);
      } else {
        return Asn1Boolean.getPossibleFirstTags();
      }
    }

    /**
     * Creates a new dgnss_ValidityTimeSupType from the unaligned-PER encoded
     * stream.
     */
    public static dgnss_ValidityTimeSupType fromPerUnaligned(byte[] encodedBytes) {
      dgnss_ValidityTimeSupType result = new dgnss_ValidityTimeSupType();
      result.decodePerUnaligned(new BitStreamReader(encodedBytes));
      return result;
    }

    /**
     * Creates a new dgnss_ValidityTimeSupType from the aligned-PER encoded
     * stream.
     */
    public static dgnss_ValidityTimeSupType fromPerAligned(byte[] encodedBytes) {
      dgnss_ValidityTimeSupType result = new dgnss_ValidityTimeSupType();
      result.decodePerAligned(new BitStreamReader(encodedBytes));
      return result;
    }

    // Encode/decode delegate entirely to the Asn1Boolean base class.
    @Override public Iterable<BitStream> encodePerUnaligned() {
      return super.encodePerUnaligned();
    }

    @Override public Iterable<BitStream> encodePerAligned() {
      return super.encodePerAligned();
    }

    @Override public void decodePerUnaligned(BitStreamReader reader) {
      super.decodePerUnaligned(reader);
    }

    @Override public void decodePerAligned(BitStreamReader reader) {
      super.decodePerAligned(reader);
    }

    @Override public String toString() {
      return toIndentedString("");
    }

    public String toIndentedString(String indent) {
      return "dgnss_ValidityTimeSupType = " + getValue() + ";\n";
    }
  }

  // Encode/decode delegate entirely to the Asn1Sequence base class, driven by
  // getComponents()/getExtensionComponents() above.
  @Override public Iterable<BitStream> encodePerUnaligned() {
    return super.encodePerUnaligned();
  }

  @Override public Iterable<BitStream> encodePerAligned() {
    return super.encodePerAligned();
  }

  @Override public void decodePerUnaligned(BitStreamReader reader) {
    super.decodePerUnaligned(reader);
  }

  @Override public void decodePerAligned(BitStreamReader reader) {
    super.decodePerAligned(reader);
  }

  @Override public String toString() {
    return toIndentedString("");
  }

  /** Renders only the components that are set, plus "..." when extensible. */
  public String toIndentedString(String indent) {
    StringBuilder builder = new StringBuilder();
    builder.append("GNSS_DifferentialCorrectionsSupport = {\n");
    final String internalIndent = indent + "  ";
    for (SequenceComponent component : getComponents()) {
      if (component.isExplicitlySet()) {
        builder.append(internalIndent)
            .append(component.toIndentedString(internalIndent));
      }
    }
    if (isExtensible()) {
      builder.append(internalIndent).append("...\n");
      for (SequenceComponent component : getExtensionComponents()) {
        if (component.isExplicitlySet()) {
          builder.append(internalIndent)
              .append(component.toIndentedString(internalIndent));
        }
      }
    }
    builder.append(indent).append("};\n");
    return builder.toString();
  }
}
/*
 * This file is part of SpongeAPI, licensed under the MIT License (MIT).
 *
 * Copyright (c) SpongePowered <https://www.spongepowered.org>
 * Copyright (c) contributors
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package me.kenzierocks.autoergel.osadata.util.weighted;

import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Random;

import com.google.common.collect.Lists;

/**
 * A random table whose entries are selected in proportion to their weight:
 * the larger an entry's weight relative to the table total, the more likely
 * it is to be chosen. Each roll yields the value of exactly one entry.
 *
 * @param <T>
 *            The entry type
 */
public class WeightedTable<T> extends RandomObjectTable<T> {

    // Cached sum of all (non-negative) entry weights; kept in sync by
    // recalculateWeight() after every structural change.
    private double totalWeight = 0;

    /** Creates a table that performs one roll per {@link #get(Random)}. */
    public WeightedTable() {
        super(1);
    }

    /** Creates a table that performs {@code rolls} rolls per {@link #get(Random)}. */
    public WeightedTable(int rolls) {
        super(rolls);
    }

    @Override
    public boolean add(TableEntry<T> entry) {
        final boolean changed = super.add(entry);
        if (changed) {
            recalculateWeight();
        }
        return changed;
    }

    @Override
    public boolean add(T object, double weight) {
        final boolean changed = super.add(object, weight);
        if (changed) {
            recalculateWeight();
        }
        return changed;
    }

    @Override
    public boolean addAll(Collection<? extends TableEntry<T>> c) {
        final boolean changed = super.addAll(c);
        if (changed) {
            recalculateWeight();
        }
        return changed;
    }

    @Override
    public boolean remove(Object entry) {
        final boolean changed = super.remove(entry);
        if (changed) {
            recalculateWeight();
        }
        return changed;
    }

    @Override
    public boolean removeObject(Object entry) {
        final boolean changed = super.removeObject(entry);
        if (changed) {
            recalculateWeight();
        }
        return changed;
    }

    @Override
    public boolean removeAll(Collection<?> c) {
        final boolean changed = super.removeAll(c);
        if (changed) {
            recalculateWeight();
        }
        return changed;
    }

    @Override
    public boolean retainAll(Collection<?> c) {
        final boolean changed = super.retainAll(c);
        if (changed) {
            recalculateWeight();
        }
        return changed;
    }

    @Override
    public void clear() {
        super.clear();
        recalculateWeight();
    }

    /**
     * Recalculates the total weight of all entries in this table.
     * Negatively weighted entries are dropped on sight: they could never be
     * selected anyway, and leaving them in would corrupt the running total.
     */
    protected void recalculateWeight() {
        this.totalWeight = 0;
        final Iterator<TableEntry<T>> cursor = this.entries.iterator();
        while (cursor.hasNext()) {
            final TableEntry<T> entry = cursor.next();
            if (entry.getWeight() < 0) {
                cursor.remove();
            } else {
                this.totalWeight += entry.getWeight();
            }
        }
    }

    /**
     * Performs {@link #getRolls()} weighted rolls and collects the results.
     * An empty table yields an empty list.
     */
    @Override
    public List<T> get(Random rand) {
        final List<T> results = Lists.newArrayList();
        if (this.entries.isEmpty()) {
            return results;
        }
        for (int rollIndex = 0; rollIndex < getRolls(); rollIndex++) {
            // Pick a point in [0, totalWeight) and walk the entries until the
            // cumulative weight passes it.
            double remaining = rand.nextDouble() * this.totalWeight;
            for (final TableEntry<T> candidate : this.entries) {
                remaining -= candidate.getWeight();
                if (remaining <= 0) {
                    if (candidate instanceof NestedTableEntry) {
                        results.addAll(((NestedTableEntry<T>) candidate).get(rand));
                    } else if (candidate instanceof WeightedObject) {
                        results.add(((WeightedObject<T>) candidate).get());
                    }
                    break;
                }
            }
        }
        return results;
    }

    @Override
    public Iterator<TableEntry<T>> iterator() {
        return new Itr();
    }

    @Override
    public boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        if (!(o instanceof WeightedTable)) {
            return false;
        }
        final WeightedTable<?> that = (WeightedTable<?>) o;
        if (getRolls() != that.getRolls()
                || this.entries.size() != that.entries.size()) {
            return false;
        }
        // Order-sensitive, element-wise comparison.
        for (int i = 0; i < this.entries.size(); i++) {
            if (!this.entries.get(i).equals(that.entries.get(i))) {
                return false;
            }
        }
        return true;
    }

    @Override
    public int hashCode() {
        int hash = 1;
        hash = hash * 37 + getRolls();
        for (final TableEntry<T> entry : this.entries) {
            hash = hash * 37 + entry.hashCode();
        }
        return hash;
    }

    @Override
    public String toString() {
        final StringBuilder out = new StringBuilder();
        out.append("WeightedTable (rolls=").append(getRolls());
        out.append(",entries=").append(this.entries.size()).append(") {\n");
        for (final TableEntry<T> entry : this.entries) {
            out.append("\t").append(entry.toString()).append("\n");
        }
        out.append("}");
        return out.toString();
    }

    /**
     * An iterator which will properly trigger a rebuild of the total weight on
     * removal.
     */
    private class Itr implements Iterator<TableEntry<T>> {

        private final Iterator<TableEntry<T>> delegate;

        protected Itr() {
            this.delegate = WeightedTable.super.iterator();
        }

        @Override
        public boolean hasNext() {
            return this.delegate.hasNext();
        }

        @Override
        public TableEntry<T> next() {
            return this.delegate.next();
        }

        @Override
        public void remove() {
            this.delegate.remove();
            WeightedTable.this.recalculateWeight();
        }
    }
}
/*
 * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.identitymanagement.model;

import java.io.Serializable;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Container for the parameters to the {@link com.amazonaws.services.identitymanagement.AmazonIdentityManagement#listUserPolicies(ListUserPoliciesRequest) ListUserPolicies operation}.
 * <p>
 * Lists the names of the inline policies embedded in the specified user.
 * Managed policies attached to the user are NOT included; use
 * ListAttachedUserPolicies for those. Results can be paginated with
 * <code>Marker</code> / <code>MaxItems</code>; a user with no inline
 * policies yields an empty list.
 * </p>
 *
 * @see com.amazonaws.services.identitymanagement.AmazonIdentityManagement#listUserPolicies(ListUserPoliciesRequest)
 */
public class ListUserPoliciesRequest extends AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * The name of the user to list policies for.
     * <p>
     * <b>Constraints:</b> Length 1 - 128, pattern <code>[\w+=,.@-]+</code>
     */
    private String userName;

    /**
     * Pagination token: set only to the <code>Marker</code> value from a
     * previous truncated response to continue listing from that point.
     * <p>
     * <b>Constraints:</b> Length 1 - 320, pattern <code>[&#92;u0020-&#92;u00FF]+</code>
     */
    private String marker;

    /**
     * Maximum number of items to return per page (service default 100;
     * fewer may be returned even when more are available — check
     * <code>IsTruncated</code>/<code>Marker</code> in the response).
     * <p>
     * <b>Constraints:</b> Range 1 - 1000
     */
    private Integer maxItems;

    /**
     * Default constructor for a new ListUserPoliciesRequest object.  Callers should use the
     * setter or fluent setter (with...) methods to initialize this object after creating it.
     */
    public ListUserPoliciesRequest() {}

    /**
     * Constructs a new ListUserPoliciesRequest object.
     * Callers should use the setter or fluent setter (with...) methods to
     * initialize any additional object members.
     *
     * @param userName The name of the user to list policies for.
     */
    public ListUserPoliciesRequest(String userName) {
        setUserName(userName);
    }

    /**
     * @return The name of the user to list policies for.
     */
    public String getUserName() {
        return userName;
    }

    /**
     * @param userName The name of the user to list policies for
     *                 (length 1 - 128, pattern <code>[\w+=,.@-]+</code>).
     */
    public void setUserName(String userName) {
        this.userName = userName;
    }

    /**
     * Fluent variant of {@link #setUserName(String)}.
     *
     * @param userName The name of the user to list policies for.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public ListUserPoliciesRequest withUserName(String userName) {
        this.userName = userName;
        return this;
    }

    /**
     * @return The pagination marker from the previous truncated response,
     *         or null if starting from the beginning.
     */
    public String getMarker() {
        return marker;
    }

    /**
     * @param marker Pagination marker from a previous truncated response
     *               (length 1 - 320, pattern <code>[&#92;u0020-&#92;u00FF]+</code>).
     */
    public void setMarker(String marker) {
        this.marker = marker;
    }

    /**
     * Fluent variant of {@link #setMarker(String)}.
     *
     * @param marker Pagination marker from a previous truncated response.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public ListUserPoliciesRequest withMarker(String marker) {
        this.marker = marker;
        return this;
    }

    /**
     * @return The maximum number of items to return per page, or null to use
     *         the service default (100).
     */
    public Integer getMaxItems() {
        return maxItems;
    }

    /**
     * @param maxItems Maximum number of items per page (range 1 - 1000).
     *                 Optional; the service may still return fewer results
     *                 and signal continuation via <code>IsTruncated</code>.
     */
    public void setMaxItems(Integer maxItems) {
        this.maxItems = maxItems;
    }

    /**
     * Fluent variant of {@link #setMaxItems(Integer)}.
     *
     * @param maxItems Maximum number of items per page (range 1 - 1000).
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public ListUserPoliciesRequest withMaxItems(Integer maxItems) {
        this.maxItems = maxItems;
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging. Only non-null fields are included.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getUserName() != null) sb.append("UserName: " + getUserName() + ",");
        if (getMarker() != null) sb.append("Marker: " + getMarker() + ",");
        if (getMaxItems() != null) sb.append("MaxItems: " + getMaxItems() );
        sb.append("}");
        return sb.toString();
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getUserName() == null) ? 0 : getUserName().hashCode());
        hashCode = prime * hashCode + ((getMarker() == null) ? 0 : getMarker().hashCode());
        hashCode = prime * hashCode + ((getMaxItems() == null) ? 0 : getMaxItems().hashCode());
        return hashCode;
    }

    // Null-safe field-by-field comparison (generated style: XOR detects the
    // "exactly one side null" case).
    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        if (obj == null) return false;

        if (obj instanceof ListUserPoliciesRequest == false) return false;
        ListUserPoliciesRequest other = (ListUserPoliciesRequest)obj;

        if (other.getUserName() == null ^ this.getUserName() == null) return false;
        if (other.getUserName() != null && other.getUserName().equals(this.getUserName()) == false) return false;
        if (other.getMarker() == null ^ this.getMarker() == null) return false;
        if (other.getMarker() != null && other.getMarker().equals(this.getMarker()) == false) return false;
        if (other.getMaxItems() == null ^ this.getMaxItems() == null) return false;
        if (other.getMaxItems() != null && other.getMaxItems().equals(this.getMaxItems()) == false) return false;
        return true;
    }

    @Override
    public ListUserPoliciesRequest clone() {

        return (ListUserPoliciesRequest) super.clone();
    }
}
/*
 * Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

/**
 * @test
 * @bug 7192963
 * @summary assert(_in[req-1] == this) failed: Must pass arg count to 'new'
 *
 * @run main/othervm/timeout=400 -Xbatch -Xmx64m TestByteVect
 */

// NOTE(review): jtreg/HotSpot JIT regression test. The exact shapes of the
// loops below (simple stride-1 vs. hand-unrolled-by-4) are deliberate — they
// are the patterns the compiler's auto-vectorizer is exercised on. Do not
// "clean up" or merge these loops.
public class TestByteVect {
  // Odd array length: forces a scalar post-loop after any vectorized main loop.
  private static final int ARRLEN = 997;
  // Enough iterations (-Xbatch) to get the test_* methods JIT-compiled.
  private static final int ITERS  = 11000;

  // Entry point: exits with status 97 if any verification fails.
  public static void main(String args[]) {
    System.out.println("Testing Byte vectors");
    int errn = test();
    if (errn > 0) {
      System.err.println("FAILED: " + errn + " errors");
      System.exit(97);
    }
    System.out.println("PASSED");
  }

  // Warms up, verifies results of all six kernels, then prints timings.
  // Returns the number of verification mismatches (0 = pass).
  static int test() {
    byte[] a0 = new byte[ARRLEN];
    byte[] a1 = new byte[ARRLEN];
    // Initialize
    for (int i=0; i<ARRLEN; i++) {
      a1[i] = (byte)i;
    }
    System.out.println("Warmup");
    for (int i=0; i<ITERS; i++) {
      test_init(a0);
      test_addi(a0, a1);
      test_lsai(a0, a1);
      test_unrl_init(a0);
      test_unrl_addi(a0, a1);
      test_unrl_lsai(a0, a1);
    }
    // Test and verify results
    System.out.println("Verification");
    int errn = 0;
    {
      test_init(a0);
      for (int i=0; i<ARRLEN; i++) {
        errn += verify("test_init: ", i, a0[i], (byte)(i&3));
      }
      test_addi(a0, a1);
      for (int i=0; i<ARRLEN; i++) {
        errn += verify("test_addi: ", i, a0[i], (byte)(i+(i&3)));
      }
      test_lsai(a0, a1);
      for (int i=0; i<ARRLEN; i++) {
        errn += verify("test_lsai: ", i, a0[i], (byte)(i<<(i&3)));
      }
      test_unrl_init(a0);
      for (int i=0; i<ARRLEN; i++) {
        errn += verify("test_unrl_init: ", i, a0[i], (byte)(i&3));
      }
      test_unrl_addi(a0, a1);
      for (int i=0; i<ARRLEN; i++) {
        errn += verify("test_unrl_addi: ", i, a0[i], (byte)(i+(i&3)));
      }
      test_unrl_lsai(a0, a1);
      for (int i=0; i<ARRLEN; i++) {
        errn += verify("test_unrl_lsai: ", i, a0[i], (byte)(i<<(i&3)));
      }
    }

    if (errn > 0)
      return errn;

    // Rough wall-clock timings; informational only, not part of pass/fail.
    System.out.println("Time");
    long start, end;

    start = System.currentTimeMillis();
    for (int i=0; i<ITERS; i++) {
      test_init(a0);
    }
    end = System.currentTimeMillis();
    System.out.println("test_init: " + (end - start));

    start = System.currentTimeMillis();
    for (int i=0; i<ITERS; i++) {
      test_addi(a0, a1);
    }
    end = System.currentTimeMillis();
    System.out.println("test_addi: " + (end - start));

    start = System.currentTimeMillis();
    for (int i=0; i<ITERS; i++) {
      test_lsai(a0, a1);
    }
    end = System.currentTimeMillis();
    System.out.println("test_lsai: " + (end - start));

    start = System.currentTimeMillis();
    for (int i=0; i<ITERS; i++) {
      test_unrl_init(a0);
    }
    end = System.currentTimeMillis();
    System.out.println("test_unrl_init: " + (end - start));

    start = System.currentTimeMillis();
    for (int i=0; i<ITERS; i++) {
      test_unrl_addi(a0, a1);
    }
    end = System.currentTimeMillis();
    System.out.println("test_unrl_addi: " + (end - start));

    start = System.currentTimeMillis();
    for (int i=0; i<ITERS; i++) {
      test_unrl_lsai(a0, a1);
    }
    end = System.currentTimeMillis();
    System.out.println("test_unrl_lsai: " + (end - start));

    return errn;
  }

  // a0[i] = i&3 — simple stride-1 store kernel.
  static void test_init(byte[] a0) {
    for (int i = 0; i < a0.length; i+=1) {
      a0[i] = (byte)(i&3);
    }
  }
  // a0[i] = a1[i] + (i&3) — add with loop-variant addend.
  static void test_addi(byte[] a0, byte[] a1) {
    for (int i = 0; i < a0.length; i+=1) {
      a0[i] = (byte)(a1[i]+(i&3));
    }
  }
  // a0[i] = a1[i] << (i&3) — left-shift with loop-variant shift count.
  static void test_lsai(byte[] a0, byte[] a1) {
    for (int i = 0; i < a0.length; i+=1) {
      a0[i] = (byte)(a1[i]<<(i&3));
    }
  }
  // Same result as test_init, but hand-unrolled by 4 with a scalar tail loop.
  static void test_unrl_init(byte[] a0) {
    int i = 0;
    for (; i < a0.length-4; i+=4) {
      a0[i+0] = 0;
      a0[i+1] = 1;
      a0[i+2] = 2;
      a0[i+3] = 3;
    }
    for (; i < a0.length; i++) {
      a0[i] = (byte)(i&3);
    }
  }
  // Same result as test_addi, hand-unrolled by 4 with a scalar tail loop.
  static void test_unrl_addi(byte[] a0, byte[] a1) {
    int i = 0;
    for (; i < a0.length-4; i+=4) {
      a0[i+0] = (byte)(a1[i+0]+0);
      a0[i+1] = (byte)(a1[i+1]+1);
      a0[i+2] = (byte)(a1[i+2]+2);
      a0[i+3] = (byte)(a1[i+3]+3);
    }
    for (; i < a0.length; i++) {
      a0[i] = (byte)(a1[i]+(i&3));
    }
  }
  // Same result as test_lsai, hand-unrolled by 4 with a scalar tail loop.
  static void test_unrl_lsai(byte[] a0, byte[] a1) {
    int i = 0;
    for (; i < a0.length-4; i+=4) {
      a0[i+0] = (byte)(a1[i+0]<<0);
      a0[i+1] = (byte)(a1[i+1]<<1);
      a0[i+2] = (byte)(a1[i+2]<<2);
      a0[i+3] = (byte)(a1[i+3]<<3);
    }
    for (; i < a0.length; i++) {
      a0[i] = (byte)(a1[i]<<(i&3));
    }
  }

  // Reports one mismatch to stderr; returns 1 on mismatch, 0 on match.
  static int verify(String text, int i, byte elem, byte val) {
    if (elem != val) {
      System.err.println(text + "[" + i + "] = " + elem + " != " + val);
      return 1;
    }
    return 0;
  }
}
/* -*- Mode: Java; c-basic-offset: 4; tab-width: 4; indent-tabs-mode: nil; -*- * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ package org.mozilla.gecko; import org.mozilla.gecko.gfx.DisplayPortMetrics; import org.mozilla.gecko.gfx.ViewportMetrics; import android.content.res.Resources; import android.graphics.Point; import android.graphics.PointF; import android.graphics.Rect; import android.hardware.Sensor; import android.hardware.SensorEvent; import android.hardware.SensorManager; import android.location.Address; import android.location.Location; import android.os.Build; import android.os.SystemClock; import android.util.DisplayMetrics; import android.util.Log; import android.view.KeyEvent; import android.view.MotionEvent; import java.nio.ByteBuffer; /* We're not allowed to hold on to most events given to us * so we save the parts of the events we want to use in GeckoEvent. * Fields have different meanings depending on the event type. */ /* This class is referenced by Robocop via reflection; use care when * modifying the signature. 
*/
public class GeckoEvent {
    private static final String LOGTAG = "GeckoEvent";

    // Event type codes carried in mType; these values are mirrored by the
    // native (Gecko) side, so they must not be renumbered.
    private static final int INVALID = -1;
    private static final int NATIVE_POKE = 0;
    private static final int KEY_EVENT = 1;
    private static final int MOTION_EVENT = 2;
    private static final int SENSOR_EVENT = 3;
    private static final int UNUSED1_EVENT = 4;
    private static final int LOCATION_EVENT = 5;
    private static final int IME_EVENT = 6;
    private static final int DRAW = 7;
    private static final int SIZE_CHANGED = 8;
    private static final int ACTIVITY_STOPPING = 9;
    private static final int ACTIVITY_PAUSING = 10;
    private static final int ACTIVITY_SHUTDOWN = 11;
    private static final int LOAD_URI = 12;
    private static final int SURFACE_CREATED = 13;
    private static final int SURFACE_DESTROYED = 14;
    private static final int GECKO_EVENT_SYNC = 15;
    private static final int ACTIVITY_START = 17;
    private static final int BROADCAST = 19;
    private static final int VIEWPORT = 20;
    private static final int VISITED = 21;
    private static final int NETWORK_CHANGED = 22;
    private static final int UNUSED3_EVENT = 23;
    private static final int ACTIVITY_RESUMING = 24;
    private static final int SCREENSHOT = 25;
    private static final int UNUSED2_EVENT = 26;
    private static final int SCREENORIENTATION_CHANGED = 27;
    private static final int COMPOSITOR_PAUSE = 28;
    private static final int COMPOSITOR_RESUME = 29;
    private static final int PAINT_LISTEN_START_EVENT = 30;
    private static final int NATIVE_GESTURE_EVENT = 31;

    /**
     * These DOM_KEY_LOCATION constants mirror the DOM KeyboardEvent's constants.
     * @see https://developer.mozilla.org/en-US/docs/DOM/KeyboardEvent#Key_location_constants
     */
    private static final int DOM_KEY_LOCATION_STANDARD = 0;
    private static final int DOM_KEY_LOCATION_LEFT = 1;
    private static final int DOM_KEY_LOCATION_RIGHT = 2;
    private static final int DOM_KEY_LOCATION_NUMPAD = 3;
    private static final int DOM_KEY_LOCATION_MOBILE = 4;
    private static final int DOM_KEY_LOCATION_JOYSTICK = 5;

    // IME sub-action codes (carried in mAction for IME_EVENT).
    public static final int IME_COMPOSITION_END = 0;
    public static final int IME_COMPOSITION_BEGIN = 1;
    public static final int IME_SET_TEXT = 2;
    public static final int IME_GET_TEXT = 3;
    public static final int IME_DELETE_TEXT = 4;
    public static final int IME_SET_SELECTION = 5;
    public static final int IME_GET_SELECTION = 6;
    public static final int IME_ADD_RANGE = 7;

    // IME range types (mRangeType).
    public static final int IME_RANGE_CARETPOSITION = 1;
    public static final int IME_RANGE_RAWINPUT = 2;
    public static final int IME_RANGE_SELECTEDRAWTEXT = 3;
    public static final int IME_RANGE_CONVERTEDTEXT = 4;
    public static final int IME_RANGE_SELECTEDCONVERTEDTEXT = 5;

    // IME range style bit flags (mRangeStyles).
    public static final int IME_RANGE_UNDERLINE = 1;
    public static final int IME_RANGE_FORECOLOR = 2;
    public static final int IME_RANGE_BACKCOLOR = 4;

    // Native gesture (magnify/pinch) actions for NATIVE_GESTURE_EVENT.
    public static final int ACTION_MAGNIFY_START = 11;
    public static final int ACTION_MAGNIFY = 12;
    public static final int ACTION_MAGNIFY_END = 13;

    // Fields have different meanings depending on mType; only the subset
    // relevant to a given event type is populated by its factory method.
    final public int mType;
    public int mAction;
    public long mTime;
    public Point[] mPoints;
    public int[] mPointIndicies;
    public int mPointerIndex; // index of the point that has changed
    public float[] mOrientations;
    public float[] mPressures;
    public Point[] mPointRadii;
    public Rect mRect;
    public double mX, mY, mZ;
    public int mMetaState, mFlags;
    public int mKeyCode, mUnicodeChar;
    public int mRepeatCount;
    public int mOffset, mCount;
    public String mCharacters, mCharactersExtra;
    public int mRangeType, mRangeStyles;
    public int mRangeForeColor, mRangeBackColor;
    public Location mLocation;
    public Address mAddress;
    public int mDomKeyLocation;
    public double mBandwidth;
    public boolean mCanBeMetered;
    public int mNativeWindow;
    public short mScreenOrientation;
    public ByteBuffer mBuffer;

    // Instances are only created through the static create*Event factories.
    private GeckoEvent(int evType) {
        mType = evType;
    }

    // For the four lifecycle events below, mFlags is 1 when the application
    // remains in the foreground and 0 when it is in the background.
    public static GeckoEvent createPauseEvent(boolean isApplicationInBackground) {
        GeckoEvent event = new GeckoEvent(ACTIVITY_PAUSING);
        event.mFlags = isApplicationInBackground ? 0 : 1;
        return event;
    }

    public static GeckoEvent createResumeEvent(boolean isApplicationInBackground) {
        GeckoEvent event = new GeckoEvent(ACTIVITY_RESUMING);
        event.mFlags = isApplicationInBackground ? 0 : 1;
        return event;
    }

    public static GeckoEvent createStoppingEvent(boolean isApplicationInBackground) {
        GeckoEvent event = new GeckoEvent(ACTIVITY_STOPPING);
        event.mFlags = isApplicationInBackground ? 0 : 1;
        return event;
    }

    public static GeckoEvent createStartEvent(boolean isApplicationInBackground) {
        GeckoEvent event = new GeckoEvent(ACTIVITY_START);
        event.mFlags = isApplicationInBackground ? 0 : 1;
        return event;
    }

    public static GeckoEvent createShutdownEvent() {
        return new GeckoEvent(ACTIVITY_SHUTDOWN);
    }

    public static GeckoEvent createSyncEvent() {
        return new GeckoEvent(GECKO_EVENT_SYNC);
    }

    public static GeckoEvent createKeyEvent(KeyEvent k) {
        GeckoEvent event = new GeckoEvent(KEY_EVENT);
        event.initKeyEvent(k);
        return event;
    }

    public static GeckoEvent createCompositorPauseEvent() {
        return new GeckoEvent(COMPOSITOR_PAUSE);
    }

    public static GeckoEvent createCompositorResumeEvent() {
        return new GeckoEvent(COMPOSITOR_RESUME);
    }

    // Copy the fields we need out of the Android KeyEvent (we are not allowed
    // to hold on to the event itself).
    private void initKeyEvent(KeyEvent k) {
        mAction = k.getAction();
        mTime = k.getEventTime();
        mMetaState = k.getMetaState();
        mFlags = k.getFlags();
        mKeyCode = k.getKeyCode();
        mUnicodeChar = k.getUnicodeChar();
        mRepeatCount = k.getRepeatCount();
        mCharacters = k.getCharacters();
        mDomKeyLocation = isJoystickButton(mKeyCode) ? DOM_KEY_LOCATION_JOYSTICK
                                                     : DOM_KEY_LOCATION_MOBILE;
    }

    /**
     * This method tests if a key is one of the described in:
     * https://bugzilla.mozilla.org/show_bug.cgi?id=756504#c0
     * @param keyCode int with the key code (Android key constant from KeyEvent)
     * @return true if the key is one of the listed above, false otherwise.
     */
    private static boolean isJoystickButton(int keyCode) {
        switch (keyCode) {
            case KeyEvent.KEYCODE_DPAD_CENTER:
            case KeyEvent.KEYCODE_DPAD_LEFT:
            case KeyEvent.KEYCODE_DPAD_RIGHT:
            case KeyEvent.KEYCODE_DPAD_DOWN:
            case KeyEvent.KEYCODE_DPAD_UP:
                return true;
            default:
                // KeyEvent.isGamepadButton only exists from API level 12 on;
                // fall back to the local re-implementation below otherwise.
                if (Build.VERSION.SDK_INT >= 12) {
                    return KeyEvent.isGamepadButton(keyCode);
                }
                return GeckoEvent.isGamepadButton(keyCode);
        }
    }

    /**
     * This method is a replacement for the KeyEvent.isGamepadButton method to be
     * compatible with Build.VERSION.SDK_INT < 12. This is an implementation of the
     * same method isGamepadButton available after SDK 12.
     * @param keyCode int with the key code (Android key constant from KeyEvent).
     * @return True if the keycode is a gamepad button, such as {@link #KEYCODE_BUTTON_A}.
     */
    private static boolean isGamepadButton(int keyCode) {
        switch (keyCode) {
            case KeyEvent.KEYCODE_BUTTON_A:
            case KeyEvent.KEYCODE_BUTTON_B:
            case KeyEvent.KEYCODE_BUTTON_C:
            case KeyEvent.KEYCODE_BUTTON_X:
            case KeyEvent.KEYCODE_BUTTON_Y:
            case KeyEvent.KEYCODE_BUTTON_Z:
            case KeyEvent.KEYCODE_BUTTON_L1:
            case KeyEvent.KEYCODE_BUTTON_R1:
            case KeyEvent.KEYCODE_BUTTON_L2:
            case KeyEvent.KEYCODE_BUTTON_R2:
            case KeyEvent.KEYCODE_BUTTON_THUMBL:
            case KeyEvent.KEYCODE_BUTTON_THUMBR:
            case KeyEvent.KEYCODE_BUTTON_START:
            case KeyEvent.KEYCODE_BUTTON_SELECT:
            case KeyEvent.KEYCODE_BUTTON_MODE:
            case KeyEvent.KEYCODE_BUTTON_1:
            case KeyEvent.KEYCODE_BUTTON_2:
            case KeyEvent.KEYCODE_BUTTON_3:
            case KeyEvent.KEYCODE_BUTTON_4:
            case KeyEvent.KEYCODE_BUTTON_5:
            case KeyEvent.KEYCODE_BUTTON_6:
            case KeyEvent.KEYCODE_BUTTON_7:
            case KeyEvent.KEYCODE_BUTTON_8:
            case KeyEvent.KEYCODE_BUTTON_9:
            case KeyEvent.KEYCODE_BUTTON_10:
            case KeyEvent.KEYCODE_BUTTON_11:
            case KeyEvent.KEYCODE_BUTTON_12:
            case KeyEvent.KEYCODE_BUTTON_13:
            case KeyEvent.KEYCODE_BUTTON_14:
            case KeyEvent.KEYCODE_BUTTON_15:
            case KeyEvent.KEYCODE_BUTTON_16:
                return true;
            default:
                return false;
        }
    }

    // May return null when Gecko isn't ready to convert view coordinates yet.
    public static GeckoEvent createNativeGestureEvent(int action, PointF pt, double size) {
        GeckoEvent event = new GeckoEvent(NATIVE_GESTURE_EVENT);
        event.mAction = action;
        event.mCount = 1;
        event.mPoints = new Point[1];

        PointF geckoPoint = new PointF(pt.x, pt.y);
        geckoPoint = GeckoApp.mAppContext.getLayerView().convertViewPointToLayerPoint(geckoPoint);

        if (geckoPoint == null) {
            // This could happen if Gecko isn't ready yet.
            return null;
        }

        event.mPoints[0] = new Point(Math.round(geckoPoint.x), Math.round(geckoPoint.y));

        event.mX = size;
        event.mTime = System.currentTimeMillis();
        return event;
    }

    public static GeckoEvent createMotionEvent(MotionEvent m) {
        GeckoEvent event = new GeckoEvent(MOTION_EVENT);
        event.initMotionEvent(m);
        return event;
    }

    // Snapshot all per-pointer data out of the MotionEvent.
    private void initMotionEvent(MotionEvent m) {
        mAction = m.getAction();
        // Convert the event's uptime-based timestamp to wall-clock time.
        mTime = (System.currentTimeMillis() - SystemClock.elapsedRealtime()) + m.getEventTime();
        mMetaState = m.getMetaState();

        switch (mAction & MotionEvent.ACTION_MASK) {
            case MotionEvent.ACTION_CANCEL:
            case MotionEvent.ACTION_UP:
            case MotionEvent.ACTION_POINTER_UP:
            case MotionEvent.ACTION_POINTER_DOWN:
            case MotionEvent.ACTION_DOWN:
            case MotionEvent.ACTION_MOVE:
            case MotionEvent.ACTION_HOVER_ENTER:
            case MotionEvent.ACTION_HOVER_MOVE:
            case MotionEvent.ACTION_HOVER_EXIT: {
                mCount = m.getPointerCount();
                mPoints = new Point[mCount];
                mPointIndicies = new int[mCount];
                mOrientations = new float[mCount];
                mPressures = new float[mCount];
                mPointRadii = new Point[mCount];
                mPointerIndex = (mAction & MotionEvent.ACTION_POINTER_INDEX_MASK)
                                >> MotionEvent.ACTION_POINTER_INDEX_SHIFT;
                for (int i = 0; i < mCount; i++) {
                    addMotionPoint(i, i, m);
                }
                break;
            }
            default: {
                // Unknown action: record no points.
                mCount = 0;
                mPointerIndex = -1;
                mPoints = new Point[mCount];
                mPointIndicies = new int[mCount];
                mOrientations = new float[mCount];
                mPressures = new float[mCount];
                mPointRadii = new Point[mCount];
            }
        }
    }

    // Fill slot 'index' of the point arrays from pointer 'eventIndex' of the
    // MotionEvent.  On any failure the slot is zero-filled rather than thrown.
    public void addMotionPoint(int index, int eventIndex, MotionEvent event) {
        try {
            PointF geckoPoint = new PointF(event.getX(eventIndex), event.getY(eventIndex));
            geckoPoint = GeckoApp.mAppContext.getLayerView().convertViewPointToLayerPoint(geckoPoint);

            mPoints[index] = new Point(Math.round(geckoPoint.x), Math.round(geckoPoint.y));
            mPointIndicies[index] = event.getPointerId(eventIndex);
            // getToolMajor, getToolMinor and getOrientation are API Level 9 features
            if (Build.VERSION.SDK_INT >= 9) {
                double radians = event.getOrientation(eventIndex);
                mOrientations[index] = (float) Math.toDegrees(radians);
                // w3c touchevents spec does not allow orientations == 90
                // this shifts it to -90, which will be shifted to zero below
                if (mOrientations[index] == 90)
                    mOrientations[index] = -90;

                // w3c touchevent radius are given by an orientation between 0 and 90
                // the radius is found by removing the orientation and measuring the x and y
                // radius of the resulting ellipse
                // for android orientations >= 0 and < 90, the major axis should correspond to
                // just reporting the y radius as the major one, and x as minor
                // however, for a radius < 0, we have to shift the orientation by adding 90, and
                // reverse which radius is major and minor
                if (mOrientations[index] < 0) {
                    mOrientations[index] += 90;
                    mPointRadii[index] = new Point((int)event.getToolMajor(eventIndex)/2,
                                                   (int)event.getToolMinor(eventIndex)/2);
                } else {
                    mPointRadii[index] = new Point((int)event.getToolMinor(eventIndex)/2,
                                                   (int)event.getToolMajor(eventIndex)/2);
                }
            } else {
                // Pre-API-9 fallback: approximate the touch radius from the
                // normalized size and the smaller screen dimension.
                float size = event.getSize(eventIndex);
                Resources resources = GeckoApp.mAppContext.getResources();
                DisplayMetrics displaymetrics = resources.getDisplayMetrics();
                size = size*Math.min(displaymetrics.heightPixels, displaymetrics.widthPixels);
                mPointRadii[index] = new Point((int)size,(int)size);
                mOrientations[index] = 0;
            }
            mPressures[index] = event.getPressure(eventIndex);
        } catch(Exception ex) {
            Log.e(LOGTAG, "Error creating motion point " + index, ex);
            mPointRadii[index] = new Point(0, 0);
            mPoints[index] = new Point(0, 0);
        }
    }

    // Map an Android sensor accuracy constant to the Gecko HAL equivalent.
    private static int HalSensorAccuracyFor(int androidAccuracy) {
        switch (androidAccuracy) {
            case SensorManager.SENSOR_STATUS_UNRELIABLE:
                return GeckoHalDefines.SENSOR_ACCURACY_UNRELIABLE;
            case SensorManager.SENSOR_STATUS_ACCURACY_LOW:
                return GeckoHalDefines.SENSOR_ACCURACY_LOW;
            case SensorManager.SENSOR_STATUS_ACCURACY_MEDIUM:
                return GeckoHalDefines.SENSOR_ACCURACY_MED;
            case SensorManager.SENSOR_STATUS_ACCURACY_HIGH:
                return GeckoHalDefines.SENSOR_ACCURACY_HIGH;
        }
        return GeckoHalDefines.SENSOR_ACCURACY_UNKNOWN;
    }

    // Returns null for sensor types we do not translate.
    public static GeckoEvent createSensorEvent(SensorEvent s) {
        int sensor_type = s.sensor.getType();
        GeckoEvent event = null;

        switch(sensor_type) {
            case Sensor.TYPE_ACCELEROMETER:
                event = new GeckoEvent(SENSOR_EVENT);
                event.mFlags = GeckoHalDefines.SENSOR_ACCELERATION;
                event.mMetaState = HalSensorAccuracyFor(s.accuracy);
                event.mX = s.values[0];
                event.mY = s.values[1];
                event.mZ = s.values[2];
                break;

            case 10 /* Requires API Level 9, so just use the raw value - Sensor.TYPE_LINEAR_ACCELEROMETER*/ :
                event = new GeckoEvent(SENSOR_EVENT);
                event.mFlags = GeckoHalDefines.SENSOR_LINEAR_ACCELERATION;
                event.mMetaState = HalSensorAccuracyFor(s.accuracy);
                event.mX = s.values[0];
                event.mY = s.values[1];
                event.mZ = s.values[2];
                break;

            case Sensor.TYPE_ORIENTATION:
                event = new GeckoEvent(SENSOR_EVENT);
                event.mFlags = GeckoHalDefines.SENSOR_ORIENTATION;
                event.mMetaState = HalSensorAccuracyFor(s.accuracy);
                event.mX = s.values[0];
                event.mY = s.values[1];
                event.mZ = s.values[2];
                break;

            case Sensor.TYPE_GYROSCOPE:
                event = new GeckoEvent(SENSOR_EVENT);
                event.mFlags = GeckoHalDefines.SENSOR_GYROSCOPE;
                event.mMetaState = HalSensorAccuracyFor(s.accuracy);
                // Gyroscope values come in radians/s; report degrees/s.
                event.mX = Math.toDegrees(s.values[0]);
                event.mY = Math.toDegrees(s.values[1]);
                event.mZ = Math.toDegrees(s.values[2]);
                break;

            case Sensor.TYPE_PROXIMITY:
                event = new GeckoEvent(SENSOR_EVENT);
                event.mFlags = GeckoHalDefines.SENSOR_PROXIMITY;
                event.mMetaState = HalSensorAccuracyFor(s.accuracy);
                event.mX = s.values[0];
                event.mY = 0;
                event.mZ = s.sensor.getMaximumRange();
                break;

            case Sensor.TYPE_LIGHT:
                event = new GeckoEvent(SENSOR_EVENT);
                event.mFlags = GeckoHalDefines.SENSOR_LIGHT;
                event.mMetaState = HalSensorAccuracyFor(s.accuracy);
                event.mX = s.values[0];
                break;
        }
        return event;
    }

    public static GeckoEvent createLocationEvent(Location l) {
        GeckoEvent event = new GeckoEvent(LOCATION_EVENT);
        event.mLocation = l;
        return event;
    }

    public static GeckoEvent createIMEEvent(int imeAction, int offset, int count) {
        GeckoEvent event = new GeckoEvent(IME_EVENT);
        event.mAction = imeAction;
        event.mOffset = offset;
        event.mCount = count;
        return event;
    }

    // Shared initializer for the two createIMERangeEvent overloads.
    private void InitIMERange(int action, int offset, int count, int rangeType,
                              int rangeStyles, int rangeForeColor, int rangeBackColor) {
        mAction = action;
        mOffset = offset;
        mCount = count;
        mRangeType = rangeType;
        mRangeStyles = rangeStyles;
        mRangeForeColor = rangeForeColor;
        mRangeBackColor = rangeBackColor;
        return;
    }

    public static GeckoEvent createIMERangeEvent(int offset, int count, int rangeType,
                                                 int rangeStyles, int rangeForeColor,
                                                 int rangeBackColor, String text) {
        GeckoEvent event = new GeckoEvent(IME_EVENT);
        event.InitIMERange(IME_SET_TEXT, offset, count, rangeType, rangeStyles,
                           rangeForeColor, rangeBackColor);
        event.mCharacters = text;
        return event;
    }

    public static GeckoEvent createIMERangeEvent(int offset, int count, int rangeType,
                                                 int rangeStyles, int rangeForeColor,
                                                 int rangeBackColor) {
        GeckoEvent event = new GeckoEvent(IME_EVENT);
        event.InitIMERange(IME_ADD_RANGE, offset, count, rangeType, rangeStyles,
                           rangeForeColor, rangeBackColor);
        return event;
    }

    public static GeckoEvent createDrawEvent(Rect rect) {
        GeckoEvent event = new GeckoEvent(DRAW);
        event.mRect = rect;
        return event;
    }

    public static GeckoEvent createSizeChangedEvent(int w, int h, int screenw, int screenh) {
        GeckoEvent event = new GeckoEvent(SIZE_CHANGED);
        event.mPoints = new Point[2];
        event.mPoints[0] = new Point(w, h);
        event.mPoints[1] = new Point(screenw, screenh);
        return event;
    }

    public static GeckoEvent createBroadcastEvent(String subject, String data) {
        GeckoEvent event = new GeckoEvent(BROADCAST);
        event.mCharacters = subject;
        event.mCharactersExtra = data;
        return event;
    }

    // The viewport change is serialized as a small JSON payload in
    // mCharactersExtra, with mCharacters carrying the message name.
    public static GeckoEvent createViewportEvent(ViewportMetrics viewport, DisplayPortMetrics displayPort) {
        GeckoEvent event = new GeckoEvent(VIEWPORT);
        event.mCharacters = "Viewport:Change";
        PointF origin = viewport.getOrigin();
        StringBuffer sb = new StringBuffer(256);
        sb.append("{ \"x\" : ").append(origin.x)
          .append(", \"y\" : ").append(origin.y)
          .append(", \"zoom\" : ").append(viewport.getZoomFactor())
          .append(", \"displayPort\" :").append(displayPort.toJSON())
          .append('}');
        event.mCharactersExtra = sb.toString();
        return event;
    }

    public static GeckoEvent createURILoadEvent(String uri) {
        GeckoEvent event = new GeckoEvent(LOAD_URI);
        event.mCharacters = uri;
        event.mCharactersExtra = "";
        return event;
    }

    public static GeckoEvent createWebappLoadEvent(String uri) {
        GeckoEvent event = new GeckoEvent(LOAD_URI);
        event.mCharacters = uri;
        event.mCharactersExtra = "-webapp";
        return event;
    }

    public static GeckoEvent createBookmarkLoadEvent(String uri) {
        GeckoEvent event = new GeckoEvent(LOAD_URI);
        event.mCharacters = uri;
        event.mCharactersExtra = "-bookmark";
        return event;
    }

    public static GeckoEvent createVisitedEvent(String data) {
        GeckoEvent event = new GeckoEvent(VISITED);
        event.mCharacters = data;
        return event;
    }

    public static GeckoEvent createNetworkEvent(double bandwidth, boolean canBeMetered) {
        GeckoEvent event = new GeckoEvent(NETWORK_CHANGED);
        event.mBandwidth = bandwidth;
        event.mCanBeMetered = canBeMetered;
        return event;
    }

    // (sx,sy,sw,sh) = source rect, (dx,dy,dw,dh) = destination rect,
    // (bw,bh) = buffer dimensions; tabId and token ride in mMetaState/mFlags.
    public static GeckoEvent createScreenshotEvent(int tabId, int sx, int sy, int sw, int sh,
                                                   int dx, int dy, int dw, int dh,
                                                   int bw, int bh, int token, ByteBuffer buffer) {
        GeckoEvent event = new GeckoEvent(SCREENSHOT);
        event.mPoints = new Point[5];
        event.mPoints[0] = new Point(sx, sy);
        event.mPoints[1] = new Point(sw, sh);
        event.mPoints[2] = new Point(dx, dy);
        event.mPoints[3] = new Point(dw, dh);
        event.mPoints[4] = new Point(bw, bh);
        event.mMetaState = tabId;
        event.mFlags = token;
        event.mBuffer = buffer;
        return event;
    }

    public static GeckoEvent createScreenOrientationEvent(short aScreenOrientation) {
        GeckoEvent event = new GeckoEvent(SCREENORIENTATION_CHANGED);
        event.mScreenOrientation = aScreenOrientation;
        return event;
    }

    public static GeckoEvent createStartPaintListentingEvent(int tabId) {
        GeckoEvent event = new GeckoEvent(PAINT_LISTEN_START_EVENT);
        event.mMetaState = tabId;
        return event;
    }
}
/* * Copyright 2015 herd contributors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.finra.herd.swaggergen; import java.io.IOException; import java.lang.reflect.Method; import java.lang.reflect.Parameter; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.xml.bind.annotation.XmlType; import io.swagger.annotations.Api; import io.swagger.annotations.ApiOperation; import io.swagger.models.Operation; import io.swagger.models.Path; import io.swagger.models.RefModel; import io.swagger.models.Response; import io.swagger.models.Swagger; import io.swagger.models.Tag; import io.swagger.models.parameters.BodyParameter; import io.swagger.models.parameters.PathParameter; import io.swagger.models.parameters.QueryParameter; import io.swagger.models.parameters.SerializableParameter; import io.swagger.models.properties.RefProperty; import org.apache.commons.lang3.BooleanUtils; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugin.logging.Log; import org.jboss.forge.roaster.Roaster; import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.Javadoc; import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.TagElement; import org.jboss.forge.roaster.model.JavaDocTag; import org.jboss.forge.roaster.model.source.AnnotationSource; import 
org.jboss.forge.roaster.model.source.JavaClassSource;
import org.jboss.forge.roaster.model.source.JavaDocSource;
import org.jboss.forge.roaster.model.source.MethodSource;
import org.jboss.forge.roaster.model.source.ParameterSource;
import org.springframework.core.io.Resource;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.core.io.support.ResourcePatternResolver;
import org.springframework.core.type.classreading.CachingMetadataReaderFactory;
import org.springframework.core.type.classreading.MetadataReader;
import org.springframework.util.ClassUtils;
import org.springframework.util.CollectionUtils;
import org.springframework.util.SystemPropertyUtils;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

/**
 * Finds and processes REST controllers.
 */
public class RestControllerProcessor {
    // The log to use for logging purposes.
    @SuppressWarnings("PMD.ProperLogger") // Logger is passed into this method from Mojo base class.
    private Log log;

    // The Swagger metadata.
    private Swagger swagger;

    // The REST Java package.
    private String restJavaPackage;

    // The tag pattern to determine REST controller names.
    private Pattern tagPattern;

    // The map of Java class names to their respective source class information.
    private Map<String, JavaClassSource> sourceMap = new HashMap<>();

    // The classes that we will create examples for.
    private Set<String> exampleClassNames = new HashSet<>();

    // The model error class.
    private Class<?> modelErrorClass;

    // A set of operation Id's to keep track of so we don't create duplicates.
    private Set<String> operationIds = new HashSet<>();

    /**
     * Instantiates a REST controller process which finds and processes REST controllers.
     *
     * @param log the log
     * @param swagger the Swagger metadata
     * @param restJavaPackage the REST Java package.
     * @param tagPatternTemplate the tag pattern template.
     * @param modelErrorClass the model error class.
     *
     * @throws MojoExecutionException if any problems were encountered.
     */
    public RestControllerProcessor(Log log, Swagger swagger, String restJavaPackage, String tagPatternTemplate, Class<?> modelErrorClass)
        throws MojoExecutionException {
        this.log = log;
        this.swagger = swagger;
        this.restJavaPackage = restJavaPackage;
        this.modelErrorClass = modelErrorClass;

        // Create the tag pattern based on the parameter.
        tagPattern = Pattern.compile(tagPatternTemplate);

        findAndProcessRestControllers();
    }

    /**
     * Finds all the REST controllers within the configured REST Java package and process the REST methods within each one.
     *
     * @throws MojoExecutionException if any errors were encountered.
     */
    private void findAndProcessRestControllers() throws MojoExecutionException {
        try {
            log.debug("Finding and processing REST controllers.");

            // Loop through each resources and process each one.
            // First pass: parse every *.java source file into a Roaster model so
            // Javadoc can be matched to reflective methods later.
            for (Resource resource : ResourceUtils
                .getResources(ResourcePatternResolver.CLASSPATH_ALL_URL_PREFIX + restJavaPackage.replace('.', '/') + "/**/*.java")) {
                if (resource.isReadable()) {
                    JavaClassSource javaClassSource = Roaster.parse(JavaClassSource.class, resource.getInputStream());
                    sourceMap.put(javaClassSource.getName(), javaClassSource);
                    log.debug("Found Java source class \"" + javaClassSource.getName() + "\".");
                }
            }

            // Loop through each controller resources and process each one.
            // Second pass: load every *.class file and process the REST controllers.
            for (Resource resource : ResourceUtils.getResources(ResourcePatternResolver.CLASSPATH_ALL_URL_PREFIX +
                ClassUtils.convertClassNameToResourcePath(SystemPropertyUtils.resolvePlaceholders(restJavaPackage)) + "/**/*.class")) {
                if (resource.isReadable()) {
                    // Create a resource resolver to fetch resources.
                    MetadataReader metadataReader =
                        new CachingMetadataReaderFactory(new PathMatchingResourcePatternResolver()).getMetadataReader(resource);
                    Class<?> clazz = Class.forName(metadataReader.getClassMetadata().getClassName());
                    processRestControllerClass(clazz);
                }
            }
        } catch (ClassNotFoundException | IOException e) {
            throw new MojoExecutionException("Error processing REST classes. Reason: " + e.getMessage(), e);
        }
    }

    /**
     * Processes a Spring MVC REST controller class that is annotated with RestController. Also collects any required model objects based on parameters and
     * return types of each endpoint into the specified model classes set.
     *
     * @param clazz the class to process
     *
     * @throws MojoExecutionException if any errors were encountered.
     */
    private void processRestControllerClass(Class<?> clazz) throws MojoExecutionException {
        // Get the Java class source information.
        JavaClassSource javaClassSource = sourceMap.get(clazz.getSimpleName());
        if (javaClassSource == null) {
            throw new MojoExecutionException("No source resource found for class \"" + clazz.getName() + "\".");
        }

        Api api = clazz.getAnnotation(Api.class);
        boolean hidden = api != null && api.hidden();

        if ((clazz.getAnnotation(RestController.class) != null) && (!hidden)) {
            log.debug("Processing RestController class \"" + clazz.getName() + "\".");

            // Default the tag name to the simple class name.
            String tagName = clazz.getSimpleName();

            // See if the "Api" annotation exists.
            if (api != null && api.tags().length > 0) {
                // The "Api" annotation was found so use its configured tag.
                tagName = api.tags()[0];
            } else {
                // No "Api" annotation so try to get the tag name from the class name.
                // If not, we will stick with the default simple class name.
                Matcher matcher = tagPattern.matcher(clazz.getSimpleName());
                if (matcher.find()) {
                    // The class name matched the tag pattern; use the captured "tag" group.
                    tagName = matcher.group("tag");
                }
            }
            log.debug("Using tag name \"" + tagName + "\".");

            // Add the tag and process each method.
            swagger.addTag(new Tag().name(tagName));
            for (Method method : clazz.getDeclaredMethods()) {
                // Get the method source information.
                List<Class<?>> methodParamClasses = new ArrayList<>();
                for (Parameter parameter : method.getParameters()) {
                    methodParamClasses.add(parameter.getType());
                }
                MethodSource<JavaClassSource> methodSource =
                    javaClassSource.getMethod(method.getName(), methodParamClasses.toArray(new Class<?>[methodParamClasses.size()]));
                if (methodSource == null) {
                    throw new MojoExecutionException(
                        "No method source found for class \"" + clazz.getName() + "\" and method name \"" + method.getName() + "\".");
                }

                // Process the REST controller method along with its source information.
                processRestControllerMethod(method, clazz.getAnnotation(RequestMapping.class), tagName, methodSource);
            }
        } else {
            log.debug("Skipping class \"" + clazz.getName() + "\" because it is either not a RestController or it is hidden.");
        }
    }

    /**
     * Processes a method in a REST controller which represents an endpoint, that is, it is annotated with RequestMapping.
     *
     * @param method the method.
     * @param classRequestMapping the parent.
     * @param tagName the tag name.
     * @param methodSource the method source information.
     *
     * @throws MojoExecutionException if any errors were encountered.
     */
    @SuppressWarnings("unchecked") // CollectionUtils doesn't work with generics.
    private void processRestControllerMethod(Method method, RequestMapping classRequestMapping, String tagName,
        MethodSource<JavaClassSource> methodSource) throws MojoExecutionException {
        log.debug("Processing method \"" + method.getName() + "\".");

        // Build a map of each parameter name to its description from the method Javadoc (i.e.
// all @param and @return values).
        Map<String, String> methodParamDescriptions = new HashMap<>();
        JavaDocSource<MethodSource<JavaClassSource>> javaDocSource = methodSource.getJavaDoc();
        List<JavaDocTag> tags = javaDocSource.getTags();
        for (JavaDocTag javaDocTag : tags) {
            processJavaDocTag(javaDocTag, methodParamDescriptions);
        }

        // Default the media types, HTTP methods and URI's to empty lists.
        List<String> produces = Collections.emptyList();
        List<String> consumes = Collections.emptyList();
        List<RequestMethod> requestMethods = Collections.emptyList();
        List<String> uris = Collections.emptyList();

        // If a class request mapping exists, use it as the default.
        if (classRequestMapping != null) {
            produces = CollectionUtils.arrayToList(classRequestMapping.produces());
            consumes = CollectionUtils.arrayToList(classRequestMapping.consumes());
            requestMethods = CollectionUtils.arrayToList(classRequestMapping.method());
            uris = CollectionUtils.arrayToList(classRequestMapping.value());
        }

        // Get the API Operation and see if this endpoint is hidden.
        ApiOperation apiOperation = method.getAnnotation(ApiOperation.class);
        boolean hidden = apiOperation != null && apiOperation.hidden();

        // Only process methods that have a RequestMapping annotation.
        RequestMapping methodRequestMapping = method.getAnnotation(RequestMapping.class);
        if ((methodRequestMapping != null) && (!hidden)) {
            log.debug("Method \"" + method.getName() + "\" is a RequestMapping.");

            // Override values with method level ones if present.
            requestMethods = getClassOrMethodValue(requestMethods, CollectionUtils.arrayToList(methodRequestMapping.method()));
            uris = getClassOrMethodValue(uris, CollectionUtils.arrayToList(methodRequestMapping.value()));
            produces = getClassOrMethodValue(produces, CollectionUtils.arrayToList(methodRequestMapping.produces()));
            consumes = getClassOrMethodValue(consumes, CollectionUtils.arrayToList(methodRequestMapping.consumes()));

            // Perform validation.
            if (requestMethods.isEmpty()) {
                log.warn("No request method defined for method \"" + method.getName() + "\". Skipping...");
                return;
            }
            if (uris.isEmpty()) {
                log.warn("No URI defined for method \"" + method.getName() + "\". Skipping...");
                return;
            }
            if (uris.size() > 1) {
                log.warn(uris.size() + " URI's found for method \"" + method.getName() + "\". Only processing the first one.");
            }
            if (requestMethods.size() > 1) {
                // Bug fix: this warning previously logged uris.size() (copy/paste error);
                // report the number of request methods instead.
                log.warn(requestMethods.size() + " request methods found for method \"" + method.getName() + "\". Only processing the first one.");
            }

            String uri = uris.get(0).trim();
            Path path = swagger.getPath(uri);
            if (path == null) {
                path = new Path();
                swagger.path(uri, path);
            }

            // Get the method summary from the ApiOperation annotation or use the method name if the annotation doesn't exist.
            String methodSummary = method.getName();
            if (apiOperation != null) {
                methodSummary = apiOperation.value();
            }

            Operation operation = new Operation();
            operation.tag(tagName);
            operation.summary(methodSummary);
            if (javaDocSource.getText() != null) {
                // Process the method description.
                Javadoc javadoc = (Javadoc) javaDocSource.getInternal();
                List<TagElement> tagList = javadoc.tags();
                StringBuilder stringBuilder = new StringBuilder();
                for (TagElement tagElement : tagList) {
                    // Tags that have a null tag name are related to the overall method description (as opposed to the individual parameters, etc.).
                    // In most cases, there should be only 1, but perhaps there are other cases that could have more. This general logic comes from
                    // JavaDocImpl.getText(). Although that implementation also filters out on TextElements, we'll grab them all in case there's something
                    // else available (e.g. @link, etc.).
                    if (tagElement.getTagName() == null) {
                        processFragments(tagElement.fragments(), stringBuilder);
                    }
                }

                // The string builder has the final method text to use.
                operation.description(stringBuilder.toString());

                // NOTE(review): the operation Id is only assigned when the method has Javadoc
                // text; methods without Javadoc end up with no operationId — confirm intended.
                setOperationId(tagName, method, operation);
            }
            if (!produces.isEmpty()) {
                operation.setProduces(produces);
            }
            if (!consumes.isEmpty()) {
                operation.setConsumes(consumes);
            }
            // HTTP method MUST be lower cased. Use a fixed locale so the result doesn't depend
            // on the default locale (e.g. "OPTIONS" lower-cases to "opt\u0131ons" under tr_TR).
            path.set(requestMethods.get(0).name().toLowerCase(java.util.Locale.ENGLISH), operation);

            // Process each method parameter.
            // We are using the parameter source here instead of the reflection method's parameters so we can match it to its Javadoc descriptions.
            // The reflection approach only uses auto-generated parameter names (e.g. arg0, arg1, etc.) which we can't match to Javadoc parameter
            // names.
            for (ParameterSource<JavaClassSource> parameterSource : methodSource.getParameters()) {
                processRestMethodParameter(parameterSource, operation, methodParamDescriptions);
            }

            // Process the return value.
            processRestMethodReturnValue(method.getReturnType(), operation, methodParamDescriptions.get("@return"));
        } else {
            log.debug("Skipping method \"" + method.getName() + "\" because it is either not a RequestMapping or it is hidden.");
        }
    }

    /**
     * Processes the Java doc tag (i.e. the parameters and return value).
     *
     * @param javaDocTag the Java doc tag
     * @param methodParamDescriptions the map of method parameters to update.
     */
    private void processJavaDocTag(JavaDocTag javaDocTag, Map<String, String> methodParamDescriptions) {
        // Get the list of fragments which are the parts of an individual Javadoc parameter or return value.
        TagElement tagElement = (TagElement) javaDocTag.getInternal();
        List fragments = tagElement.fragments();

        // We need to populate the parameter name and get the list of fragments that contain the actual text.
        String paramName = "";
        List subFragments = new ArrayList<>();
        if (javaDocTag.getName().equals("@param")) {
            // In the case of @param, the first fragment is the name and the rest make up the description.
paramName = String.valueOf(fragments.get(0)); subFragments = fragments.subList(1, fragments.size()); } else if (javaDocTag.getName().equals("@return")) { // In the case of @return, we'll use "@return" itself for the name and all the fragments make up the description. paramName = "@return"; subFragments = fragments; } // Process all fragments and place the results in the map. StringBuilder stringBuilder = new StringBuilder(); processFragments(subFragments, stringBuilder); methodParamDescriptions.put(paramName, stringBuilder.toString()); } /** * Processes all the fragments that make up the description. This needs to be done manually as opposed to using the higher level roaster text retrieval * methods since those eat carriage return characters and don't replace them with a space. The result is the the last word of one line and the first word of * the next line are placed together with no separating space. This method builds it manually to fix this issue. * <p> * This method updates a passed in stringBuilder so callers can process multiple list of fragments and use the same stringBuilder to hold the processed * contents of all of them. * * @param fragments the list of fragments. * @param stringBuilder the string builder to update. */ private void processFragments(List fragments, StringBuilder stringBuilder) { // Loop through the fragments. for (Object fragment : fragments) { // Get and trim this fragment. String fragmentString = String.valueOf(fragment).trim(); // If we have already processed a fragment, add a space. if (stringBuilder.length() > 0) { stringBuilder.append(' '); } // Append this fragment to the string builder. stringBuilder.append(fragmentString); } } /** * Sets an operation Id on the operation based on the specified method. The operation Id takes on the format of * "~tagNameWithoutSpaces~.~methodName~[~counter~]". * * @param tagName the tag name for the class. * @param method the method for the operation. 
* @param operation the operation to set the Id on. */ private void setOperationId(String tagName, Method method, Operation operation) { // Initialize the counter and the "base" operation Id (i.e. the one without the counter) and default the operation Id we're going to use to the base // one. int count = 0; String baseOperationId = tagName.replaceAll(" ", "") + "." + method.getName(); String operationId = baseOperationId; // As long as the operation Id is a duplicate with one used before, add a counter to the end of it until we find one that hasn't been used before. while (operationIds.contains(operationId)) { count++; operationId = baseOperationId + count; } // Add our new operation Id to the set so we don't use it again and set the operation Id on the operation itself. operationIds.add(operationId); operation.setOperationId(operationId); } /** * Process a REST method parameter. * * @param parameterSource the parameter source information. * @param operation the Swagger operation. * @param methodParamDescriptions the method parameter Javadoc descriptions. * * @throws MojoExecutionException if any problems were encountered. 
*/ private void processRestMethodParameter(ParameterSource<JavaClassSource> parameterSource, Operation operation, Map<String, String> methodParamDescriptions) throws MojoExecutionException { log.debug("Processing parameter \"" + parameterSource.getName() + "\"."); try { AnnotationSource<JavaClassSource> requestParamAnnotationSource = parameterSource.getAnnotation(RequestParam.class); AnnotationSource<JavaClassSource> requestBodyAnnotationSource = parameterSource.getAnnotation(RequestBody.class); AnnotationSource<JavaClassSource> pathVariableAnnotationSource = parameterSource.getAnnotation(PathVariable.class); if (requestParamAnnotationSource != null) { log.debug("Parameter \"" + parameterSource.getName() + "\" is a RequestParam."); QueryParameter queryParameter = new QueryParameter(); queryParameter.name(requestParamAnnotationSource.getStringValue("value").trim()); queryParameter.setRequired(BooleanUtils.toBoolean(requestParamAnnotationSource.getStringValue("required"))); setParameterType(parameterSource, queryParameter); operation.parameter(queryParameter); setParamDescription(parameterSource, methodParamDescriptions, queryParameter); } else if (requestBodyAnnotationSource != null) { log.debug("Parameter \"" + parameterSource.getName() + "\" is a RequestBody."); // Add the class name to the list of classes which we will create an example for. 
exampleClassNames.add(parameterSource.getType().getSimpleName()); BodyParameter bodyParameter = new BodyParameter(); XmlType xmlType = getXmlType(Class.forName(parameterSource.getType().getQualifiedName())); String name = xmlType.name().trim(); bodyParameter.name(name); bodyParameter.setRequired(true); bodyParameter.setSchema(new RefModel(name)); operation.parameter(bodyParameter); setParamDescription(parameterSource, methodParamDescriptions, bodyParameter); } else if (pathVariableAnnotationSource != null) { log.debug("Parameter \"" + parameterSource.getName() + "\" is a PathVariable."); PathParameter pathParameter = new PathParameter(); pathParameter.name(pathVariableAnnotationSource.getStringValue("value").trim()); setParameterType(parameterSource, pathParameter); operation.parameter(pathParameter); setParamDescription(parameterSource, methodParamDescriptions, pathParameter); } } catch (ClassNotFoundException e) { throw new MojoExecutionException("Unable to instantiate class \"" + parameterSource.getType().getQualifiedName() + "\". Reason: " + e.getMessage(), e); } } /** * Converts the given Java parameter type into a Swagger param type and sets it into the given Swagger param. * * @param parameterSource the parameter source. * @param swaggerParam the Swagger parameter. */ private void setParameterType(ParameterSource<JavaClassSource> parameterSource, SerializableParameter swaggerParam) throws MojoExecutionException { try { String typeName = parameterSource.getType().getQualifiedName(); if (String.class.getName().equals(typeName)) { swaggerParam.setType("string"); } else if (Integer.class.getName().equals(typeName) || Long.class.getName().equals(typeName)) { swaggerParam.setType("integer"); } else if (Boolean.class.getName().equals(typeName)) { swaggerParam.setType("boolean"); } else { // See if the type is an enum. 
Enum<?>[] enumValues = (Enum<?>[]) Class.forName(parameterSource.getType().getQualifiedName()).getEnumConstants(); if (enumValues != null) { swaggerParam.setType("string"); swaggerParam.setEnum(new ArrayList<>()); for (Enum<?> enumEntry : enumValues) { swaggerParam.getEnum().add(enumEntry.name()); } } else { // Assume "string" for all other types since everything is ultimately a string. swaggerParam.setType("string"); } } log.debug("Parameter \"" + parameterSource.getName() + "\" is a type \"" + swaggerParam.getType() + "\"."); } catch (ClassNotFoundException e) { throw new MojoExecutionException("Unable to instantiate class \"" + parameterSource.getType().getQualifiedName() + "\". Reason: " + e.getMessage(), e); } } /** * Sets a Swagger parameter description. * * @param parameterSource the parameter source information. * @param methodParamDescriptions the map of parameter names to their descriptions. * @param swaggerParam the Swagger parameter metadata to update. */ private void setParamDescription(ParameterSource<JavaClassSource> parameterSource, Map<String, String> methodParamDescriptions, io.swagger.models.parameters.Parameter swaggerParam) { // Set the parameter description if one was found. String parameterDescription = methodParamDescriptions.get(parameterSource.getName()); log.debug("Parameter \"" + parameterSource.getName() + "\" has description\"" + parameterDescription + "\"."); if (parameterDescription != null) { swaggerParam.setDescription(parameterDescription); } } /** * Processes the return value of a RequestMapping annotated method. * * @param returnType the return type. * @param operation the operation. * @param returnDescription the description of the return value. * * @throws MojoExecutionException if the return type isn't an XmlType. 
*/ private void processRestMethodReturnValue(Class<?> returnType, Operation operation, String returnDescription) throws MojoExecutionException { log.debug("Processing REST method return value \"" + returnType.getName() + "\"."); // Add the class name to the list of classes which we will create an example for. exampleClassNames.add(returnType.getSimpleName()); // Add the success response operation.response(200, new Response().description(returnDescription == null ? "Success" : returnDescription) .schema(new RefProperty(getXmlType(returnType).name().trim()))); // If we have an error class, add that as the default response. if (modelErrorClass != null) { operation.defaultResponse(new Response().description("General Error").schema(new RefProperty(getXmlType(modelErrorClass).name().trim()))); } } /** * Gets the method level object if not empty or uses the class level if the method level is empty. * * @param classLevel the class level object. * @param methodLevel the method level object. * @param <T> the type of the object. * * @return the class or method level object. */ private <T extends List<?>> T getClassOrMethodValue(T classLevel, T methodLevel) { return (methodLevel == null || methodLevel.isEmpty()) ? classLevel : methodLevel; } /** * Gets the XmlType annotation from the specified class. If the XmlType doesn't exist, an exception will be thrown. * * @param clazz the class with the XmlType annotation. * * @return the XmlType. * @throws MojoExecutionException if the class isn't an XmlType. */ private XmlType getXmlType(Class<?> clazz) throws MojoExecutionException { XmlType xmlType = clazz.getAnnotation(XmlType.class); if (xmlType == null) { throw new MojoExecutionException("Class \"" + clazz.getName() + "\" is not of XmlType."); } return xmlType; } /** * Gets the example class names. * * @return the example class names. */ public Set<String> getExampleClassNames() { return exampleClassNames; } }
/*
 * Copyright 2000-2015 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jetbrains.java.decompiler.modules.decompiler.vars;

import org.jetbrains.java.decompiler.code.CodeConstants;
import org.jetbrains.java.decompiler.main.DecompilerContext;
import org.jetbrains.java.decompiler.main.collectors.VarNamesCollector;
import org.jetbrains.java.decompiler.modules.decompiler.exps.AssignmentExprent;
import org.jetbrains.java.decompiler.modules.decompiler.exps.Exprent;
import org.jetbrains.java.decompiler.modules.decompiler.exps.VarExprent;
import org.jetbrains.java.decompiler.modules.decompiler.stats.CatchAllStatement;
import org.jetbrains.java.decompiler.modules.decompiler.stats.CatchStatement;
import org.jetbrains.java.decompiler.modules.decompiler.stats.DoStatement;
import org.jetbrains.java.decompiler.modules.decompiler.stats.Statement;
import org.jetbrains.java.decompiler.struct.StructClass;
import org.jetbrains.java.decompiler.struct.StructMethod;
import org.jetbrains.java.decompiler.struct.gen.MethodDescriptor;

import java.util.*;
import java.util.Map.Entry;

/**
 * Decides where local variable declarations should be emitted in the decompiled
 * output. The constructor marks implicitly-defined variables (the method
 * parameters, {@code this}, and catch-block variables) and indexes, per
 * statement, which variables occur in it; {@link #setVarDefinitions()} then
 * inserts an explicit definition for every remaining variable at the innermost
 * statement that dominates all of its uses.
 */
public class VarDefinitionHelper {

  // var index -> statement where the variable's definition must be placed
  private final HashMap<Integer, Statement> mapVarDefStatements;

  // statement.id, defined vars
  private final HashMap<Integer, HashSet<Integer>> mapStatementVars;

  // var indices that are implicitly defined (parameters, 'this', catch vars)
  private final HashSet<Integer> implDefVars;

  private final VarProcessor varproc;

  public VarDefinitionHelper(Statement root, StructMethod mt, VarProcessor varproc) {
    mapVarDefStatements = new HashMap<Integer, Statement>();
    mapStatementVars = new HashMap<Integer, HashSet<Integer>>();
    implDefVars = new HashSet<Integer>();
    this.varproc = varproc;

    VarNamesCollector vc = DecompilerContext.getVarNamesCollector();

    // non-static methods carry an implicit 'this' in local slot 0
    boolean thisvar = !mt.hasModifier(CodeConstants.ACC_STATIC);

    MethodDescriptor md = MethodDescriptor.parseDescriptor(mt.getDescriptor());

    int paramcount = 0;
    if (thisvar) {
      paramcount = 1;
    }
    paramcount += md.params.length;

    // method parameters are implicitly defined
    // varindex walks the JVM local-variable slots; long/double params occupy
    // two slots (stackSize accounts for that)
    int varindex = 0;
    for (int i = 0; i < paramcount; i++) {
      implDefVars.add(varindex);
      varproc.setVarName(new VarVersionPair(varindex, 0), vc.getFreeName(varindex));

      if (thisvar) {
        if (i == 0) {
          varindex++;
        }
        else {
          varindex += md.params[i - 1].stackSize;
        }
      }
      else {
        varindex += md.params[i].stackSize;
      }
    }

    if (thisvar) {
      StructClass current_class = (StructClass)DecompilerContext.getProperty(DecompilerContext.CURRENT_CLASS);

      varproc.getThisVars().put(new VarVersionPair(0, 0), current_class.qualifiedName);
      varproc.setVarName(new VarVersionPair(0, 0), "this");
      vc.addName("this");
    }

    // catch variables are implicitly defined
    // breadth-first walk over the statement tree looking for catch clauses
    LinkedList<Statement> stack = new LinkedList<Statement>();
    stack.add(root);

    while (!stack.isEmpty()) {
      Statement st = stack.removeFirst();

      List<VarExprent> lstVars = null;
      if (st.type == Statement.TYPE_CATCHALL) {
        lstVars = ((CatchAllStatement)st).getVars();
      }
      else if (st.type == Statement.TYPE_TRYCATCH) {
        lstVars = ((CatchStatement)st).getVars();
      }

      if (lstVars != null) {
        for (VarExprent var : lstVars) {
          implDefVars.add(var.getIndex());
          varproc.setVarName(new VarVersionPair(var), vc.getFreeName(var.getIndex()));
          var.setDefinition(true);
        }
      }

      stack.addAll(st.getStats());
    }

    // populate mapVarDefStatements / mapStatementVars for the whole tree
    initStatement(root);
  }

  /**
   * Assigns a name to every explicitly-defined variable and inserts its
   * definition either by marking the first assignment as a definition or, when
   * no suitable assignment exists, by adding a standalone declaration.
   */
  public void setVarDefinitions() {
    VarNamesCollector vc = DecompilerContext.getVarNamesCollector();

    for (Entry<Integer, Statement> en : mapVarDefStatements.entrySet()) {
      Statement stat = en.getValue();
      Integer index = en.getKey();

      if (implDefVars.contains(index)) {
        // already implicitly defined
        continue;
      }

      varproc.setVarName(new VarVersionPair(index.intValue(), 0), vc.getFreeName(index));

      // special case for 'for' loops: try to declare the variable in the loop
      // initializer; if it is used in the condition/increment instead, hoist
      // the definition to the loop's parent statement
      if (stat.type == Statement.TYPE_DO) {
        DoStatement dstat = (DoStatement)stat;
        if (dstat.getLooptype() == DoStatement.LOOP_FOR) {

          if (dstat.getInitExprent() != null && setDefinition(dstat.getInitExprent(), index)) {
            continue;
          }
          else {
            List<Exprent> lstSpecial = Arrays.asList(dstat.getConditionExprent(), dstat.getIncExprent());
            for (VarExprent var : getAllVars(lstSpecial)) {
              if (var.getIndex() == index.intValue()) {
                stat = stat.getParent();
                break;
              }
            }
          }
        }
      }

      Statement first = findFirstBlock(stat, index);

      // choose the list the definition will live in: a basic block's exprents
      // when one was found, otherwise the statement's own definition list
      List<Exprent> lst;
      if (first == null) {
        lst = stat.getVarDefinitions();
      }
      else if (first.getExprents() == null) {
        lst = first.getVarDefinitions();
      }
      else {
        lst = first.getExprents();
      }

      boolean defset = false;

      // search for the first assignment to var [index]
      int addindex = 0;
      for (Exprent expr : lst) {
        if (setDefinition(expr, index)) {
          defset = true;
          break;
        }
        else {
          // stop early if the variable is *used* before being assigned — the
          // declaration must then be inserted before this expression
          boolean foundvar = false;
          for (Exprent exp : expr.getAllExprents(true)) {
            if (exp.type == Exprent.EXPRENT_VAR && ((VarExprent)exp).getIndex() == index) {
              foundvar = true;
              break;
            }
          }
          if (foundvar) {
            break;
          }
        }
        addindex++;
      }

      if (!defset) {
        // no assignment could be promoted: insert a bare declaration
        VarExprent var = new VarExprent(index.intValue(), varproc.getVarType(new VarVersionPair(index.intValue(), 0)), varproc);
        var.setDefinition(true);
        lst.add(addindex, var);
      }
    }
  }

  // *****************************************************************************
  // private methods
  // *****************************************************************************

  /**
   * Descends from {@code stat} toward the first block that mentions
   * {@code varindex}, following only "first child" edges of sequence/if/root/
   * switch/synchronized statements; returns null if no such block exists.
   */
  private Statement findFirstBlock(Statement stat, Integer varindex) {
    LinkedList<Statement> stack = new LinkedList<Statement>();
    stack.add(stat);

    while (!stack.isEmpty()) {
      Statement st = stack.remove(0);

      if (stack.isEmpty() || mapStatementVars.get(st.id).contains(varindex)) {

        if (st.isLabeled() && !stack.isEmpty()) {
          return st;
        }

        if (st.getExprents() != null) {
          return st;
        }
        else {
          stack.clear();

          switch (st.type) {
            case Statement.TYPE_SEQUENCE:
              stack.addAll(0, st.getStats());
              break;
            case Statement.TYPE_IF:
            case Statement.TYPE_ROOT:
            case Statement.TYPE_SWITCH:
            case Statement.TYPE_SYNCRONIZED:
              stack.add(st.getFirst());
              break;
            default:
              return st;
          }
        }
      }
    }

    return null;
  }

  /**
   * Recursively indexes which variables occur in {@code stat}. A variable seen
   * in more than one child (count > 1), or used directly in this statement
   * (count forced to 2), gets its definition anchored at this statement.
   *
   * @return the set of variable indices occurring anywhere in {@code stat}.
   */
  private Set<Integer> initStatement(Statement stat) {

    HashMap<Integer, Integer> mapCount = new HashMap<Integer, Integer>();

    List<VarExprent> condlst;

    if (stat.getExprents() == null) {

      // recurse on children statements
      List<Integer> childVars = new ArrayList<Integer>();
      List<Exprent> currVars = new ArrayList<Exprent>();

      for (Object obj : stat.getSequentialObjects()) {
        if (obj instanceof Statement) {
          Statement st = (Statement)obj;
          childVars.addAll(initStatement(st));

          if (st.type == DoStatement.TYPE_DO) {
            DoStatement dost = (DoStatement)st;
            // while/do-while conditions belong to the enclosing scope
            if (dost.getLooptype() != DoStatement.LOOP_FOR &&
                dost.getLooptype() != DoStatement.LOOP_DO) {
              currVars.add(dost.getConditionExprent());
            }
          }
          else if (st.type == DoStatement.TYPE_CATCHALL) {
            CatchAllStatement fin = (CatchAllStatement)st;
            if (fin.isFinally() && fin.getMonitor() != null) {
              currVars.add(fin.getMonitor());
            }
          }
        }
        else if (obj instanceof Exprent) {
          currVars.add((Exprent)obj);
        }
      }

      // children statements
      // NOTE(review): new Integer(...) is the original (pre-deprecation) boxing style
      for (Integer index : childVars) {
        Integer count = mapCount.get(index);
        if (count == null) {
          count = new Integer(0);
        }
        mapCount.put(index, new Integer(count.intValue() + 1));
      }

      condlst = getAllVars(currVars);
    }
    else {
      condlst = getAllVars(stat.getExprents());
    }

    // this statement: direct uses always anchor the definition here (count 2 > 1)
    for (VarExprent var : condlst) {
      mapCount.put(new Integer(var.getIndex()), new Integer(2));
    }

    HashSet<Integer> set = new HashSet<Integer>(mapCount.keySet());

    // put all variables defined in this statement into the set
    for (Entry<Integer, Integer> en : mapCount.entrySet()) {
      if (en.getValue().intValue() > 1) {
        mapVarDefStatements.put(en.getKey(), stat);
      }
    }

    mapStatementVars.put(stat.id, set);

    return set;
  }

  /**
   * Collects every VarExprent contained (transitively) in the given exprents,
   * including the top-level exprents themselves.
   */
  private static List<VarExprent> getAllVars(List<Exprent> lst) {
    List<VarExprent> res = new ArrayList<VarExprent>();
    List<Exprent> listTemp = new ArrayList<Exprent>();

    for (Exprent expr : lst) {
      listTemp.addAll(expr.getAllExprents(true));
      listTemp.add(expr);
    }

    for (Exprent exprent : listTemp) {
      if (exprent.type == Exprent.EXPRENT_VAR) {
        res.add((VarExprent)exprent);
      }
    }

    return res;
  }

  /**
   * If {@code expr} is an assignment whose left side is the variable with the
   * given index, marks that VarExprent as a definition and returns true.
   */
  private static boolean setDefinition(Exprent expr, Integer index) {
    if (expr.type == Exprent.EXPRENT_ASSIGNMENT) {
      Exprent left = ((AssignmentExprent)expr).getLeft();
      if (left.type == Exprent.EXPRENT_VAR) {
        VarExprent var = (VarExprent)left;
        if (var.getIndex() == index.intValue()) {
          var.setDefinition(true);
          return true;
        }
      }
    }

    return false;
  }
}
/* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.edgent.connectors.mqtt.iot; import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.HashSet; import java.util.Properties; import java.util.Set; import org.apache.edgent.connectors.iot.IotDevice; import org.apache.edgent.connectors.mqtt.MqttConfig; import org.apache.edgent.connectors.mqtt.MqttStreams; import org.apache.edgent.function.Function; import org.apache.edgent.function.UnaryOperator; import org.apache.edgent.topology.TSink; import org.apache.edgent.topology.TStream; import org.apache.edgent.topology.Topology; import org.apache.edgent.topology.json.JsonFunctions; import com.google.gson.JsonObject; /** * An MQTT based Edgent {@link IotDevice} connector. * <p> * The MQTT {@code IotDevice} is an abstraction on top of * the {@link MqttStreams} connector. * <p> * The connector doesn't presume a particular pattern for * Device MQTT "event" and "command" topics though default * patterns are provided. * <p> * The MQTT message content for device events and device commands must be JSON. * The contents of the JSON are under the control of the collaborating MQTT clients. * Typically a device to defines its event and command schemas * and the other clients to adapt accordingly. 
* See {@link #commands(String...)} and {@link #events(TStream, String, int) events()} * for a description of how MQTT messages are converted to and from stream tuples. * <p> * Connector configuration Properties fall into two categories: * <ul> * <li>MQTT Device abstraction properties</li> * <li>Base MQTT connector properties - see {@link MqttConfig#fromProperties(Properties)} * </ul> * * <h3>Device properties</h3> * <ul> * <li>mqttDevice.id - Required. An identifier that uniquely identifies * the device in the device event and device command MQTT topic namespaces. * </li> * <li>mqttDevice.topic.prefix - A optional prefix that by default is used when * composing device event and command MQTT topics, and the client's MQTT * clientId. The default is no prefix.</li> * <li>mqttDevice.event.topic.pattern - Optional. The topic pattern used * for MQTT device event topics. * Defaults to {@code {mqttDevice.topic.prefix}id/{mqttDevice.id}/evt/{EVENTID}/fmt/json} * The pattern must include {EVENTID} and must end with "/fmt/json". * </li> * <li>mqttDevice.command.topic.pattern - Optional. The topic pattern used * for MQTT device command topics. * Defaults to {@code {mqttDevice.topic.prefix}id/{mqttDevice.id}/cmd/{COMMAND}/fmt/json} * The pattern must include {COMMAND} and must end with "/fmt/json". * </li> * <li>mqttDevice.command.qos - An optional MQTT QoS value for commands. Defaults to 0.</li> * <li>mqttDevice.events.retain - Optional MQTT "retain" behavior for published events. Defaults to false.</li> * <li>mqttDevice.mqtt.clientId - Optional value to use for the MQTT clientId. 
* Defaults to {mqttDevice.topic.prefix}id/{mqttDevice.id}.</li> * </ul> * Sample use: * <pre>{@code * // assuming a properties file containing at least: * // mqttDevice.id=012345678 * // mqtt.serverURLs=tcp://myMqttBrokerHost:1883 * * String propsPath = <path to properties file>; * Properties properties = new Properties(); * properties.load(Files.newBufferedReader(new File(propsPath).toPath())); * Topology t = new DirectProvider(); * MqttDevice mqttDevice = new MqttDevice(t, properties); * * // publish JSON "status" device event tuples every hour * TStream<JsonObject> myStatusEvents = t.poll(myGetStatusAsJson(), 1, TimeUnit.HOURS); * mqttDevice.events(myStatusEvents, "status", QoS.FIRE_AND_FORGET); * * // handle a device command. In this example the payload is expected * // to be JSON and have a "value" property containing the new threshold. * mqttDevice.command("setSensorThreshold") * .sink(json -> setSensorThreshold(json.get(CMD_PAYLOAD).getAsJsonObject().get("value").getAsString()); * }</pre> */ public class MqttDevice implements IotDevice { private final Topology topology; private final String deviceId; private String topicPrefix = ""; private String clientId = "{mqttDevice.topic.prefix}id/{mqttDevice.id}"; private String evtTopic = "{mqttDevice.topic.prefix}id/{mqttDevice.id}/evt/{EVENTID}/fmt/json"; private String cmdTopic = "{mqttDevice.topic.prefix}id/{mqttDevice.id}/cmd/{COMMAND}/fmt/json"; private int commandQoS = 0; private boolean retainEvents = false; private final MqttConfig mqttConfig; private final MqttStreams connector; private TStream<JsonObject> commandStream; /** * Create an MqttDevice connector. * <p> * All configuration information comes from {@code properties}. * * @param topology topology to add the connector to. * @param properties connector properties. */ public MqttDevice(Topology topology, Properties properties) { this(topology, properties, null); } /** * Create an MqttDevice connector. 
* <p> * Uses {@code mattConfig} for the base MQTT connector configuration * and uses {@code properties} only for MQTT Device properties. * * @param topology topology to add the connector to. * @param properties connector properties. Properties beyond those * noted in the Device properties section above are ignored. * @param mqttConfig base MQTT configuration. may be null. */ public MqttDevice(Topology topology, Properties properties, MqttConfig mqttConfig) { this.topology = topology; this.deviceId = properties.getProperty("mqttDevice.id"); if (deviceId == null || deviceId.isEmpty()) throw new IllegalArgumentException("mqttDevice.id"); String cqos = properties.getProperty("mqttDevice.command.qos", Integer.valueOf(commandQoS).toString()); commandQoS = Integer.valueOf(cqos); String eretain = properties.getProperty("mqttDevice.events.retain", Boolean.valueOf(retainEvents).toString()); retainEvents = Boolean.valueOf(eretain); topicPrefix = properties.getProperty("mqttDevice.topic.prefix", topicPrefix); clientId = properties.getProperty("mqttDevice.mqtt.clientId", clientId); evtTopic = properties.getProperty("mqttDevice.event.topic.pattern", evtTopic); if (!evtTopic.endsWith("/fmt/json")) throw new IllegalArgumentException("mqttDevice.event.topic.pattern"); cmdTopic = properties.getProperty("mqttDevice.command.topic.pattern", cmdTopic); if (!cmdTopic.endsWith("/fmt/json")) throw new IllegalArgumentException("mqttDevice.command.topic.pattern"); initVars(); if (mqttConfig == null) { mqttConfig = MqttConfig.fromProperties(properties); mqttConfig.setClientId(clientId); } this.mqttConfig = mqttConfig; this.connector = new MqttStreams(topology, () -> this.mqttConfig); } private void initVars() { clientId = clientId .replace("{mqttDevice.topic.prefix}", topicPrefix) .replace("{mqttDevice.id}", deviceId); evtTopic = evtTopic .replace("{mqttDevice.topic.prefix}", topicPrefix) .replace("{mqttDevice.id}", deviceId); cmdTopic = cmdTopic .replace("{mqttDevice.topic.prefix}", 
topicPrefix) .replace("{mqttDevice.id}", deviceId); } /** * Get the MQTT topic for an device event. * @param eventId the event id. * if null, returns a topic filter for all of the device's events. * @return the topic */ public String eventTopic(String eventId) { if (eventId == null) { eventId = "+"; // retain the trailing fmt/json } return evtTopic.replace("{EVENTID}", eventId); } /** * Get the MQTT topic for a command. * @param command the command id. * if null, returns a topic filter for all of the device's commands. * @return the topic */ public String commandTopic(String command) { if (command == null) { command = "+"; // retain the trailing fmt/json } return cmdTopic.replace("{COMMAND}", command); } /** * Get the device's {@link MqttConfig} * @return the config */ public MqttConfig getMqttConfig() { return mqttConfig; } /** * {@inheritDoc} * * <p>The event is published to the configured MQTT {@code mqttDevice.event.topic.pattern}, * as described in the above class documentation, substituting the value returned * by the {@code eventId} function for "{EVENTID}" in the pattern. * The MQTT message's payload is the JSON representation * of the JsonObject stream tuple. */ @Override public TSink<JsonObject> events(TStream<JsonObject> stream, Function<JsonObject, String> eventId, UnaryOperator<JsonObject> payload, Function<JsonObject, Integer> qos) { Function<JsonObject, String> topic = jo -> eventTopic(eventId.apply(jo)); Function<JsonObject,byte[]> payloadFn = jo -> JsonFunctions.asBytes().apply(payload.apply(jo)); return connector.publish(stream, topic, payloadFn, qos, jo -> retainEvents); } /** * {@inheritDoc} * * <p>The event is published to the configured MQTT {@code mqttDevice.event.topic.pattern}, * as described in the above class documentation, substituting the {@code eventId} for * "{EVENTID}" in the pattern. * The MQTT message's payload is the JSON representation * of the JsonObject stream tuple. 
*/ @Override public TSink<JsonObject> events(TStream<JsonObject> stream, String eventId, int qos) { return events(stream, jo -> eventId, jo -> jo, jo -> qos); } /** * {@inheritDoc} * * <p> * Subscribes to the configured MQTT {@code mqttDevice.command.topic.pattern} * as described in the above class documentation. * The received MQTT message's payload is required to be JSON. * The message's JSON payload is converted to a JsonObject and * set as the {@code payload} key's value in the stream tuple JsonObject. */ @Override public TStream<JsonObject> commands(String... commands) { TStream<JsonObject> all = allCommands(); if (commands.length != 0) { Set<String> uniqueCommands = new HashSet<>(); uniqueCommands.addAll(Arrays.asList(commands)); all = all.filter(jo -> uniqueCommands.contains(jo.get(CMD_ID).getAsString())); } return all; } private TStream<JsonObject> allCommands() { if (commandStream == null) { String topicFilter = commandTopic(null); commandStream = connector.subscribe(topicFilter, commandQoS, (topic, payload) -> { JsonObject jo = new JsonObject(); jo.addProperty(CMD_DEVICE, deviceId); jo.addProperty(CMD_ID, extractCmd(topic)); jo.addProperty(CMD_TS, System.currentTimeMillis()); String fmt = extractCmdFmt(topic); jo.addProperty(CMD_FORMAT, fmt); if ("json".equals(fmt)) { jo.add(CMD_PAYLOAD, JsonFunctions.fromBytes().apply(payload)); } else { jo.addProperty(CMD_PAYLOAD, new String(payload, StandardCharsets.UTF_8)); } return jo; }) .tag("allDeviceCmds"); } return commandStream; } private String extractCmd(String topic) { String prefix = cmdTopic.substring(0, cmdTopic.indexOf("{COMMAND}")); String cmd = topic.substring(prefix.length()); int endCmd = cmd.indexOf('/'); if (endCmd != -1) cmd = cmd.substring(0, endCmd); return cmd; } private String extractCmdFmt(String cmdTopic) { return cmdTopic.endsWith("/fmt/json") ? 
"json" : "string"; } @Override public Topology topology() { return topology; } /** * {@inheritDoc} * <p> * This connector does not support the notion of a device-type * as part of its device id model. An empty string is returned. */ @Override public String getDeviceType() { // not part of this connector's device identifier model return ""; } @Override public String getDeviceId() { return deviceId; } }
/* * Copyright (c) 2013, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.identity.application.authentication.framework.util; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.wso2.carbon.CarbonConstants; import org.wso2.carbon.claim.mgt.ClaimManagementException; import org.wso2.carbon.claim.mgt.ClaimManagerHandler; import org.wso2.carbon.context.PrivilegedCarbonContext; import org.wso2.carbon.identity.application.authentication.framework.ApplicationAuthenticator; import org.wso2.carbon.identity.application.authentication.framework.AuthenticatorFlowStatus; import org.wso2.carbon.identity.application.authentication.framework.cache.AuthenticationContextCache; import org.wso2.carbon.identity.application.authentication.framework.cache.AuthenticationContextCacheEntry; import org.wso2.carbon.identity.application.authentication.framework.cache.AuthenticationContextCacheKey; import org.wso2.carbon.identity.application.authentication.framework.cache.AuthenticationRequestCache; import org.wso2.carbon.identity.application.authentication.framework.cache.AuthenticationRequestCacheEntry; import org.wso2.carbon.identity.application.authentication.framework.cache.AuthenticationRequestCacheKey; import org.wso2.carbon.identity.application.authentication.framework.cache.AuthenticationResultCache; import 
org.wso2.carbon.identity.application.authentication.framework.cache.AuthenticationResultCacheEntry; import org.wso2.carbon.identity.application.authentication.framework.cache.AuthenticationResultCacheKey; import org.wso2.carbon.identity.application.authentication.framework.cache.SessionContextCache; import org.wso2.carbon.identity.application.authentication.framework.cache.SessionContextCacheEntry; import org.wso2.carbon.identity.application.authentication.framework.cache.SessionContextCacheKey; import org.wso2.carbon.identity.application.authentication.framework.config.ConfigurationFacade; import org.wso2.carbon.identity.application.authentication.framework.config.builder.FileBasedConfigurationBuilder; import org.wso2.carbon.identity.application.authentication.framework.config.model.AuthenticatorConfig; import org.wso2.carbon.identity.application.authentication.framework.config.model.ExternalIdPConfig; import org.wso2.carbon.identity.application.authentication.framework.config.model.SequenceConfig; import org.wso2.carbon.identity.application.authentication.framework.config.model.StepConfig; import org.wso2.carbon.identity.application.authentication.framework.context.AuthenticationContext; import org.wso2.carbon.identity.application.authentication.framework.context.SessionContext; import org.wso2.carbon.identity.application.authentication.framework.exception.FrameworkException; import org.wso2.carbon.identity.application.authentication.framework.handler.claims.ClaimHandler; import org.wso2.carbon.identity.application.authentication.framework.handler.claims.impl.DefaultClaimHandler; import org.wso2.carbon.identity.application.authentication.framework.handler.hrd.HomeRealmDiscoverer; import org.wso2.carbon.identity.application.authentication.framework.handler.hrd.impl.DefaultHomeRealmDiscoverer; import org.wso2.carbon.identity.application.authentication.framework.handler.provisioning.ProvisioningHandler; import 
org.wso2.carbon.identity.application.authentication.framework.handler.provisioning.impl.DefaultProvisioningHandler; import org.wso2.carbon.identity.application.authentication.framework.handler.request.AuthenticationRequestHandler; import org.wso2.carbon.identity.application.authentication.framework.handler.request.LogoutRequestHandler; import org.wso2.carbon.identity.application.authentication.framework.handler.request.RequestCoordinator; import org.wso2.carbon.identity.application.authentication.framework.handler.request.impl.DefaultAuthenticationRequestHandler; import org.wso2.carbon.identity.application.authentication.framework.handler.request.impl.DefaultLogoutRequestHandler; import org.wso2.carbon.identity.application.authentication.framework.handler.request.impl.DefaultRequestCoordinator; import org.wso2.carbon.identity.application.authentication.framework.handler.sequence.RequestPathBasedSequenceHandler; import org.wso2.carbon.identity.application.authentication.framework.handler.sequence.StepBasedSequenceHandler; import org.wso2.carbon.identity.application.authentication.framework.handler.sequence.impl.DefaultRequestPathBasedSequenceHandler; import org.wso2.carbon.identity.application.authentication.framework.handler.sequence.impl.DefaultStepBasedSequenceHandler; import org.wso2.carbon.identity.application.authentication.framework.handler.step.StepHandler; import org.wso2.carbon.identity.application.authentication.framework.handler.step.impl.DefaultStepHandler; import org.wso2.carbon.identity.application.authentication.framework.internal.FrameworkServiceComponent; import org.wso2.carbon.identity.application.authentication.framework.model.AuthenticatedIdPData; import org.wso2.carbon.identity.application.authentication.framework.model.AuthenticationFrameworkWrapper; import org.wso2.carbon.identity.application.authentication.framework.model.AuthenticationRequest; import org.wso2.carbon.identity.application.authentication.framework.model.AuthenticationResult; 
import org.wso2.carbon.identity.application.common.model.Claim;
import org.wso2.carbon.identity.application.common.model.ClaimMapping;
import org.wso2.carbon.identity.application.common.model.FederatedAuthenticatorConfig;
import org.wso2.carbon.identity.application.common.model.IdentityProvider;
import org.wso2.carbon.identity.application.common.model.Property;
import org.wso2.carbon.identity.core.util.IdentityUtil;
import org.wso2.carbon.user.api.UserStoreException;
import org.wso2.carbon.user.core.util.UserCoreUtil;
import org.wso2.carbon.utils.multitenancy.MultitenantConstants;

import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeMap;

/**
 * Static utility methods shared across the authentication framework:
 * cache access for authentication requests/results/contexts/sessions,
 * extension-point (handler) lookups, common-auth cookie handling,
 * claim-mapping helpers, query-string building and tenant-flow management.
 */
public class FrameworkUtils {

    // Request parameter used to correlate a servlet request with its cached framework state.
    public static final String SESSION_DATA_KEY = "sessionDataKey";
    // Charset name used when URL-encoding query parameters.
    public static final String UTF_8 = "UTF-8";
    private static final Log log = LogFactory.getLog(FrameworkUtils.class);
    // Framework-wide max inactive interval, set via setMaxInactiveInterval();
    // units are determined by the callers — TODO confirm (minutes vs seconds).
    private static int maxInactiveInterval;
    // NOTE(review): not referenced anywhere in this file — verify usage before removing.
    private static final String EMAIL = "email";

    // Utility class — not meant to be instantiated.
    private FrameworkUtils() {
    }

    /**
     * To add authentication request cache entry to cache
     *
     * @param key cache entry key
     * @param authReqEntry AuthenticationReqCache Entry.
*/
    public static void addAuthenticationRequestToCache(String key, AuthenticationRequestCacheEntry authReqEntry) {
        AuthenticationRequestCacheKey cacheKey = new AuthenticationRequestCacheKey(key);
        AuthenticationRequestCache.getInstance().addToCache(cacheKey, authReqEntry);
    }

    /**
     * To get authentication cache request from cache
     *
     * @param key Key of the cache entry
     * @return the cached entry, or null when no entry exists for the key
     */
    public static AuthenticationRequestCacheEntry getAuthenticationRequestFromCache(String key) {
        AuthenticationRequestCacheKey cacheKey = new AuthenticationRequestCacheKey(key);
        AuthenticationRequestCacheEntry authRequest =
                AuthenticationRequestCache.getInstance().getValueFromCache(cacheKey);
        return authRequest;
    }

    /**
     * Removes authentication request from cache. A null key is silently ignored.
     *
     * @param key SessionDataKey
     */
    public static void removeAuthenticationRequestFromCache(String key) {
        if (key != null) {
            AuthenticationRequestCacheKey cacheKey = new AuthenticationRequestCacheKey(key);
            AuthenticationRequestCache.getInstance().clearCacheEntry(cacheKey);
        }
    }

    /**
     * Builds the wrapper, wrapping the incoming request and information taken from the cache entry.
     *
     * @param request Original request coming to authentication framework
     * @param cacheEntry Cache entry from the cache, which is added from calling servlets
     * @return a wrapper combining the request with the cached query params and headers,
     *         or the original request unchanged when cacheEntry is null
     */
    public static HttpServletRequest getCommonAuthReqWithParams(HttpServletRequest request,
                                                                AuthenticationRequestCacheEntry cacheEntry) {
        // add this functionality as a constructor
        Map<String, String[]> modifiableParameters = new TreeMap<String, String[]>();
        if (cacheEntry != null) {
            AuthenticationRequest authenticationRequest = cacheEntry.getAuthenticationRequest();
            if (!authenticationRequest.getRequestQueryParams().isEmpty()) {
                modifiableParameters.putAll(authenticationRequest.getRequestQueryParams());
            }
            // Adding field variables to wrapper
            if (authenticationRequest.getType() != null) {
                modifiableParameters.put(FrameworkConstants.RequestParams.TYPE,
                        new String[]{authenticationRequest.getType()});
            }
            if (authenticationRequest.getCommonAuthCallerPath() != null) {
                modifiableParameters.put(FrameworkConstants.RequestParams.CALLER_PATH,
                        new String[]{authenticationRequest.getCommonAuthCallerPath()});
            }
            if (authenticationRequest.getRelyingParty() != null) {
                modifiableParameters.put(FrameworkConstants.RequestParams.ISSUER,
                        new String[]{authenticationRequest.getRelyingParty()});
            }
            if (authenticationRequest.getTenantDomain() != null) {
                modifiableParameters.put(FrameworkConstants.RequestParams.TENANT_DOMAIN,
                        new String[]{authenticationRequest.getTenantDomain()});
            }
            modifiableParameters.put(FrameworkConstants.RequestParams.FORCE_AUTHENTICATE,
                    new String[]{String.valueOf(authenticationRequest.getForceAuth())});
            modifiableParameters.put(FrameworkConstants.RequestParams.PASSIVE_AUTHENTICATION,
                    new String[]{String.valueOf(authenticationRequest.getPassiveAuth())});
            if (log.isDebugEnabled()) {
                // Dump every inbound parameter (values wrapped in braces) for troubleshooting.
                StringBuilder queryStringBuilder = new StringBuilder("");
                for (Map.Entry<String, String[]> entry : modifiableParameters.entrySet()) {
                    StringBuilder paramValueBuilder = new StringBuilder("");
                    String[] paramValueArr = entry.getValue();
                    if (paramValueArr != null) {
                        for (String paramValue : paramValueArr) {
                            paramValueBuilder.append("{").append(paramValue).append("}");
                        }
                    }
                    queryStringBuilder.append("\n").append(
                            entry.getKey() + "=" + paramValueBuilder.toString());
                }
                log.debug("\nInbound Request parameters: " + queryStringBuilder.toString());
            }
            return new AuthenticationFrameworkWrapper(request, modifiableParameters,
                    authenticationRequest.getRequestHeaders());
        }
        return request;
    }

    /**
     * @param name registered authenticator name to look up
     * @return the matching authenticator, or null when none is registered under that name
     */
    public static ApplicationAuthenticator getAppAuthenticatorByName(String name) {
        for (ApplicationAuthenticator authenticator : FrameworkServiceComponent.getAuthenticators()) {
            if (name.equals(authenticator.getName())) {
                return authenticator;
            }
        }
        return null;
    }

    /**
     * @param request incoming servlet request
     * @return the cached AuthenticationContext resolved via the first authenticator
     *         that yields a known context identifier, or null when none resolves
     */
    public static AuthenticationContext getContextData(HttpServletRequest request) {
AuthenticationContext context = null; for (ApplicationAuthenticator authenticator : FrameworkServiceComponent.getAuthenticators()) { try { String contextIdentifier = authenticator.getContextIdentifier(request); if (contextIdentifier != null && !contextIdentifier.isEmpty()) { context = FrameworkUtils.getAuthenticationContextFromCache(contextIdentifier); if (context != null) { break; } } } catch (UnsupportedOperationException e) { if (log.isDebugEnabled()) { log.debug("Ignore UnsupportedOperationException.", e); } continue; } } return context; } public static RequestCoordinator getRequestCoordinator() { RequestCoordinator requestCoordinator = null; Object obj = ConfigurationFacade.getInstance().getExtensions() .get(FrameworkConstants.Config.QNAME_EXT_REQ_COORDINATOR); if (obj instanceof RequestCoordinator) { requestCoordinator = (RequestCoordinator) obj; } else { requestCoordinator = DefaultRequestCoordinator.getInstance(); } return requestCoordinator; } /** * @return */ public static AuthenticationRequestHandler getAuthenticationRequestHandler() { AuthenticationRequestHandler authenticationRequestHandler = null; Object obj = ConfigurationFacade.getInstance().getExtensions() .get(FrameworkConstants.Config.QNAME_EXT_AUTH_REQ_HANDLER); if (obj instanceof LogoutRequestHandler) { authenticationRequestHandler = (AuthenticationRequestHandler) obj; } else { authenticationRequestHandler = DefaultAuthenticationRequestHandler.getInstance(); } return authenticationRequestHandler; } /** * @return */ public static LogoutRequestHandler getLogoutRequestHandler() { LogoutRequestHandler logoutRequestHandler = null; Object obj = ConfigurationFacade.getInstance().getExtensions() .get(FrameworkConstants.Config.QNAME_EXT_LOGOUT_REQ_HANDLER); if (obj instanceof AuthenticationRequestHandler) { logoutRequestHandler = (LogoutRequestHandler) obj; } else { logoutRequestHandler = DefaultLogoutRequestHandler.getInstance(); } return logoutRequestHandler; } /** * @return */ public static 
StepBasedSequenceHandler getStepBasedSequenceHandler() {
        StepBasedSequenceHandler stepBasedSequenceHandler = null;
        Object obj = ConfigurationFacade.getInstance().getExtensions()
                .get(FrameworkConstants.Config.QNAME_EXT_STEP_BASED_SEQ_HANDLER);
        if (obj instanceof StepBasedSequenceHandler) {
            stepBasedSequenceHandler = (StepBasedSequenceHandler) obj;
        } else {
            stepBasedSequenceHandler = DefaultStepBasedSequenceHandler.getInstance();
        }
        return stepBasedSequenceHandler;
    }

    /**
     * @return the request-path based sequence handler extension if one is configured,
     *         otherwise the default implementation
     */
    public static RequestPathBasedSequenceHandler getRequestPathBasedSequenceHandler() {
        RequestPathBasedSequenceHandler reqPathBasedSeqHandler = null;
        Object obj = ConfigurationFacade.getInstance().getExtensions()
                .get(FrameworkConstants.Config.QNAME_EXT_REQ_PATH_BASED_SEQ_HANDLER);
        if (obj instanceof RequestPathBasedSequenceHandler) {
            reqPathBasedSeqHandler = (RequestPathBasedSequenceHandler) obj;
        } else {
            reqPathBasedSeqHandler = DefaultRequestPathBasedSequenceHandler.getInstance();
        }
        return reqPathBasedSeqHandler;
    }

    /**
     * @return the step handler extension if one is configured, otherwise the default implementation
     */
    public static StepHandler getStepHandler() {
        StepHandler stepHandler = null;
        Object obj = ConfigurationFacade.getInstance().getExtensions()
                .get(FrameworkConstants.Config.QNAME_EXT_STEP_HANDLER);
        if (obj instanceof StepHandler) {
            stepHandler = (StepHandler) obj;
        } else {
            stepHandler = DefaultStepHandler.getInstance();
        }
        return stepHandler;
    }

    /**
     * @return the home realm discoverer extension if one is configured,
     *         otherwise the default implementation
     */
    public static HomeRealmDiscoverer getHomeRealmDiscoverer() {
        HomeRealmDiscoverer homeRealmDiscoverer = null;
        Object obj = ConfigurationFacade.getInstance().getExtensions()
                .get(FrameworkConstants.Config.QNAME_EXT_HRD);
        if (obj instanceof HomeRealmDiscoverer) {
            homeRealmDiscoverer = (HomeRealmDiscoverer) obj;
        } else {
            homeRealmDiscoverer = DefaultHomeRealmDiscoverer.getInstance();
        }
        return homeRealmDiscoverer;
    }

    /**
     * @return the claim handler extension if one is configured, otherwise the default implementation
     */
    public static ClaimHandler getClaimHandler() {
        ClaimHandler claimHandler = null;
        Object obj = ConfigurationFacade.getInstance().getExtensions()
                .get(FrameworkConstants.Config.QNAME_EXT_CLAIM_HANDLER);
        if (obj instanceof ClaimHandler) {
            claimHandler = (ClaimHandler) obj;
        } else {
            claimHandler = DefaultClaimHandler.getInstance();
        }
        return claimHandler;
    }

    /**
     * @return the provisioning handler extension if one is configured,
     *         otherwise the default implementation
     */
    public static ProvisioningHandler getProvisioningHandler() {
        ProvisioningHandler provisioningHandler = null;
        Object obj = ConfigurationFacade.getInstance().getExtensions()
                .get(FrameworkConstants.Config.QNAME_EXT_PROVISIONING_HANDLER);
        if (obj instanceof ProvisioningHandler) {
            provisioningHandler = (ProvisioningHandler) obj;
        } else {
            provisioningHandler = DefaultProvisioningHandler.getInstance();
        }
        return provisioningHandler;
    }

    /**
     * Marks the flow incomplete and redirects the client to the authentication
     * endpoint's retry page.
     *
     * @param request incoming servlet request
     * @param response servlet response used for the redirect
     * @throws IOException if the redirect cannot be sent
     */
    public static void sendToRetryPage(HttpServletRequest request, HttpServletResponse response)
            throws IOException {
        // TODO read the URL from framework config file rather than carbon.xml
        request.setAttribute(FrameworkConstants.RequestParams.FLOW_STATUS, AuthenticatorFlowStatus.INCOMPLETE);
        response.sendRedirect(IdentityUtil.getServerURL(ConfigurationFacade.getInstance()
                .getAuthenticationEndpointRetryURL(), false, false));
    }

    /**
     * Expires the common-auth cookie (max age 0) if present on the request.
     *
     * @param req incoming servlet request
     * @param resp servlet response the expired cookie is written to
     */
    public static void removeAuthCookie(HttpServletRequest req, HttpServletResponse resp) {
        Cookie[] cookies = req.getCookies();
        if (cookies != null) {
            for (Cookie cookie : cookies) {
                if (cookie.getName().equals(FrameworkConstants.COMMONAUTH_COOKIE)) {
                    cookie.setMaxAge(0);
                    cookie.setHttpOnly(true);
                    cookie.setSecure(true);
                    resp.addCookie(cookie);
                    break;
                }
            }
        }
    }

    /**
     * Stores the common-auth cookie as a session cookie (no explicit max age).
     *
     * @param req incoming servlet request
     * @param resp servlet response the cookie is written to
     * @param id cookie value
     */
    public static void storeAuthCookie(HttpServletRequest req, HttpServletResponse resp, String id) {
        storeAuthCookie(req, resp, id, null);
    }

    /**
     * Stores the common-auth cookie, secure and HTTP-only.
     *
     * @param req incoming servlet request
     * @param resp servlet response the cookie is written to
     * @param id cookie value
     * @param age lifetime in minutes (converted to seconds below); null for a session cookie
     */
    public static void storeAuthCookie(HttpServletRequest req, HttpServletResponse resp, String id, Integer age) {
        Cookie authCookie = new Cookie(FrameworkConstants.COMMONAUTH_COOKIE, id);
        authCookie.setSecure(true);
authCookie.setHttpOnly(true);
        if (age != null) {
            // age is treated as minutes; Cookie.setMaxAge expects seconds.
            authCookie.setMaxAge(age.intValue() * 60);
        }
        resp.addCookie(authCookie);
    }

    /**
     * @param req incoming servlet request
     * @return the common-auth cookie, or null when the request carries none
     */
    public static Cookie getAuthCookie(HttpServletRequest req) {
        Cookie[] cookies = req.getCookies();
        if (cookies != null) {
            for (Cookie cookie : cookies) {
                if (cookie.getName().equals(FrameworkConstants.COMMONAUTH_COOKIE)) {
                    return cookie;
                }
            }
        }
        return null;
    }

    /**
     * Caches an authentication context under the given identifier.
     *
     * @param contextId cache key
     * @param context context to cache
     */
    public static void addAuthenticationContextToCache(String contextId, AuthenticationContext context) {
        AuthenticationContextCacheKey cacheKey = new AuthenticationContextCacheKey(contextId);
        AuthenticationContextCacheEntry cacheEntry = new AuthenticationContextCacheEntry(context);
        AuthenticationContextCache.getInstance().addToCache(cacheKey, cacheEntry);
    }

    /**
     * Caches an authentication result under the given key.
     *
     * @param key cache key
     * @param authenticationResult result to cache
     */
    public static void addAuthenticationResultToCache(String key, AuthenticationResult authenticationResult) {
        AuthenticationResultCacheKey cacheKey = new AuthenticationResultCacheKey(key);
        AuthenticationResultCacheEntry cacheEntry = new AuthenticationResultCacheEntry();
        cacheEntry.setResult(authenticationResult);
        AuthenticationResultCache.getInstance().addToCache(cacheKey, cacheEntry);
    }

    /**
     * To get authentication cache result from cache
     * @param key cache key
     * @return the cached entry, or null when no entry exists for the key
     */
    public static AuthenticationResultCacheEntry getAuthenticationResultFromCache(String key) {
        AuthenticationResultCacheKey cacheKey = new AuthenticationResultCacheKey(key);
        AuthenticationResultCacheEntry authResult =
                AuthenticationResultCache.getInstance().getValueFromCache(cacheKey);
        return authResult;
    }

    /**
     * Removes authentication result from cache. A null id is silently ignored.
     * @param autheticationResultId
     */
    public static void removeAuthenticationResultFromCache(String autheticationResultId) {
        if (autheticationResultId != null) {
            AuthenticationResultCacheKey cacheKey = new AuthenticationResultCacheKey(autheticationResultId);
            AuthenticationResultCache.getInstance().clearCacheEntry(cacheKey);
        }
    }

    /**
     * Caches a session context under the given key.
     * NOTE(review): this mutates the passed-in sessionContext — user attributes of every
     * authenticated sequence are cleared (setUserAttributes(null)) before caching; callers
     * should not rely on those attributes afterwards.
     *
     * @param key cache key
     * @param sessionContext session context to cache
     */
    public static void addSessionContextToCache(String key, SessionContext sessionContext) {
        SessionContextCacheKey cacheKey = new SessionContextCacheKey(key);
        SessionContextCacheEntry cacheEntry = new SessionContextCacheEntry();
        Map<String, SequenceConfig> seqData = sessionContext.getAuthenticatedSequences();
        if (seqData != null) {
            for (Entry<String, SequenceConfig> entry : seqData.entrySet()) {
                if (entry.getValue() != null) {
                    entry.getValue().getAuthenticatedUser().setUserAttributes(null);
                }
            }
        }
        cacheEntry.setContext(sessionContext);
        SessionContextCache.getInstance().addToCache(cacheKey, cacheEntry);
    }

    /**
     * @param key cache key
     * @return the cached session context, or null when no entry exists for the key
     */
    public static SessionContext getSessionContextFromCache(String key) {
        SessionContext sessionContext = null;
        SessionContextCacheKey cacheKey = new SessionContextCacheKey(key);
        Object cacheEntryObj = SessionContextCache.getInstance().getValueFromCache(cacheKey);
        if (cacheEntryObj != null) {
            sessionContext = ((SessionContextCacheEntry) cacheEntryObj).getContext();
        }
        return sessionContext;
    }

    /**
     * Removes a session context from the cache. A null key is silently ignored.
     *
     * @param key cache key
     */
    public static void removeSessionContextFromCache(String key) {
        if (key != null) {
            SessionContextCacheKey cacheKey = new SessionContextCacheKey(key);
            SessionContextCache.getInstance().clearCacheEntry(cacheKey);
        }
    }

    /**
     * Removes an authentication context from the cache. A null id is silently ignored.
     *
     * @param contextId cache key
     */
    public static void removeAuthenticationContextFromCache(String contextId) {
        if (contextId != null) {
            AuthenticationContextCacheKey cacheKey = new AuthenticationContextCacheKey(contextId);
            AuthenticationContextCache.getInstance().clearCacheEntry(cacheKey);
        }
    }

    /**
     * @param contextId cache key
     * @return the cached authentication context, or null when no entry exists for the key
     */
    public static AuthenticationContext
getAuthenticationContextFromCache(String contextId) {
        AuthenticationContext authenticationContext = null;
        AuthenticationContextCacheKey cacheKey = new AuthenticationContextCacheKey(contextId);
        AuthenticationContextCacheEntry authenticationContextCacheEntry = AuthenticationContextCache.getInstance().
                getValueFromCache(cacheKey);
        if (authenticationContextCacheEntry != null) {
            authenticationContext = authenticationContextCacheEntry.getContext();
        }
        if (log.isDebugEnabled() && authenticationContext == null) {
            log.debug("Authentication Context is null");
        }
        return authenticationContext;
    }

    /**
     * Copies request-path credentials (Authorization header, or the ReqPathCredential
     * parameter as a fallback) into the HTTP session.
     *
     * @param req incoming servlet request
     */
    public static void setRequestPathCredentials(HttpServletRequest req) {
        // reading the authorization header for request path authentication
        String reqPathCred = req.getHeader("Authorization");
        if (reqPathCred == null) {
            reqPathCred = req.getParameter("ReqPathCredential");
        }
        if (reqPathCred != null) {
            log.debug("A Request path credential found");
            req.getSession().setAttribute("Authorization", reqPathCred);
        }
    }

    /**
     * @param externalIdPConfig IdP configuration to search; may be null
     * @param name federated authenticator name to match
     * @return name/value map of the matching authenticator's properties; empty when the
     *         IdP config is null or no authenticator matches
     */
    public static Map<String, String> getAuthenticatorPropertyMapFromIdP(
            ExternalIdPConfig externalIdPConfig, String name) {
        Map<String, String> propertyMap = new HashMap<String, String>();
        if (externalIdPConfig != null) {
            FederatedAuthenticatorConfig[] authenticatorConfigs = externalIdPConfig
                    .getIdentityProvider().getFederatedAuthenticatorConfigs();
            for (FederatedAuthenticatorConfig authenticatorConfig : authenticatorConfigs) {
                if (authenticatorConfig.getName().equals(name)) {
                    for (Property property : authenticatorConfig.getProperties()) {
                        propertyMap.put(property.getName(), property.getValue());
                    }
                    break;
                }
            }
        }
        return propertyMap;
    }

    /**
     * Builds ClaimMapping keys from a plain claim-URI/value map; entries with null
     * values are skipped. The same URI is used for both local and remote claim.
     *
     * @param attributeValue claim URI to value map
     * @return ClaimMapping to value map
     */
    public static Map<ClaimMapping, String> buildClaimMappings(Map<String, String> attributeValue) {
        Map<ClaimMapping, String> claimMap = new HashMap<ClaimMapping, String>();
        for (Iterator<Entry<String, String>> iterator = attributeValue.entrySet().iterator(); iterator
                .hasNext(); ) {
            Entry<String, String> entry = iterator.next();
            if (entry.getValue() == null) {
                continue;
            }
            claimMap.put(ClaimMapping.build(entry.getKey(), entry.getKey(), null, false),
                    entry.getValue());
        }
        return claimMap;
    }

    /**
     * @param attributeValues ClaimMapping to value map
     * @return the set of local claim URIs of the given mappings
     */
    public static Set<String> getKeySet(Map<ClaimMapping, String> attributeValues) {
        Set<String> claimList = new HashSet<String>();
        for (Iterator<Entry<ClaimMapping, String>> iterator = attributeValues.entrySet().iterator(); iterator
                .hasNext(); ) {
            Entry<ClaimMapping, String> entry = iterator.next();
            claimList.add(entry.getKey().getLocalClaim().getClaimUri());
        }
        return claimList;
    }

    /**
     * @param claimMappings mappings to convert
     * @param useLocalDialectAsKey when true, keys are local claim URIs and values remote;
     *                             when false, keys are remote claim URIs and values local
     * @return claim-URI to claim-URI map in the requested direction
     */
    public static Map<String, String> getClaimMappings(ClaimMapping[] claimMappings,
                                                       boolean useLocalDialectAsKey) {
        Map<String, String> remoteToLocalClaimMap = new HashMap<String, String>();
        for (ClaimMapping claimMapping : claimMappings) {
            if (useLocalDialectAsKey) {
                remoteToLocalClaimMap.put(claimMapping.getLocalClaim().getClaimUri(), claimMapping
                        .getRemoteClaim().getClaimUri());
            } else {
                remoteToLocalClaimMap.put(claimMapping.getRemoteClaim().getClaimUri(), claimMapping
                        .getLocalClaim().getClaimUri());
            }
        }
        return remoteToLocalClaimMap;
    }

    /**
     * @param claimMappings ClaimMapping to value map to convert
     * @param useLocalDialectAsKey when true, keys are local claim URIs; otherwise remote
     * @return claim-URI to value map in the requested dialect
     */
    public static Map<String, String> getClaimMappings(Map<ClaimMapping, String> claimMappings,
                                                       boolean useLocalDialectAsKey) {
        Map<String, String> remoteToLocalClaimMap = new HashMap<String, String>();
        for (Entry<ClaimMapping, String> entry : claimMappings.entrySet()) {
            ClaimMapping claimMapping = entry.getKey();
            if (useLocalDialectAsKey) {
                remoteToLocalClaimMap.put(claimMapping.getLocalClaim().getClaimUri(), entry.getValue());
            } else {
                remoteToLocalClaimMap.put(claimMapping.getRemoteClaim().getClaimUri(), entry.getValue());
            }
        }
        return remoteToLocalClaimMap;
    }

    /**
     * @param claimMappings map to invert
     * @return the inverted map (values become keys and vice versa)
     */
    public static Map<String, String>
getLocalToSPClaimMappings(Map<String, String> claimMappings) {
        Map<String, String> remoteToLocalClaimMap = new HashMap<String, String>();
        for (Entry<String, String> entry : claimMappings.entrySet()) {
            remoteToLocalClaimMap.put(entry.getValue(), entry.getKey());
        }
        return remoteToLocalClaimMap;
    }

    /**
     * Replaces the caller's context id in the query string with the framework's context id.
     *
     * @param originalQueryStr query string as received from the calling servlet
     * @param callerContextId context id issued by the caller
     * @param frameworkContextId context id issued by the framework
     * @return the rewritten query string
     */
    public static String getQueryStringWithFrameworkContextId(String originalQueryStr,
                                                              String callerContextId, String frameworkContextId) {
        String queryParams = originalQueryStr;
        /*
         * Upto now, query-string contained a 'sessionDataKey' of the calling servlet. At here we
         * replace it with the framework context id.
         */
        queryParams = queryParams.replace(callerContextId, frameworkContextId);
        return queryParams;
    }

    /**
     * @param stepConfig step whose authenticators are inspected
     * @return the IdP names of every authenticator configured for the step (may contain duplicates)
     */
    public static List<String> getStepIdPs(StepConfig stepConfig) {
        List<String> stepIdps = new ArrayList<String>();
        List<AuthenticatorConfig> authenticatorConfigs = stepConfig.getAuthenticatorList();
        for (AuthenticatorConfig authenticatorConfig : authenticatorConfigs) {
            List<String> authenticatorIdps = authenticatorConfig.getIdpNames();
            for (String authenticatorIdp : authenticatorIdps) {
                stepIdps.add(authenticatorIdp);
            }
        }
        return stepIdps;
    }

    /**
     * @param stepIdPs IdPs configured for the step
     * @param authenticatedIdPs IdPs already authenticated
     * @return at most one IdP: the first step IdP that is already authenticated
     */
    public static List<String> getAuthenticatedStepIdPs(List<String> stepIdPs,
                                                        List<String> authenticatedIdPs) {
        List<String> idps = new ArrayList<String>();
        if (stepIdPs != null && authenticatedIdPs != null) {
            for (String stepIdP : stepIdPs) {
                if (authenticatedIdPs.contains(stepIdP)) {
                    idps.add(stepIdP);
                    // only the first match is collected
                    break;
                }
            }
        }
        return idps;
    }

    /**
     * Finds, per authenticator of the step, the first IdP that is already authenticated.
     *
     * @param stepConfig step whose authenticators are inspected
     * @param authenticatedIdPs already-authenticated IdP data keyed by IdP name
     * @return IdP name to authenticator config map of matches
     */
    public static Map<String, AuthenticatorConfig> getAuthenticatedStepIdPs(StepConfig stepConfig,
                                                                            Map<String, AuthenticatedIdPData> authenticatedIdPs) {
        if (log.isDebugEnabled()) {
            log.debug("Finding already authenticated IdPs of the Step");
        }
        Map<String, AuthenticatorConfig> idpAuthenticatorMap = new HashMap<String, AuthenticatorConfig>();
        List<AuthenticatorConfig> authenticatorConfigs = stepConfig.getAuthenticatorList();
        if (authenticatedIdPs != null && !authenticatedIdPs.isEmpty()) {
            for (AuthenticatorConfig authenticatorConfig : authenticatorConfigs) {
                List<String> authenticatorIdps = authenticatorConfig.getIdpNames();
                for (String authenticatorIdp : authenticatorIdps) {
                    AuthenticatedIdPData authenticatedIdPData = authenticatedIdPs
                            .get(authenticatorIdp);
                    if (authenticatedIdPData != null
                            && authenticatedIdPData.getIdpName().equals(authenticatorIdp)) {
                        idpAuthenticatorMap.put(authenticatorIdp, authenticatorConfig);
                        break;
                    }
                }
            }
        }
        return idpAuthenticatorMap;
    }

    /**
     * Serializes authenticator/IdP assignments as
     * "authName1:idpA:idpB;authName2:idpC" — federation-hub IdPs get a ".hub" suffix.
     *
     * @param authConfigList authenticator configurations to serialize
     * @return the encoded mapping string
     */
    public static String getAuthenticatorIdPMappingString(List<AuthenticatorConfig> authConfigList) {
        StringBuilder authenticatorIdPStr = new StringBuilder("");
        for (AuthenticatorConfig authConfig : authConfigList) {
            StringBuilder idpsOfAuthenticatorStr = new StringBuilder("");
            for (String idpName : authConfig.getIdpNames()) {
                if (idpName != null) {
                    if (idpsOfAuthenticatorStr.length() != 0) {
                        idpsOfAuthenticatorStr.append(":");
                    }
                    IdentityProvider idp = authConfig.getIdps().get(idpName);
                    if (idp.isFederationHub()) {
                        idpName += ".hub";
                    }
                    idpsOfAuthenticatorStr.append(idpName);
                }
            }
            if (authenticatorIdPStr.length() != 0) {
                authenticatorIdPStr.append(";");
            }
            authenticatorIdPStr.append(authConfig.getName()).append(":")
                    .append(idpsOfAuthenticatorStr);
        }
        return authenticatorIdPStr.toString();
    }

    /**
     * when getting query params through this, only configured params will be appended as query params
     * The required params can be configured from application-authenticators.xml
     *
     * @param request incoming servlet request
     * @return the URL-encoded query string built per the endpoint configuration
     */
    public static String getQueryStringWithConfiguredParams(HttpServletRequest request) {
        boolean configAvailable = FileBasedConfigurationBuilder.getInstance()
                .isAuthEndpointQueryParamsConfigAvailable();
        List<String> queryParams = FileBasedConfigurationBuilder.getInstance()
                .getAuthEndpointQueryParams();
        String action = FileBasedConfigurationBuilder.getInstance()
                .getAuthEndpointQueryParamsAction();
        StringBuilder queryStrBuilder = new StringBuilder("");
        Map<String, String[]> reqParamMap = request.getParameterMap();
        if
(configAvailable) {
            if (action != null
                    && action.equals(FrameworkConstants.AUTH_ENDPOINT_QUERY_PARAMS_ACTION_EXCLUDE)) {
                // EXCLUDE mode: forward every request parameter that is NOT in the configured list.
                if (reqParamMap != null) {
                    for (Map.Entry<String, String[]> entry : reqParamMap.entrySet()) {
                        String paramName = entry.getKey();
                        String paramValue = entry.getValue()[0];
                        //skip issuer and type and sessionDataKey parameters
                        if (SESSION_DATA_KEY.equals(paramName) || FrameworkConstants.RequestParams.ISSUER.equals
                                (paramName) || FrameworkConstants.RequestParams.TYPE.equals(paramName)) {
                            continue;
                        }
                        if (!queryParams.contains(paramName)) {
                            if (queryStrBuilder.length() > 0) {
                                queryStrBuilder.append('&');
                            }
                            try {
                                queryStrBuilder.append(URLEncoder.encode(paramName, UTF_8)).append('=')
                                        .append(URLEncoder.encode(paramValue, UTF_8));
                            } catch (UnsupportedEncodingException e) {
                                log.error(
                                        "Error while URL Encoding query param to be sent to the AuthenticationEndpoint",
                                        e);
                            }
                        }
                    }
                }
            } else {
                // INCLUDE mode: forward only the configured parameters.
                for (String param : queryParams) {
                    String paramValue = request.getParameter(param);
                    if (paramValue != null) {
                        if (queryStrBuilder.length() > 0) {
                            queryStrBuilder.append('&');
                        }
                        try {
                            queryStrBuilder.append(URLEncoder.encode(param, UTF_8)).append('=')
                                    .append(URLEncoder.encode(paramValue, UTF_8));
                        } catch (UnsupportedEncodingException e) {
                            log.error(
                                    "Error while URL Encoding query param to be sent to the AuthenticationEndpoint",
                                    e);
                        }
                    }
                }
            }
        } else {
            // No configuration: forward every parameter except the framework-internal ones.
            if (reqParamMap != null) {
                for (Map.Entry<String, String[]> entry : reqParamMap.entrySet()) {
                    String paramName = entry.getKey();
                    String paramValue = entry.getValue()[0];
                    //skip issuer and type and sessionDataKey parameters
                    if (SESSION_DATA_KEY.equals(paramName) || FrameworkConstants.RequestParams.ISSUER.equals
                            (paramName) || FrameworkConstants.RequestParams.TYPE.equals(paramName)) {
                        continue;
                    }
                    if (queryStrBuilder.length() > 0) {
                        queryStrBuilder.append('&');
                    }
                    try {
                        queryStrBuilder.append(URLEncoder.encode(paramName, UTF_8)).append('=')
                                .append(URLEncoder.encode(paramValue, UTF_8));
                    } catch (UnsupportedEncodingException e) {
                        log.error(
                                "Error while URL Encoding query param to be sent to the AuthenticationEndpoint",
                                e);
                    }
                }
            }
        }
        return queryStrBuilder.toString();
    }

    /**
     * @return the framework-wide max inactive interval
     */
    public static int getMaxInactiveInterval() {
        return maxInactiveInterval;
    }

    /**
     * @param maxInactiveInterval the framework-wide max inactive interval to set
     */
    public static void setMaxInactiveInterval(int maxInactiveInterval) {
        FrameworkUtils.maxInactiveInterval = maxInactiveInterval;
    }

    /**
     * Prefixes the thread-local user-store domain to the subject when it has no domain yet.
     *
     * @param authenticatedSubject subject identifier; must be non-null/non-blank and must
     *                             not start with the domain separator
     * @return the (possibly domain-qualified) subject
     * @throws IllegalArgumentException when the subject is null/blank or starts with the separator
     */
    public static String prependUserStoreDomainToName(String authenticatedSubject) {
        if (authenticatedSubject == null || authenticatedSubject.trim().isEmpty()) {
            throw new IllegalArgumentException("Invalid argument. authenticatedSubject : "
                    + authenticatedSubject);
        }
        if (!authenticatedSubject.contains(CarbonConstants.DOMAIN_SEPARATOR)) {
            if (UserCoreUtil.getDomainFromThreadLocal() != null
                    && !UserCoreUtil.getDomainFromThreadLocal().isEmpty()) {
                authenticatedSubject = UserCoreUtil.getDomainFromThreadLocal()
                        + CarbonConstants.DOMAIN_SEPARATOR + authenticatedSubject;
            }
        } else if (authenticatedSubject.indexOf(CarbonConstants.DOMAIN_SEPARATOR) == 0) {
            throw new IllegalArgumentException("Invalid argument. authenticatedSubject : "
                    + authenticatedSubject + " begins with \'" + CarbonConstants.DOMAIN_SEPARATOR
                    + "\'");
        }
        return authenticatedSubject;
    }

    /*
     * Find the Subject identifier among federated claims
     */
    public static String getFederatedSubjectFromClaims(IdentityProvider identityProvider,
                                                       Map<ClaimMapping, String> claimMappings) {
        String userIdClaimURI = identityProvider.getClaimConfig().getUserClaimURI();
        // Build a lookup key whose local and remote claim share the user-id claim URI.
        ClaimMapping claimMapping = new ClaimMapping();
        Claim claim = new Claim();
        claim.setClaimUri(userIdClaimURI);
        claimMapping.setRemoteClaim(claim);
        claimMapping.setLocalClaim(claim);
        return claimMappings.get(claimMapping);
    }

    /*
     * Find the Subject identifier among federated claims
     */
    public static String getFederatedSubjectFromClaims(AuthenticationContext context, String otherDialect)
            throws FrameworkException {
        String value;
        boolean useLocalClaimDialect = context.getExternalIdP().useDefaultLocalIdpDialect();
        String userIdClaimURI = context.getExternalIdP().getUserIdClaimUri();
        Map<ClaimMapping, String> claimMappings = context.getSubject().getUserAttributes();
        if (useLocalClaimDialect) {
            // Translate the local-dialect user-id claim into the other dialect first.
            Map<String, String> extAttributesValueMap = FrameworkUtils.getClaimMappings(claimMappings, false);
            Map<String, String> mappedAttrs = null;
            try {
                mappedAttrs = ClaimManagerHandler.getInstance().getMappingsMapFromOtherDialectToCarbon(otherDialect,
                        extAttributesValueMap.keySet(), context.getTenantDomain(), true);
            } catch (ClaimManagementException e) {
                throw new FrameworkException("Error while loading claim mappings.", e);
            }
            String spUserIdClaimURI = mappedAttrs.get(userIdClaimURI);
            value = extAttributesValueMap.get(spUserIdClaimURI);
        } else {
            ClaimMapping claimMapping = new ClaimMapping();
            Claim claim = new Claim();
            claim.setClaimUri(userIdClaimURI);
            claimMapping.setRemoteClaim(claim);
            value = claimMappings.get(claimMapping);
        }
        return value;
    }

    /**
     * Starts the tenant flow for the given tenant domain
     *
     * @param tenantDomain tenant domain; null/blank falls back to the super tenant
     */
    public static void startTenantFlow(String tenantDomain) {
        String tenantDomainParam = tenantDomain;
        int tenantId = MultitenantConstants.SUPER_TENANT_ID;
        if (tenantDomainParam != null && !tenantDomainParam.trim().isEmpty()) {
            try {
                tenantId = FrameworkServiceComponent.getRealmService().getTenantManager()
                        .getTenantId(tenantDomain);
            } catch (UserStoreException e) {
                // On resolution failure the super-tenant id set above is kept.
                log.error("Error while getting tenantId from tenantDomain query param", e);
            }
        } else {
            tenantDomainParam = MultitenantConstants.SUPER_TENANT_DOMAIN_NAME;
        }
        PrivilegedCarbonContext.startTenantFlow();
        PrivilegedCarbonContext carbonContext = PrivilegedCarbonContext
                .getThreadLocalCarbonContext();
        carbonContext.setTenantId(tenantId);
        carbonContext.setTenantDomain(tenantDomainParam);
    }

    /**
     * Ends the tenant flow
     */
    public static void endTenantFlow() {
        PrivilegedCarbonContext.endTenantFlow();
    }
}
/**
 * The MIT License (MIT)
 *
 * Copyright (c) 2011-2016 Incapture Technologies LLC
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
package rapture.repo.stage;

import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.log4j.Logger;

import rapture.common.repo.DocumentBagObject;
import rapture.common.repo.DocumentBagReference;
import rapture.common.repo.DocumentObject;
import rapture.common.repo.TreeObject;
import rapture.repo.VersionedRepo;

/*
 * A stage tree is used to represent the merging of a TreeObject and changes at that level to a tree object
 */
public class StageTree {
    @SuppressWarnings("unused")
    private static Logger log = Logger.getLogger(StageTree.class);

    // The tree object the changes below are applied against.
    private TreeObject shadow;
    // Child stage trees keyed by path component.
    private Map<String, StageTree> stagedTrees;
    // Documents staged at this level (overwrite semantics).
    private Map<String, DocumentObject> stagedDocuments;
    // Documents staged at this level that must not already exist.
    private Map<String, DocumentObject> newStagedDocuments;
    // Document names removed at this level.
    private Set<String> removedDocuments;
    // Maximum number of document references per bag.
    private int capacity;

    public StageTree(TreeObject base, int capacity) {
        this.shadow = base;
        this.capacity = capacity;
        reset();
    }

    /**
     * Add this document to this tree
     *
     * If parts is of length 1, add it to this tree, otherwise either resolve
     * (shadow) an existing tree in the shadow tree as a StagedTree and add the
     * document to that (removing one from the list)
     *
     * @param rp        repo used to resolve shadow subtrees on demand
     * @param parts     path components; consumed from the front as we descend
     * @param doc       document to stage
     * @param mustBeNew when true the document is staged as "must not already exist"
     */
    public void addDocumentToStage(VersionedRepo rp, List<String> parts, DocumentObject doc, boolean mustBeNew) {
        if (parts.size() == 1) {
            if (mustBeNew) {
                newStagedDocuments.put(parts.get(0), doc);
            } else {
                stagedDocuments.put(parts.get(0), doc);
            }
        } else {
            String currentLevel = parts.remove(0);
            if (stagedTrees.containsKey(currentLevel)) {
                stagedTrees.get(currentLevel).addDocumentToStage(rp, parts, doc, mustBeNew);
            } else {
                if (shadow.getTrees().containsKey(currentLevel)) {
                    // Materialize the existing shadow subtree as a stage tree.
                    TreeObject to = rp.getObjectDatabase().getTree(shadow.getTrees().get(currentLevel));
                    StageTree newStage = new StageTree(to, capacity);
                    stagedTrees.put(currentLevel, newStage);
                } else {
                    // Brand new folder level.
                    StageTree newStage = new StageTree(new TreeObject(), capacity);
                    stagedTrees.put(currentLevel, newStage);
                }
                stagedTrees.get(currentLevel).addDocumentToStage(rp, parts, doc, mustBeNew);
            }
        }
    }

    /**
     * Merge the contents of another tree object into this staged tree.
     *
     * @param rp    repo used to resolve referenced trees and bags
     * @param apply tree whose entries are merged in
     * @param cRef  commit reference used to decide document-conflict winners
     */
    public void apply(VersionedRepo rp, TreeObject apply, String cRef) {
        // Apply the changes from this tree object into this current staged tree
        for (Map.Entry<String, String> t : apply.getTrees().entrySet()) {
            if (!shadow.getTrees().containsKey(t.getKey())) {
                addStageTree(rp, t);
            } else {
                // If it does contain the same *key*, check the value
                if (!t.getValue().equals(shadow.getTrees().get(t.getKey()))) {
                    mergeStageTree(rp, cRef, t);
                }
            }
        }
        applyDocuments(rp, apply, cRef);
    }

    // Merge every document bag of the applied tree into this stage.
    private void applyDocuments(VersionedRepo rp, TreeObject apply, String cRef) {
        // Documents is a bit easier, simply overwrite for now...
        for (DocumentBagReference bagRef : apply.getDocuments()) {
            DocumentBagObject dObj = rp.getObjectDatabase().getDocumentBag(bagRef.getBagRef());
            applyToBag(rp, cRef, dObj);
        }
    }

    // Stage each document reference of the given bag, resolving conflicts with the shadow's bags.
    private void applyToBag(VersionedRepo rp, String cRef, DocumentBagObject dObj) {
        for (Map.Entry<String, String> d : dObj.getDocRefs().entrySet()) {
            boolean found = false;
            for (DocumentBagReference bagRef2 : shadow.getDocuments()) {
                DocumentBagObject dObj2 = rp.getObjectDatabase().getDocumentBag(bagRef2.getBagRef());
                if (dObj2.getDocRefs().containsKey(d.getKey())) {
                    if (!dObj2.getDocRefs().get(d.getKey()).equals(d.getValue())) {
                        // Here we have to see what we prefer - the target could have moved this
                        // document forward and that is why they are different. OR we could have
                        // moved it forward. Who wins? How do we know whether this document was
                        // modified here? - perhaps we add the commit to the BaseObject
                        found = true;
                        DocumentObject dob = rp.getObjectDatabase().getDocument(d.getValue());
                        // Only take the incoming version when it belongs to the commit being applied.
                        if (cRef.equals(dob.getCommitRef())) {
                            stagedDocuments.put(d.getKey(), rp.getObjectDatabase().getDocument(d.getValue()));
                        }
                    }
                    // NOTE(review): when the values are equal, found stays false and the document
                    // is re-staged below — confirm this re-stage of unchanged docs is intended.
                }
            }
            if (!found) {
                stagedDocuments.put(d.getKey(), rp.getObjectDatabase().getDocument(d.getValue()));
            }
        }
    }

    // Keys match but values differ: recursively merge the incoming subtree into a stage of ours.
    private void mergeStageTree(VersionedRepo rp, String cRef, Map.Entry<String, String> t) {
        // They are different, we need to merge them
        StageTree st = new StageTree(rp.getObjectDatabase().getTree(shadow.getTrees().get(t.getKey())), capacity);
        st.apply(rp, rp.getObjectDatabase().getTree(t.getValue()), cRef);
        stagedTrees.put(t.getKey(), st);
    }

    // The shadow has no subtree with this key: stage the incoming one as-is.
    private void addStageTree(VersionedRepo rp, Map.Entry<String, String> t) {
        StageTree st = new StageTree(rp.getObjectDatabase().getTree(t.getValue()), capacity);
        stagedTrees.put(t.getKey(), st);
    }

    /**
     * Commit the changes made on this stage to the repo, returning the
     * reference for the TreeObject we will save for this.
     *
     * @param rp          repo the new objects are written to
     * @param commitRef   commit reference stamped onto newly written documents
     * @param collector   accumulates references/names written during the commit
     * @param savedDocRefs cache of already-written documents, shared across the recursion
     * @return reference of the newly written tree object
     */
    public String commitStage(VersionedRepo rp, String commitRef, CommitCollector collector,
            Map<DocumentObject, String> savedDocRefs) {
        // We create a new tree
        TreeObject newTreeObject = new TreeObject();
        // We need to work out a new document bag structure, by removing and resaving any document
        // bags that have had items removed... We then add back any documents back into the bags
        // that have room
        Set<DocumentBagObject> needToSave = new HashSet<DocumentBagObject>();
        Map<String, DocumentBagObject> maybeReuse = new HashMap<String, DocumentBagObject>();
        Set<String> refsToSave = new HashSet<String>();
        workOutCommitChanges(rp, needToSave, maybeReuse, refsToSave);
        applyNewSaves(newTreeObject, refsToSave);

        // For the ones in needToSave, we can use them to hopefully fill a gap
        Map<String, DocumentObject> combinedSet = rebalanceTree();
        for (Map.Entry<String, DocumentObject> entries : combinedSet.entrySet()) {
            // If the document is already in the savedDocRefs, we don't need to save it, we should
            // simply get the reference from that
            String reference = null;
            if (savedDocRefs.containsKey(entries.getValue())) {
                reference = savedDocRefs.get(entries.getValue());
            } else {
                entries.getValue().setCommitRef(commitRef);
                reference = rp.getObjectDatabase().writeDocument(entries.getValue());
                savedDocRefs.put(entries.getValue(), reference);
            }
            collector.addDocReference(reference);
            collector.addDocName(entries.getKey());
            // Find a spot to put this reference in
            boolean done = false;
            for (DocumentBagObject d : needToSave) {
                if (d.getDocRefs().size() < capacity) {
                    d.getDocRefs().put(entries.getKey(), reference);
                    done = true;
                    break;
                }
            }
            if (!done && maybeReuse.size() != 0) {
                // Pull an arbitrary under-capacity bag out of the reuse pool.
                Map.Entry<String, DocumentBagObject> d = maybeReuse.entrySet().iterator().next();
                d.getValue().getDocRefs().put(entries.getKey(), reference);
                needToSave.add(d.getValue());
                maybeReuse.remove(d.getKey());
                done = true;
            }
            if (!done) {
                DocumentBagObject dNew = new DocumentBagObject();
                dNew.getDocRefs().put(entries.getKey(), reference);
                needToSave.add(dNew);
            }
        }

        // Add back those we didn't use
        for (String k : maybeReuse.keySet()) {
            if (!removedDocuments.contains(k)) {
                DocumentBagReference dr = new DocumentBagReference();
                dr.setBagRef(k);
                dr.setSize(maybeReuse.get(k).getDocRefs().size());
                newTreeObject.getDocuments().add(dr);
            }
        }

        // Now we need to save the need to save
        for (DocumentBagObject d : needToSave) {
            String reference = rp.getObjectDatabase().writeDocumentBag(d);
            DocumentBagReference dr = new DocumentBagReference();
            dr.setBagRef(reference);
            dr.setSize(d.getDocRefs().size());
            newTreeObject.getDocuments().add(dr);
        }

        newTreeObject.getTrees().putAll(shadow.getTrees());
        // Add the new ones, do the trees first
        for (Map.Entry<String, StageTree> entries : stagedTrees.entrySet()) {
            collector.enterFolder(entries.getKey());
            String reference = entries.getValue().commitStage(rp, commitRef, collector, savedDocRefs);
            collector.addTreeReference(reference);
            collector.leaveFolder();
            collector.addFolderName(entries.getKey());
            newTreeObject.getTrees().put(entries.getKey(), reference);
        }

        // The committed tree becomes the new shadow and the stage is cleared.
        shadow = newTreeObject;
        reset();
        return rp.getObjectDatabase().writeTree(newTreeObject);
    }

    // Combine the two staging maps; reuses one of them directly when the other is empty.
    private Map<String, DocumentObject> rebalanceTree() {
        Map<String, DocumentObject> combinedSet = null;
        if (!stagedDocuments.isEmpty() && !newStagedDocuments.isEmpty()) {
            combinedSet = new HashMap<String, DocumentObject>();
            combinedSet.putAll(stagedDocuments);
            combinedSet.putAll(newStagedDocuments);
        } else if (!stagedDocuments.isEmpty()) {
            combinedSet = stagedDocuments;
        } else {
            combinedSet = newStagedDocuments;
        }
        return combinedSet;
    }

    // Carry over the bag references that survived workOutCommitChanges untouched.
    private void applyNewSaves(TreeObject newTreeObject, Set<String> refsToSave) {
        for (DocumentBagReference docRef : shadow.getDocuments()) {
            if (refsToSave.contains(docRef.getBagRef())) {
                newTreeObject.getDocuments().add(docRef);
            }
        }
    }

    /**
     * Classify each shadow bag for the commit: bags touched by staged or removed documents go to
     * needToSave (rewritten), under-capacity untouched bags go to maybeReuse, and the rest keep
     * their reference in refsToSave.
     */
    private void workOutCommitChanges(VersionedRepo rp, Set<DocumentBagObject> needToSave,
            Map<String, DocumentBagObject> maybeReuse, Set<String> refsToSave) {
        for (DocumentBagReference docRef : shadow.getDocuments()) {
            refsToSave.add(docRef.getBagRef());
            DocumentBagObject dbo = rp.getObjectDatabase().getDocumentBag(docRef.getBagRef());
            boolean changed = false;
            for (String key : stagedDocuments.keySet()) {
                if (dbo.getDocRefs().containsKey(key)) {
                    // We need to be saving this back
                    dbo.getDocRefs().remove(key);
                    needToSave.add(dbo);
                    refsToSave.remove(docRef.getBagRef());
                    changed = true;
                    break;
                }
            }
            for (String rem : removedDocuments) {
                if (dbo.getDocRefs().containsKey(rem)) {
                    dbo.getDocRefs().remove(rem);
                    refsToSave.remove(docRef.getBagRef());
                    needToSave.add(dbo);
                    changed = true;
                }
            }
            if (!changed) {
                if (dbo.getDocRefs().size() < capacity) {
                    maybeReuse.put(docRef.getBagRef(), dbo);
                    refsToSave.remove(docRef.getBagRef());
                }
            }
        }
    }

    /**
     * Remove the document named by the given path from the stage.
     *
     * @param rp    repo used to resolve shadow subtrees on demand
     * @param parts path components; consumed from the front as we descend
     * @return true if a removal was recorded
     */
    public boolean removeFromStage(VersionedRepo rp, LinkedList<String> parts) {
        // Find the staged tree associated with this part and remove the document reference from it
        String currentLevel = parts.remove();
        boolean removed = false;
        if (!parts.isEmpty()) {
            if (stagedTrees.containsKey(currentLevel)) {
                // BUGFIX: the recursive result was previously discarded, so a successful removal
                // inside an already-staged subtree reported false to the caller.
                removed = stagedTrees.get(currentLevel).removeFromStage(rp, parts);
            } else if (shadow.getTrees().containsKey(currentLevel)) {
                TreeObject to = rp.getObjectDatabase().getTree(shadow.getTrees().get(currentLevel));
                StageTree newStage = new StageTree(to, capacity);
                stagedTrees.put(currentLevel, newStage);
                removed = newStage.removeFromStage(rp, parts);
            }
        } else {
            removedDocuments.add(currentLevel);
            removed = true;
        }
        return removed;
    }

    // Clear all staged state; called after construction and after each commit.
    private void reset() {
        this.stagedTrees = new HashMap<String, StageTree>();
        this.stagedDocuments = new HashMap<String, DocumentObject>();
        this.newStagedDocuments = new HashMap<String, DocumentObject>();
        this.removedDocuments = new HashSet<String>();
    }
}
/*
 * Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.hazelcast.internal.networking.nio;

import com.hazelcast.config.Config;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.instance.EndpointQualifier;
import com.hazelcast.instance.ProtocolType;
import com.hazelcast.internal.metrics.MetricsRegistry;
import com.hazelcast.internal.server.NetworkStats;
import com.hazelcast.test.HazelcastSerialClassRunner;
import com.hazelcast.test.annotation.SlowTest;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;

import java.util.Map;
import java.util.function.Function;

import static com.hazelcast.instance.ProtocolType.MEMBER;
import static com.hazelcast.test.HazelcastTestSupport.assertClusterSizeEventually;
import static com.hazelcast.test.HazelcastTestSupport.assertNotContains;
import static com.hazelcast.test.HazelcastTestSupport.assertTrueAllTheTime;
import static com.hazelcast.test.HazelcastTestSupport.assertTrueEventually;
import static com.hazelcast.test.Accessors.getNode;
import static com.hazelcast.test.HazelcastTestSupport.smallInstanceConfig;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

/**
 * Verifies that per-protocol network statistics are exposed (via endpoint managers and via the
 * metrics registry) when the advanced network is enabled, and absent when it is disabled.
 */
@RunWith(HazelcastSerialClassRunner.class)
@Category(SlowTest.class)
public class AdvancedNetworkStatsIntegrationTest extends AbstractAdvancedNetworkIntegrationTest {

    private HazelcastInstance instance1;
    private HazelcastInstance instance2;

    @Test
    public void testStats_advancedNetworkEnabledAndConnectionActive_readFromEMs() {
        Config config = createCompleteMultiSocketConfig();
        configureTcpIpConfig(config);
        instance1 = newHazelcastInstance(config);
        instance2 = startSecondInstance();

        // Member-to-member traffic must eventually register non-zero counters on both sides.
        assertTrueEventually(() -> {
            assertTrue(getBytesReceivedFromEMs(instance1, MEMBER) > 0);
            assertTrue(getBytesSentFromEMs(instance1, MEMBER) > 0);
            assertTrue(getBytesReceivedFromEMs(instance2, MEMBER) > 0);
            assertTrue(getBytesSentFromEMs(instance2, MEMBER) > 0);
        });

        assertNonMemberNetworkStatsAreZeroFromEMs(instance1);
        assertNonMemberNetworkStatsAreZeroFromEMs(instance2);
    }

    @Test
    public void testStats_advancedNetworkEnabledAndConnectionActive_readFromMetrics() {
        Config config = createCompleteMultiSocketConfig();
        configureTcpIpConfig(config);
        instance1 = newHazelcastInstance(config);
        instance2 = startSecondInstance();

        // Same expectation as above, but read through the metrics registry.
        assertTrueEventually(() -> {
            assertTrue(getBytesReceivedFromMetrics(instance1, MEMBER) > 0);
            assertTrue(getBytesSentFromMetrics(instance1, MEMBER) > 0);
            assertTrue(getBytesReceivedFromMetrics(instance2, MEMBER) > 0);
            assertTrue(getBytesSentFromMetrics(instance2, MEMBER) > 0);
        });

        assertNonMemberNetworkStatsAreZeroFromMetrics(instance1);
        assertNonMemberNetworkStatsAreZeroFromMetrics(instance2);
    }

    @Test
    public void testStats_advancedNetworkEnabledAndConnectionClosed_readFromEMs() {
        Config config = createCompleteMultiSocketConfig();
        configureTcpIpConfig(config);
        instance1 = newHazelcastInstance(config);
        instance2 = startSecondInstance();
        assertClusterSizeEventually(2, instance1, instance2);

        // Counters must survive the peer shutting down.
        instance2.shutdown();
        assertClusterSizeEventually(1, instance1);

        assertTrueEventually(() -> {
            assertTrue(getBytesReceivedFromEMs(instance1, MEMBER) > 0);
            assertTrue(getBytesSentFromEMs(instance1, MEMBER) > 0);
        });

        assertNonMemberNetworkStatsAreZeroFromEMs(instance1);
    }

    @Test
    public void testStats_advancedNetworkEnabledAndConnectionClosed_readFromMetrics() {
        Config config = createCompleteMultiSocketConfig();
        configureTcpIpConfig(config);
        instance1 = newHazelcastInstance(config);
        instance2 = startSecondInstance();
        assertClusterSizeEventually(2, instance1, instance2);

        instance2.shutdown();
        assertClusterSizeEventually(1, instance1);

        assertTrueEventually(() -> {
            assertTrue(getBytesReceivedFromMetrics(instance1, MEMBER) > 0);
            assertTrue(getBytesSentFromMetrics(instance1, MEMBER) > 0);
        });

        assertNonMemberNetworkStatsAreZeroFromMetrics(instance1);
    }

    @Test
    public void testStats_advancedNetworkDisabled() {
        instance1 = newHazelcastInstance(getUnisocketConfig(MEMBER_PORT));
        instance2 = newHazelcastInstance(getUnisocketConfig(MEMBER_PORT + 1));
        assertClusterSizeEventually(2, instance1, instance2);

        // With unisocket networking the per-protocol stats must stay zero and unregistered.
        assertTrueAllTheTime(() -> {
            assertAllNetworkStatsAreZeroFromEMs(instance1);
            assertAllNetworkStatsAreZeroFromEMs(instance2);
            assertAllNetworkStatsNotRegisteredAsMetrics(instance1);
            assertAllNetworkStatsNotRegisteredAsMetrics(instance2);
        }, 30);
    }

    /** Builds a plain (non-advanced) network config bound to the given port, joining via TCP/IP. */
    private Config getUnisocketConfig(int memberPort) {
        Config cfg = smallInstanceConfig();
        cfg.getNetworkConfig().setPort(memberPort);
        cfg.getNetworkConfig().getJoin().getMulticastConfig().setEnabled(false);
        cfg.getNetworkConfig().getJoin().getTcpIpConfig().setEnabled(true)
                .addMember("127.0.0.1:" + MEMBER_PORT)
                .addMember("127.0.0.1:" + (MEMBER_PORT + 1));
        return cfg;
    }

    private void assertAllNetworkStatsAreZeroFromEMs(HazelcastInstance instance) {
        assertEquals(0, getBytesReceivedFromEMs(instance, MEMBER));
        assertEquals(0, getBytesSentFromEMs(instance, MEMBER));
        assertNonMemberNetworkStatsAreZeroFromEMs(instance);
    }

    private void assertNonMemberNetworkStatsAreZeroFromEMs(HazelcastInstance instance) {
        for (ProtocolType type : ProtocolType.values()) {
            if (type == MEMBER) {
                continue;
            }
            assertEquals(0, getBytesReceivedFromEMs(instance, type));
            assertEquals(0, getBytesSentFromEMs(instance, type));
        }
    }

    private long getBytesReceivedFromEMs(HazelcastInstance instance, ProtocolType protocolType) {
        return getBytesTransceivedFromEMs(instance, protocolType, NetworkStats::getBytesReceived);
    }

    private long getBytesSentFromEMs(HazelcastInstance instance, ProtocolType protocolType) {
        return getBytesTransceivedFromEMs(instance, protocolType, NetworkStats::getBytesSent);
    }

    /** Sums the chosen counter over every endpoint of the requested protocol type. */
    private long getBytesTransceivedFromEMs(HazelcastInstance instance, ProtocolType protocolType,
                                            Function<NetworkStats, Long> getFn) {
        Map<EndpointQualifier, NetworkStats> statsPerEndpoint = getNode(instance)
                .getServer()
                .getNetworkStats();
        long sum = 0;
        if (statsPerEndpoint != null) {
            for (Map.Entry<EndpointQualifier, NetworkStats> entry : statsPerEndpoint.entrySet()) {
                if (entry.getKey().getType() == protocolType) {
                    sum += getFn.apply(entry.getValue());
                }
            }
        }
        return sum;
    }

    private void assertNonMemberNetworkStatsAreZeroFromMetrics(HazelcastInstance instance) {
        for (ProtocolType type : ProtocolType.values()) {
            if (type == MEMBER) {
                continue;
            }
            assertEquals(0, getBytesReceivedFromMetrics(instance, type));
            assertEquals(0, getBytesSentFromMetrics(instance, type));
        }
    }

    private long getBytesReceivedFromMetrics(HazelcastInstance instance, ProtocolType protocolType) {
        MetricsRegistry registry = getNode(instance).nodeEngine.getMetricsRegistry();
        return registry.newLongGauge("tcp.bytesReceived." + protocolType.name()).read();
    }

    private long getBytesSentFromMetrics(HazelcastInstance instance, ProtocolType protocolType) {
        MetricsRegistry registry = getNode(instance).nodeEngine.getMetricsRegistry();
        return registry.newLongGauge("tcp.bytesSend." + protocolType.name()).read();
    }

    private void assertAllNetworkStatsNotRegisteredAsMetrics(HazelcastInstance instance) {
        MetricsRegistry registry = getNode(instance).nodeEngine.getMetricsRegistry();
        for (ProtocolType type : ProtocolType.values()) {
            assertNotContains(registry.getNames(), "tcp.bytesReceived." + type.name());
            assertNotContains(registry.getNames(), "tcp.bytesSend." + type.name());
        }
    }

    /** Boots a second member configured to join the first and verifies a two-member cluster forms. */
    private HazelcastInstance startSecondInstance() {
        Config config = prepareJoinConfigForSecondMember(MEMBER_PORT);
        HazelcastInstance secondInstance = newHazelcastInstance(config);
        assertEquals(2, secondInstance.getCluster().getMembers().size());
        return secondInstance;
    }
}
/*****************************************************************************
 * Source code information
 * -----------------------
 * Original author    Ian Dickinson, HP Labs Bristol
 * Author email       ian_dickinson@users.sourceforge.net
 * Package            Jena 2
 * Web                http://sourceforge.net/projects/jena/
 * Created            25-Mar-2003
 * Filename           $RCSfile: OntologyImpl.java,v $
 * Revision           $Revision: 1.2 $
 * Release status     $State: Exp $
 *
 * Last modified on   $Date: 2009/10/06 13:04:42 $
 *               by   $Author: ian_dickinson $
 *
 * (c) Copyright 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Hewlett-Packard Development Company, LP
 * (see footer for full conditions)
 *****************************************************************************/

// Package
///////////////
package com.hp.hpl.jena.ontology.impl;

// Imports
///////////////
import com.hp.hpl.jena.enhanced.*;
import com.hp.hpl.jena.graph.Node;
import com.hp.hpl.jena.ontology.*;
import com.hp.hpl.jena.rdf.model.*;
import com.hp.hpl.jena.util.iterator.ExtendedIterator;

/**
 * <p>
 * Implementation of the Ontology interface, encapsulating nodes that hold the
 * meta-data about whole ontologies.
 * </p>
 *
 * @author Ian Dickinson, HP Labs
 *         (<a href="mailto:ian_dickinson@users.sourceforge.net" >email</a>)
 * @version CVS $Id: OntologyImpl.java,v 1.2 2009/10/06 13:04:42 ian_dickinson Exp $
 */
public class OntologyImpl extends OntResourceImpl implements Ontology {
    // Constants
    //////////////////////////////////

    // Static variables
    //////////////////////////////////

    /**
     * A factory for generating Ontology facets from nodes in enhanced graphs.
     */
    @SuppressWarnings("hiding")
    public static Implementation factory = new Implementation() {
        @Override
        public EnhNode wrap( Node n, EnhGraph eg ) {
            if (canWrap( n, eg )) {
                return new OntologyImpl( n, eg );
            } else {
                throw new ConversionException( "Cannot convert node " + n + " to Ontology");
            }
        }

        @Override
        public boolean canWrap( Node node, EnhGraph eg ) {
            // node will support being an Ontology facet if it has rdf:type owl:Ontology or equivalent
            Profile profile = (eg instanceof OntModel) ? ((OntModel) eg).getProfile() : null;
            return (profile != null) && profile.isSupported( node, eg, Ontology.class );
        }
    };

    // Instance variables
    //////////////////////////////////

    // Constructors
    //////////////////////////////////

    /**
     * <p>
     * Construct an ontology metadata node represented by the given node in the given graph.
     * </p>
     *
     * @param n The node that represents the resource
     * @param g The enh graph that contains n
     */
    public OntologyImpl( Node n, EnhGraph g ) {
        super( n, g );
    }

    // External signature methods
    //////////////////////////////////

    // imports

    /**
     * <p>Assert that this ontology imports only the given ontology. Any existing
     * statements for <code>imports</code> will be removed.</p>
     * @param res Represents a resource that this ontology imports.
     * @exception OntProfileException If the {@link Profile#IMPORTS()} property is not supported in the current language profile.
     */
    public void setImport( Resource res ) {
        setPropertyValue( getProfile().IMPORTS(), "IMPORTS", res );
    }

    /**
     * <p>Add a resource representing an ontology that this ontology
     * (strictly, the ontology represented by this node) imports.</p>
     * @param res Represents a resource that this ontology imports.
     * @exception OntProfileException If the {@link Profile#IMPORTS()} property is not supported in the current language profile.
     */
    public void addImport( Resource res ) {
        addPropertyValue( getProfile().IMPORTS(), "IMPORTS", res );
    }

    /**
     * <p>Answer a resource that represents an ontology imported by this ontology. If there is
     * more than one such resource, an arbitrary selection is made.</p>
     * @return An ont resource representing a resource that this ontology imports
     * @exception OntProfileException If the {@link Profile#IMPORTS()} property is not supported in the current language profile.
     */
    public OntResource getImport() {
        return objectAsResource( getProfile().IMPORTS(), "IMPORTS" );
    }

    /**
     * <p>Answer an iterator over all of the resources representing ontologies imported by this ontology.
     * Each element of the iterator will be an {@link OntResource}.</p>
     * @return An iterator over the ontology import resources
     * @exception OntProfileException If the {@link Profile#IMPORTS()} property is not supported in the current language profile.
     */
    public ExtendedIterator<OntResource> listImports() {
        return listAs( getProfile().IMPORTS(), "IMPORTS", OntResource.class );
    }

    /**
     * <p>Answer true if this ontology (the ontology represented by this
     * resource) imports the given resource.</p>
     * @param res A resource to test against
     * @return True if this ontology imports the ontology represented by <code>res</code>
     */
    public boolean imports( Resource res ) {
        return hasPropertyValue( getProfile().IMPORTS(), "IMPORTS", res );
    }

    /**
     * <p>Remove the statement that this ontology imports the ontology represented by the given resource. If this statement
     * is not true of the current model, nothing happens.</p>
     * @param res A resource that represents an ontology that is no longer to be imported
     */
    public void removeImport( Resource res ) {
        removePropertyValue( getProfile().IMPORTS(), "IMPORTS", res );
    }

    // backwardCompatibleWith

    /**
     * <p>Assert that this ontology is backward compatible with the given ontology. Any existing
     * statements for <code>backwardCompatibleWith</code> will be removed.</p>
     * @param res Represents a resource that this ontology is compatible with.
     * @exception OntProfileException If the {@link Profile#BACKWARD_COMPATIBLE_WITH} property is not supported in the current language profile.
     */
    public void setBackwardCompatibleWith( Resource res ) {
        setPropertyValue( getProfile().BACKWARD_COMPATIBLE_WITH(), "BACKWARD_COMPATIBLE_WITH", res );
    }

    /**
     * <p>Add a resource representing an ontology that this ontology
     * (strictly, the ontology represented by this node) is backwards compatible with.</p>
     * @param res Represents a resource that this ontology is compatible with.
     * @exception OntProfileException If the {@link Profile#BACKWARD_COMPATIBLE_WITH} property is not supported in the current language profile.
     */
    public void addBackwardCompatibleWith( Resource res ) {
        addPropertyValue( getProfile().BACKWARD_COMPATIBLE_WITH(), "BACKWARD_COMPATIBLE_WITH", res );
    }

    /**
     * <p>Answer a resource that represents an ontology that is backwards compatible with this ontology. If there is
     * more than one such resource, an arbitrary selection is made.</p>
     * @return An ont resource representing an ontology that this ontology is compatible with
     * @exception OntProfileException If the {@link Profile#BACKWARD_COMPATIBLE_WITH} property is not supported in the current language profile.
     */
    public OntResource getBackwardCompatibleWith() {
        return objectAsResource( getProfile().BACKWARD_COMPATIBLE_WITH(), "BACKWARD_COMPATIBLE_WITH" );
    }

    /**
     * <p>Answer an iterator over all of the resources representing
     * ontologies that this ontology is backwards compatible with.
     * Each element of the iterator will be an {@link OntResource}.</p>
     * @return An iterator over the ontology resources compatible with this ontology
     * @exception OntProfileException If the {@link Profile#BACKWARD_COMPATIBLE_WITH} property is not supported in the current language profile.
     */
    public ExtendedIterator<OntResource> listBackwardCompatibleWith() {
        return listAs( getProfile().BACKWARD_COMPATIBLE_WITH(), "BACKWARD_COMPATIBLE_WITH", OntResource.class );
    }

    /**
     * <p>Answer true if this ontology (the ontology represented by this
     * resource) is backward compatible with the given resource.</p>
     * @param res A resource to test against
     * @return True if this ontology is compatible with the ontology represented by <code>res</code>
     */
    public boolean isBackwardCompatibleWith( Resource res ) {
        return hasPropertyValue( getProfile().BACKWARD_COMPATIBLE_WITH(), "BACKWARD_COMPATIBLE_WITH", res );
    }

    /**
     * <p>Remove the statement that this ontology is backwards compatible with
     * the ontology represented by the given resource. If this statement
     * is not true of the current model, nothing happens.</p>
     * @param res A resource that represents an ontology that is no longer to be imported
     */
    public void removeBackwardCompatibleWith( Resource res ) {
        removePropertyValue( getProfile().BACKWARD_COMPATIBLE_WITH(), "BACKWARD_COMPATIBLE_WITH", res );
    }

    // priorVersion

    /**
     * <p>Assert that this ontology is a new version of the given ontology. Any existing
     * statements for <code>priorVersion</code> will be removed.</p>
     * @param res Represents a resource that this ontology supersedes.
     * @exception OntProfileException If the {@link Profile#PRIOR_VERSION} property is not supported in the current language profile.
     */
    public void setPriorVersion( Resource res ) {
        setPropertyValue( getProfile().PRIOR_VERSION(), "PRIOR_VERSION", res );
    }

    /**
     * <p>Add a resource representing an ontology that this ontology
     * (strictly, the ontology represented by this node) supersedes.</p>
     * @param res Represents a resource that this ontology supersedes.
     * @exception OntProfileException If the {@link Profile#PRIOR_VERSION} property is not supported in the current language profile.
     */
    public void addPriorVersion( Resource res ) {
        addPropertyValue( getProfile().PRIOR_VERSION(), "PRIOR_VERSION", res );
    }

    /**
     * <p>Answer a resource that represents an ontology that is superseded by this ontology. If there is
     * more than one such resource, an arbitrary selection is made.</p>
     * @return An ont resource representing an ontology that this ontology supersedes
     * @exception OntProfileException If the {@link Profile#PRIOR_VERSION} property is not supported in the current language profile.
     */
    public OntResource getPriorVersion() {
        return objectAsResource( getProfile().PRIOR_VERSION(), "PRIOR_VERSION" );
    }

    /**
     * <p>Answer an iterator over all of the resources representing
     * ontologies that this ontology supersedes.
     * Each element of the iterator will be an {@link OntResource}.</p>
     * @return An iterator over the ontology resources superseded by this ontology
     * @exception OntProfileException If the {@link Profile#PRIOR_VERSION} property is not supported in the current language profile.
     */
    public ExtendedIterator<OntResource> listPriorVersion() {
        return listAs( getProfile().PRIOR_VERSION(), "PRIOR_VERSION", OntResource.class );
    }

    /**
     * <p>Answer true if this ontology (the ontology represented by this
     * resource) supersedes the given resource.</p>
     * @param res A resource to test against
     * @return True if this ontology supersedes the ontology represented by <code>res</code>
     */
    public boolean hasPriorVersion( Resource res ) {
        return hasPropertyValue( getProfile().PRIOR_VERSION(), "PRIOR_VERSION", res );
    }

    /**
     * <p>Remove the statement that the given ontology is a prior version of this ontology. If this statement
     * is not true of the current model, nothing happens.</p>
     * @param res A resource that represents an ontology that is no longer a prior version of this ontology
     */
    public void removePriorVersion( Resource res ) {
        removePropertyValue( getProfile().PRIOR_VERSION(), "PRIOR_VERSION", res );
    }

    // incompatibleWith

    /**
     * <p>Assert that this ontology is incompatible with the given ontology. Any existing
     * statements for <code>incompatibleWith</code> will be removed.</p>
     * @param res Represents a resource that this ontology is incompatible with.
     * @exception OntProfileException If the {@link Profile#INCOMPATIBLE_WITH} property is not supported in the current language profile.
     */
    public void setIncompatibleWith( Resource res ) {
        setPropertyValue( getProfile().INCOMPATIBLE_WITH(), "INCOMPATIBLE_WITH", res );
    }

    /**
     * <p>Add a resource representing an ontology that this ontology
     * (strictly, the ontology represented by this node) is incompatible with.</p>
     * @param res Represents a resource that this ontology is incompatible with.
     * @exception OntProfileException If the {@link Profile#INCOMPATIBLE_WITH} property is not supported in the current language profile.
     */
    public void addIncompatibleWith( Resource res ) {
        addPropertyValue( getProfile().INCOMPATIBLE_WITH(), "INCOMPATIBLE_WITH", res );
    }

    /**
     * <p>Answer a resource that represents an ontology that is incompatible with this ontology. If there is
     * more than one such resource, an arbitrary selection is made.</p>
     * @return An ont resource representing an ontology that this ontology is incompatible with
     * @exception OntProfileException If the {@link Profile#INCOMPATIBLE_WITH} property is not supported in the current language profile.
     */
    public OntResource getIncompatibleWith() {
        return objectAsResource( getProfile().INCOMPATIBLE_WITH(), "INCOMPATIBLE_WITH" );
    }

    /**
     * <p>Answer an iterator over all of the resources representing
     * ontologies that this ontology is incompatible with.
     * Each element of the iterator will be an {@link OntResource}.</p>
     * @return An iterator over the ontology resources that this ontology is incompatible with
     * @exception OntProfileException If the {@link Profile#INCOMPATIBLE_WITH} property is not supported in the current language profile.
     */
    public ExtendedIterator<OntResource> listIncompatibleWith() {
        return listAs( getProfile().INCOMPATIBLE_WITH(), "INCOMPATIBLE_WITH", OntResource.class );
    }

    /**
     * <p>Answer true if this ontology (the ontology represented by this
     * resource) is incompatible with the given resource.</p>
     * @param res A resource to test against
     * @return True if this ontology is incompatible with the ontology represented by <code>res</code>
     */
    public boolean isIncompatibleWith( Resource res ) {
        return hasPropertyValue( getProfile().INCOMPATIBLE_WITH(), "INCOMPATIBLE_WITH", res );
    }

    /**
     * <p>Remove the statement that the given ontology is incompatible with this ontology. If this statement
     * is not true of the current model, nothing happens.</p>
     * @param res A resource that represents an ontology that is no longer incompatible with this ontology
     */
    public void removeIncompatibleWith( Resource res ) {
        removePropertyValue( getProfile().INCOMPATIBLE_WITH(), "INCOMPATIBLE_WITH", res );
    }

    // Internal implementation methods
    //////////////////////////////////

    //==============================================================================
    // Inner class definitions
    //==============================================================================

}

/*
    (c) Copyright 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Hewlett-Packard Development Company, LP
    All rights reserved.

    Redistribution and use in source and binary forms, with or without
    modification, are permitted provided that the following conditions
    are met:

    1. Redistributions of source code must retain the above copyright
       notice, this list of conditions and the following disclaimer.

    2. Redistributions in binary form must reproduce the above copyright
       notice, this list of conditions and the following disclaimer in the
       documentation and/or other materials provided with the distribution.

    3. The name of the author may not be used to endorse or promote products
       derived from this software without specific prior written permission.

    THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
    IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
    OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
    IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
    INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
    NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
    THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package owltools.mooncat;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.log4j.Logger;
import org.coode.owlapi.manchesterowlsyntax.ManchesterOWLSyntaxOntologyFormat;
import org.obolibrary.obo2owl.Obo2OWLConstants;
import org.semanticweb.owlapi.io.OWLFunctionalSyntaxOntologyFormat;
import org.semanticweb.owlapi.io.OWLXMLOntologyFormat;
import org.semanticweb.owlapi.io.RDFXMLOntologyFormat;
import org.semanticweb.owlapi.model.AddImport;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.semanticweb.owlapi.model.OWLClass;
import org.semanticweb.owlapi.model.OWLDataFactory;
import org.semanticweb.owlapi.model.OWLLogicalAxiom;
import org.semanticweb.owlapi.model.OWLNamedIndividual;
import org.semanticweb.owlapi.model.OWLNamedObject;
import org.semanticweb.owlapi.model.OWLObjectProperty;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyCreationException;
import org.semanticweb.owlapi.model.OWLOntologyFormat;
import org.semanticweb.owlapi.model.OWLOntologyManager;
import org.semanticweb.owlapi.model.OWLOntologyStorageException;

/**
 * @author cjm
 *
 * Extracts bridge ontologies from an ontology. A bridge ontology consists solely of class axioms where
 * the signature of the axiom contains classes that belong to two or more distinct ontologies, together with
 * any necessary object property axioms
 * <p>
 * Here the notion of belonging is determined by IRI - e.g. GO_nnnnn belongs to go.
 * <p>
 * This procedure enforces a naming convention whereby the bridge ontology is called
 * <p>
 * &lt;srcOntId&gt;-bridge-to-&lt;xOnt&gt;
 * <p>
 * If an axiom bridges two or more ontologies, then specialized ontologies of the form
 * <p>
 * &lt;srcOntId&gt;-bridge-to-&lt;xOnt1&gt;-and...-and-&lt;xOntN&gt;
 * <p>
 * are created
 * <p>
 * In addition, an importer ontology is created
 */
public class BridgeExtractor {

	OWLOntology ontology;
	public String subDir = "bridge/";
	private static final Logger LOG = Logger.getLogger(BridgeExtractor.class);
	// name (e.g. "go-bridge-to-cl") -> bridge ontology; populated by extractBridgeOntologies()
	Map<String, OWLOntology> nameToOntologyMap;
	// importer ontology that imports every extracted bridge ontology plus <srcOntId>/core.owl
	OWLOntology importOntology;
	Set<Combo> combos;

	/**
	 * Maps a set of ontologies (e.g. cl, uberon) to a single target (e.g. uberon-plus-cl)
	 */
	public class Combo {
		Set<String> srcOntIds;
		String tgtOntId;

		public Combo(Set<String> srcOntIds, String tgtOntId) {
			super();
			this.srcOntIds = srcOntIds;
			this.tgtOntId = tgtOntId;
		}

		/**
		 * if srcOntIds is a subset of xOntIds, then replace that subset
		 * with tgtOntId
		 *
		 * @param xOntIds list of ontology ids; mutated in place and kept sorted
		 */
		public void reduce(List<String> xOntIds) {
			List<String> x = new ArrayList<String>(xOntIds);
			x.removeAll(srcOntIds);
			// every member of srcOntIds was present in xOntIds iff removing them
			// shrank the list by exactly |srcOntIds|
			if ((xOntIds.size() - x.size()) == srcOntIds.size()) {
				xOntIds.removeAll(srcOntIds);
				xOntIds.add(tgtOntId);
				Collections.sort(xOntIds);
			}
		}
	}

	public BridgeExtractor(OWLOntology ontology) {
		super();
		this.ontology = ontology;
	}

	/**
	 * Register a combo mapping used by {@link Combo#reduce(List)} during bridge naming.
	 *
	 * @param tgtOntId the single target ontology id
	 * @param srcOntIds the set of source ontology ids to collapse into the target
	 */
	public void addCombo(String tgtOntId, Set<String> srcOntIds) {
		Combo combo = new Combo(srcOntIds, tgtOntId);
		if (combos == null)
			combos = new HashSet<Combo>();
		combos.add(combo);
	}

	/**
	 * given a source ontology O:
	 * <p>
	 * For each axiom A in O :
	 * <ul>
	 * <li>get signature of A</li>
	 * <li>for every object in signature, calculate the set of ontologies these objects belong to</li>
	 * <li>if &gt;1 ontologies, then add the axiom to a bridge ontology dedicated to this list of ontologies
	 * <ul>
	 * <li>add any required object properties</li>
	 * <li>optionally remove the axiom from the source</li>
	 * </ul>
	 * </li>
	 * </ul>
	 * @param srcOntId id of the source ontology (e.g. "go")
	 * @param isRemoveBridgeAxiomsFromSource if true, bridging axioms are removed from the source ontology
	 * @return the importer ontology, which imports all generated bridge ontologies
	 * @throws OWLOntologyCreationException if a bridge or importer ontology cannot be created
	 */
	public OWLOntology extractBridgeOntologies(String srcOntId, boolean isRemoveBridgeAxiomsFromSource)
			throws OWLOntologyCreationException {
		nameToOntologyMap = new HashMap<String, OWLOntology>();
		Set<OWLAxiom> rmAxioms = new HashSet<OWLAxiom>();
		for (OWLLogicalAxiom ax : ontology.getLogicalAxioms()) {
			// collect the ids of all foreign ontologies referenced by this axiom
			List<String> xOntIds = new ArrayList<String>();
			Set<OWLClass> cs = ax.getClassesInSignature();
			for (OWLClass c : cs) {
				String xOntId = getOntId(c);
				if (!xOntId.equals(srcOntId) && !xOntIds.contains(xOntId))
					xOntIds.add(xOntId);
			}
			for (OWLNamedIndividual i : ax.getIndividualsInSignature()) {
				String xOntId = getOntId(i);
				if (!xOntId.equals(srcOntId) && !xOntIds.contains(xOntId))
					xOntIds.add(xOntId);
			}
			if (xOntIds.size() > 0) {
				// at least one foreign ontology => axiom bridges 2 or more ontologies
				Collections.sort(xOntIds);
				OWLOntology xOnt = getBridgeOntology(srcOntId, xOntIds);
				if (isRemoveBridgeAxiomsFromSource) {
					rmAxioms.add(ax);
				}
				ontology.getOWLOntologyManager().addAxiom(xOnt, ax);
				// carry over any object properties the axiom depends on
				for (OWLObjectProperty p : ax.getObjectPropertiesInSignature()) {
					addObjectProperty(p, xOnt);
				}
			}
		}
		ontology.getOWLOntologyManager().removeAxioms(ontology, rmAxioms);

		// make importer
		IRI xIRI = IRI.create(Obo2OWLConstants.DEFAULT_IRI_PREFIX + srcOntId + "/importer.owl");
		importOntology = ontology.getOWLOntologyManager().createOntology(xIRI);
		OWLDataFactory df = ontology.getOWLOntologyManager().getOWLDataFactory();
		for (OWLOntology xo : nameToOntologyMap.values()) {
			AddImport ai = new AddImport(importOntology,
					df.getOWLImportsDeclaration(xo.getOntologyID().getOntologyIRI()));
			importOntology.getOWLOntologyManager().applyChange(ai);
		}
		LOG.info("Getting importer: " + importOntology);
		AddImport ai = new AddImport(importOntology,
				df.getOWLImportsDeclaration(IRI.create(Obo2OWLConstants.DEFAULT_IRI_PREFIX + srcOntId + "/core.owl")));
		importOntology.getOWLOntologyManager().applyChange(ai);

		return importOntology;
	}

	// Copies the declaration and the characteristic axioms of p into xOnt,
	// unless xOnt already declares p.
	private void addObjectProperty(OWLObjectProperty p, OWLOntology xOnt) {
		if (xOnt.getDeclarationAxioms(p).size() > 0) {
			return;
		}
		OWLOntologyManager m = ontology.getOWLOntologyManager();
		OWLDataFactory df = m.getOWLDataFactory();
		m.addAxiom(xOnt, df.getOWLDeclarationAxiom(p));
		for (OWLAxiom ax : ontology.getAxioms(p)) {
			m.addAxiom(xOnt, ax);
		}
		// TODO
	}

	// Finds or creates the bridge ontology named <srcOntId>-bridge-to-<x1>-and-...-and-<xN>,
	// after applying any registered combo reductions to xOntIds.
	private OWLOntology getBridgeOntology(String srcOntId, List<String> xOntIds)
			throws OWLOntologyCreationException {
		StringBuilder n = new StringBuilder(srcOntId + "-bridge-to-");
		if (combos != null) {
			for (Combo combo : combos) {
				combo.reduce(xOntIds);
			}
		}
		int i = 0;
		for (String xo : xOntIds) {
			if (i > 0)
				n.append("-and-");
			n.append(xo);
			i++;
		}
		IRI xIRI = IRI.create(Obo2OWLConstants.DEFAULT_IRI_PREFIX + srcOntId + "/bridge/" + n + ".owl");
		OWLOntology xo = ontology.getOWLOntologyManager().getOntology(xIRI);
		if (xo == null) {
			LOG.info("Creating " + xIRI);
			xo = ontology.getOWLOntologyManager().createOntology(xIRI);
		}
		// FIX: register the bridge ontology even if the manager already held it
		// (previously only freshly created ontologies were tracked, so a repeat
		// extraction silently dropped existing bridges from save/import).
		nameToOntologyMap.put(n.toString(), xo);
		return xo;
	}

	// Derives an ontology id from an obo-style IRI, e.g.
	// http://purl.obolibrary.org/obo/GO_0008150 -> "go".
	private String getOntId(OWLNamedObject c) {
		String iriStr = c.getIRI().toString();
		iriStr = iriStr.replaceAll(".*/", ""); // up to final slash
		iriStr = iriStr.replaceAll("_\\d+", ""); // assumes obo-style
		//LOG.info(c + " ==> "+iriStr);
		return iriStr.toLowerCase();
	}

	/**
	 * Save all extracted bridge ontologies plus the importer ontology as RDF/XML.
	 *
	 * @param dir target directory, or null for the current directory
	 * @throws FileNotFoundException if a target file cannot be created
	 * @throws OWLOntologyStorageException if the OWL API fails to serialize an ontology
	 */
	public void saveBridgeOntologies(String dir) throws FileNotFoundException, OWLOntologyStorageException {
		saveBridgeOntologies(dir, new RDFXMLOntologyFormat());
	}

	/**
	 * Save all extracted bridge ontologies plus the importer ontology in the given format.
	 *
	 * @param dir target directory, or null for the current directory
	 * @param format serialization format (determines the file suffix)
	 * @throws FileNotFoundException if a target file cannot be created
	 * @throws OWLOntologyStorageException if the OWL API fails to serialize an ontology
	 */
	public void saveBridgeOntologies(String dir, OWLOntologyFormat format)
			throws FileNotFoundException, OWLOntologyStorageException {
		for (Map.Entry<String, OWLOntology> e : nameToOntologyMap.entrySet()) {
			String n = e.getKey();
			String fn = dir == null ? "bridge/" + n : dir + "/bridge/" + n;
			save(fn, format, e.getValue());
		}
		String n = "importer";
		String ifn = dir == null ? n : dir + "/" + n;
		save(ifn, format, importOntology);
	}

	/**
	 * Save a single ontology to fn + "." + format suffix, creating parent directories as needed.
	 *
	 * @param fn file name without suffix
	 * @param format serialization format
	 * @param xo ontology to save
	 * @throws FileNotFoundException if the file cannot be created
	 * @throws OWLOntologyStorageException if the OWL API fails to serialize the ontology
	 */
	public void save(String fn, OWLOntologyFormat format, OWLOntology xo)
			throws FileNotFoundException, OWLOntologyStorageException {
		fn = fn + "." + getSuffix(format);
		File file = new File(fn);
		file.getParentFile().mkdirs();
		OutputStream os = new FileOutputStream(file);
		LOG.info("Saving: " + xo);
		try {
			ontology.getOWLOntologyManager().saveOntology(xo, format, os);
		}
		finally {
			// FIX: the stream was previously never closed (file handle leak)
			try {
				os.close();
			} catch (IOException e) {
				LOG.error("Could not close output stream for " + fn, e);
			}
		}
	}

	// Maps a format object to the conventional file suffix; defaults to "owl".
	private String getSuffix(OWLOntologyFormat format) {
		if (format instanceof RDFXMLOntologyFormat) {
			return "owl";
		}
		if (format instanceof OWLFunctionalSyntaxOntologyFormat) {
			return "ofn";
		}
		if (format instanceof OWLXMLOntologyFormat) {
			return "owx";
		}
		if (format instanceof ManchesterOWLSyntaxOntologyFormat) {
			return "omn";
		}
		return "owl";
	}

}
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution

// Automatically generated by LOXI from template of_class.java
// Do not modify

package org.projectfloodlight.openflow.protocol.ver14;

import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import com.google.common.collect.ImmutableList;
import java.util.Set;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;

/**
 * OpenFlow 1.4 GROUP_MOD message with command MODIFY. Immutable; constructed
 * through the nested builders and (de)serialized by the nested Reader/Writer.
 */
class OFGroupModifyVer14 implements OFGroupModify {
    private static final Logger logger = LoggerFactory.getLogger(OFGroupModifyVer14.class);
    // version: 1.4
    final static byte WIRE_VERSION = 5;
    final static int MINIMUM_LENGTH = 16;

    private final static long DEFAULT_XID = 0x0L;
    private final static OFGroup DEFAULT_GROUP_ID = OFGroup.ALL;
    private final static List<OFBucket> DEFAULT_BUCKETS = ImmutableList.<OFBucket>of();

    // OF message fields
    private final long xid;
    private final OFGroupType groupType;
    private final OFGroup group;
    private final List<OFBucket> buckets;
//

    // package private constructor - used by readers, builders, and factory
    OFGroupModifyVer14(long xid, OFGroupType groupType, OFGroup group, List<OFBucket> buckets) {
        if(groupType == null) {
            throw new NullPointerException("OFGroupModifyVer14: property groupType cannot be null");
        }
        if(group == null) {
            throw new NullPointerException("OFGroupModifyVer14: property group cannot be null");
        }
        if(buckets == null) {
            throw new NullPointerException("OFGroupModifyVer14: property buckets cannot be null");
        }
        this.xid = xid;
        this.groupType = groupType;
        this.group = group;
        this.buckets = buckets;
    }

    // Accessors for OF message fields
    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_14;
    }

    @Override
    public OFType getType() {
        return OFType.GROUP_MOD;
    }

    @Override
    public long getXid() {
        return xid;
    }

    @Override
    public OFGroupModCommand getCommand() {
        return OFGroupModCommand.MODIFY;
    }

    @Override
    public OFGroupType getGroupType() {
        return groupType;
    }

    @Override
    public OFGroup getGroup() {
        return group;
    }

    @Override
    public List<OFBucket> getBuckets() {
        return buckets;
    }

    @Override
    public OFGroupBucket getCommandBucketId()throws UnsupportedOperationException {
        throw new UnsupportedOperationException("Property commandBucketId not supported in version 1.4");
    }

    @Override
    public List<OFGroupProp> getProperties()throws UnsupportedOperationException {
        throw new UnsupportedOperationException("Property properties not supported in version 1.4");
    }

    public OFGroupModify.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    static class BuilderWithParent implements OFGroupModify.Builder {
        final OFGroupModifyVer14 parentMessage;

        // OF message fields
        private boolean xidSet;
        private long xid;
        private boolean groupTypeSet;
        private OFGroupType groupType;
        private boolean groupSet;
        private OFGroup group;
        private boolean bucketsSet;
        private List<OFBucket> buckets;

        BuilderWithParent(OFGroupModifyVer14 parentMessage) {
            this.parentMessage = parentMessage;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_14;
        }

        @Override
        public OFType getType() {
            return OFType.GROUP_MOD;
        }

        @Override
        public long getXid() {
            return xid;
        }

        @Override
        public OFGroupModify.Builder setXid(long xid) {
            this.xid = xid;
            this.xidSet = true;
            return this;
        }

        @Override
        public OFGroupModCommand getCommand() {
            return OFGroupModCommand.MODIFY;
        }

        @Override
        public OFGroupType getGroupType() {
            return groupType;
        }

        @Override
        public OFGroupModify.Builder setGroupType(OFGroupType groupType) {
            this.groupType = groupType;
            this.groupTypeSet = true;
            return this;
        }

        @Override
        public OFGroup getGroup() {
            return group;
        }

        @Override
        public OFGroupModify.Builder setGroup(OFGroup group) {
            this.group = group;
            this.groupSet = true;
            return this;
        }

        @Override
        public List<OFBucket> getBuckets() {
            return buckets;
        }

        @Override
        public OFGroupModify.Builder setBuckets(List<OFBucket> buckets) {
            this.buckets = buckets;
            this.bucketsSet = true;
            return this;
        }

        @Override
        public OFGroupBucket getCommandBucketId()throws UnsupportedOperationException {
            throw new UnsupportedOperationException("Property commandBucketId not supported in version 1.4");
        }

        @Override
        public OFGroupModify.Builder setCommandBucketId(OFGroupBucket commandBucketId) throws UnsupportedOperationException {
            throw new UnsupportedOperationException("Property commandBucketId not supported in version 1.4");
        }

        @Override
        public List<OFGroupProp> getProperties()throws UnsupportedOperationException {
            throw new UnsupportedOperationException("Property properties not supported in version 1.4");
        }

        @Override
        public OFGroupModify.Builder setProperties(List<OFGroupProp> properties) throws UnsupportedOperationException {
            throw new UnsupportedOperationException("Property properties not supported in version 1.4");
        }

        @Override
        public OFGroupModify build() {
            // unset fields fall back to the parent message's values
            long xid = this.xidSet ? this.xid : parentMessage.xid;
            OFGroupType groupType = this.groupTypeSet ? this.groupType : parentMessage.groupType;
            if(groupType == null)
                throw new NullPointerException("Property groupType must not be null");
            OFGroup group = this.groupSet ? this.group : parentMessage.group;
            if(group == null)
                throw new NullPointerException("Property group must not be null");
            List<OFBucket> buckets = this.bucketsSet ? this.buckets : parentMessage.buckets;
            if(buckets == null)
                throw new NullPointerException("Property buckets must not be null");

            //
            return new OFGroupModifyVer14(
                    xid,
                    groupType,
                    group,
                    buckets
                    );
        }
    }

    static class Builder implements OFGroupModify.Builder {
        // OF message fields
        private boolean xidSet;
        private long xid;
        private boolean groupTypeSet;
        private OFGroupType groupType;
        private boolean groupSet;
        private OFGroup group;
        private boolean bucketsSet;
        private List<OFBucket> buckets;

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_14;
        }

        @Override
        public OFType getType() {
            return OFType.GROUP_MOD;
        }

        @Override
        public long getXid() {
            return xid;
        }

        @Override
        public OFGroupModify.Builder setXid(long xid) {
            this.xid = xid;
            this.xidSet = true;
            return this;
        }

        @Override
        public OFGroupModCommand getCommand() {
            return OFGroupModCommand.MODIFY;
        }

        @Override
        public OFGroupType getGroupType() {
            return groupType;
        }

        @Override
        public OFGroupModify.Builder setGroupType(OFGroupType groupType) {
            this.groupType = groupType;
            this.groupTypeSet = true;
            return this;
        }

        @Override
        public OFGroup getGroup() {
            return group;
        }

        @Override
        public OFGroupModify.Builder setGroup(OFGroup group) {
            this.group = group;
            this.groupSet = true;
            return this;
        }

        @Override
        public List<OFBucket> getBuckets() {
            return buckets;
        }

        @Override
        public OFGroupModify.Builder setBuckets(List<OFBucket> buckets) {
            this.buckets = buckets;
            this.bucketsSet = true;
            return this;
        }

        @Override
        public OFGroupBucket getCommandBucketId()throws UnsupportedOperationException {
            throw new UnsupportedOperationException("Property commandBucketId not supported in version 1.4");
        }

        @Override
        public OFGroupModify.Builder setCommandBucketId(OFGroupBucket commandBucketId) throws UnsupportedOperationException {
            throw new UnsupportedOperationException("Property commandBucketId not supported in version 1.4");
        }

        @Override
        public List<OFGroupProp> getProperties()throws UnsupportedOperationException {
            throw new UnsupportedOperationException("Property properties not supported in version 1.4");
        }

        @Override
        public OFGroupModify.Builder setProperties(List<OFGroupProp> properties) throws UnsupportedOperationException {
            throw new UnsupportedOperationException("Property properties not supported in version 1.4");
        }

//
        @Override
        public OFGroupModify build() {
            // unset fields fall back to defaults; groupType has no default
            long xid = this.xidSet ? this.xid : DEFAULT_XID;
            if(!this.groupTypeSet)
                throw new IllegalStateException("Property groupType doesn't have default value -- must be set");
            if(groupType == null)
                throw new NullPointerException("Property groupType must not be null");
            OFGroup group = this.groupSet ? this.group : DEFAULT_GROUP_ID;
            if(group == null)
                throw new NullPointerException("Property group must not be null");
            List<OFBucket> buckets = this.bucketsSet ? this.buckets : DEFAULT_BUCKETS;
            if(buckets == null)
                throw new NullPointerException("Property buckets must not be null");

            return new OFGroupModifyVer14(
                    xid,
                    groupType,
                    group,
                    buckets
                    );
        }
    }

    final static Reader READER = new Reader();
    static class Reader implements OFMessageReader<OFGroupModify> {
        @Override
        public OFGroupModify readFrom(ByteBuf bb) throws OFParseError {
            int start = bb.readerIndex();
            // fixed value property version == 5
            byte version = bb.readByte();
            if(version != (byte) 0x5)
                throw new OFParseError("Wrong version: Expected=OFVersion.OF_14(5), got="+version);
            // fixed value property type == 15
            byte type = bb.readByte();
            if(type != (byte) 0xf)
                throw new OFParseError("Wrong type: Expected=OFType.GROUP_MOD(15), got="+type);
            int length = U16.f(bb.readShort());
            if(length < MINIMUM_LENGTH)
                throw new OFParseError("Wrong length: Expected to be >= " + MINIMUM_LENGTH + ", was: " + length);
            if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
                // Buffer does not have all data yet
                bb.readerIndex(start);
                return null;
            }
            if(logger.isTraceEnabled())
                logger.trace("readFrom - length={}", length);
            long xid = U32.f(bb.readInt());
            // fixed value property command == 1
            short command = bb.readShort();
            if(command != (short) 0x1)
                throw new OFParseError("Wrong command: Expected=OFGroupModCommand.MODIFY(1), got="+command);
            OFGroupType groupType = OFGroupTypeSerializerVer14.readFrom(bb);
            // pad: 1 bytes
            bb.skipBytes(1);
            OFGroup group = OFGroup.read4Bytes(bb);
            List<OFBucket> buckets = ChannelUtils.readList(bb, length - (bb.readerIndex() - start), OFBucketVer14.READER);

            OFGroupModifyVer14 groupModifyVer14 = new OFGroupModifyVer14(
                    xid,
                      groupType,
                      group,
                      buckets
                    );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", groupModifyVer14);
            return groupModifyVer14;
        }
    }

    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFGroupModifyVer14Funnel FUNNEL = new OFGroupModifyVer14Funnel();
    static class OFGroupModifyVer14Funnel implements Funnel<OFGroupModifyVer14> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFGroupModifyVer14 message, PrimitiveSink sink) {
            // fixed value property version = 5
            sink.putByte((byte) 0x5);
            // fixed value property type = 15
            sink.putByte((byte) 0xf);
            // FIXME: skip funnel of length
            sink.putLong(message.xid);
            // fixed value property command = 1
            sink.putShort((short) 0x1);
            OFGroupTypeSerializerVer14.putTo(message.groupType, sink);
            // skip pad (1 bytes)
            message.group.putTo(sink);
            FunnelUtils.putList(message.buckets, sink);
        }
    }

    public void writeTo(ByteBuf bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();
    static class Writer implements OFMessageWriter<OFGroupModifyVer14> {
        @Override
        public void write(ByteBuf bb, OFGroupModifyVer14 message) {
            int startIndex = bb.writerIndex();
            // fixed value property version = 5
            bb.writeByte((byte) 0x5);
            // fixed value property type = 15
            bb.writeByte((byte) 0xf);
            // length is length of variable message, will be updated at the end
            int lengthIndex = bb.writerIndex();
            bb.writeShort(U16.t(0));

            bb.writeInt(U32.t(message.xid));
            // fixed value property command = 1
            bb.writeShort((short) 0x1);
            OFGroupTypeSerializerVer14.writeTo(bb, message.groupType);
            // pad: 1 bytes
            bb.writeZero(1);
            message.group.write4Bytes(bb);
            ChannelUtils.writeList(bb, message.buckets);

            // update length field
            int length = bb.writerIndex() - startIndex;
            bb.setShort(lengthIndex, length);
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFGroupModifyVer14(");
        b.append("xid=").append(xid);
        b.append(", ");
        b.append("groupType=").append(groupType);
        b.append(", ");
        b.append("group=").append(group);
        b.append(", ");
        b.append("buckets=").append(buckets);
        b.append(")");
        return b.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFGroupModifyVer14 other = (OFGroupModifyVer14) obj;

        if( xid != other.xid)
            return false;
        if (groupType == null) {
            if (other.groupType != null)
                return false;
        } else if (!groupType.equals(other.groupType))
            return false;
        if (group == null) {
            if (other.group != null)
                return false;
        } else if (!group.equals(other.group))
            return false;
        if (buckets == null) {
            if (other.buckets != null)
                return false;
        } else if (!buckets.equals(other.buckets))
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;

        // FIX: fold xid into the running accumulator. The previous code read
        // "result = prime * (int) (xid ^ (xid >>> 32));" which dropped the
        // "result +" term used for every other field, weakening the hash mix.
        result = prime * result + (int) (xid ^ (xid >>> 32));
        result = prime * result + ((groupType == null) ? 0 : groupType.hashCode());
        result = prime * result + ((group == null) ? 0 : group.hashCode());
        result = prime * result + ((buckets == null) ? 0 : buckets.hashCode());
        return result;
    }

}
/* * Copyright (c) 2008-2015 Citrix Systems, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.citrix.netscaler.nitro.resource.config.cmp; import com.citrix.netscaler.nitro.resource.base.*; import com.citrix.netscaler.nitro.service.nitro_service; import com.citrix.netscaler.nitro.service.options; import com.citrix.netscaler.nitro.util.*; import com.citrix.netscaler.nitro.exception.nitro_exception; class cmpglobal_cmppolicy_binding_response extends base_response { public cmpglobal_cmppolicy_binding[] cmpglobal_cmppolicy_binding; } /** * Binding class showing the cmppolicy that can be bound to cmpglobal. */ public class cmpglobal_cmppolicy_binding extends base_resource { private String policyname; private Long priority; private String state; private String type; private Long numpol; private String policytype; private Long __count; /** * <pre> * Positive integer specifying the priority of the policy. The lower the number, the higher the priority. By default, polices within a label are evaluated in the order of their priority numbers. In the configuration utility, you can click the Priority field and edit the priority level or drag the entry to a new position in the list. If you drag the entry to a new position, the priority level is updated automatically. * </pre> */ public void set_priority(long priority) throws Exception { this.priority = new Long(priority); } /** * <pre> * Positive integer specifying the priority of the policy. 
The lower the number, the higher the priority. By default, polices within a label are evaluated in the order of their priority numbers. In the configuration utility, you can click the Priority field and edit the priority level or drag the entry to a new position in the list. If you drag the entry to a new position, the priority level is updated automatically. * </pre> */ public void set_priority(Long priority) throws Exception{ this.priority = priority; } /** * <pre> * Positive integer specifying the priority of the policy. The lower the number, the higher the priority. By default, polices within a label are evaluated in the order of their priority numbers. In the configuration utility, you can click the Priority field and edit the priority level or drag the entry to a new position in the list. If you drag the entry to a new position, the priority level is updated automatically. * </pre> */ public Long get_priority() throws Exception { return this.priority; } /** * <pre> * The current state of the policy binding. This attribute is relevant only for CLASSIC policies.<br> Possible values = ENABLED, DISABLED * </pre> */ public void set_state(String state) throws Exception{ this.state = state; } /** * <pre> * The current state of the policy binding. This attribute is relevant only for CLASSIC policies.<br> Possible values = ENABLED, DISABLED * </pre> */ public String get_state() throws Exception { return this.state; } /** * <pre> * The name of the globally bound HTTP compression policy. * </pre> */ public void set_policyname(String policyname) throws Exception{ this.policyname = policyname; } /** * <pre> * The name of the globally bound HTTP compression policy. 
* </pre> */ public String get_policyname() throws Exception { return this.policyname; } /** * <pre> * Bind point to which the policy is bound.<br> Possible values = REQ_OVERRIDE, REQ_DEFAULT, RES_OVERRIDE, RES_DEFAULT * </pre> */ public void set_type(String type) throws Exception{ this.type = type; } /** * <pre> * Bind point to which the policy is bound.<br> Possible values = REQ_OVERRIDE, REQ_DEFAULT, RES_OVERRIDE, RES_DEFAULT * </pre> */ public String get_type() throws Exception { return this.type; } /** * <pre> * The number of policies bound to the bindpoint. * </pre> */ public Long get_numpol() throws Exception { return this.numpol; } /** * <pre> * Policy type (Classic/Advanced) to be bound.Used for display.<br> Possible values = Classic Policy, Advanced Policy * </pre> */ public String get_policytype() throws Exception { return this.policytype; } /** * <pre> * converts nitro response into object and returns the object array in case of get request. * </pre> */ protected base_resource[] get_nitro_response(nitro_service service, String response) throws Exception{ cmpglobal_cmppolicy_binding_response result = (cmpglobal_cmppolicy_binding_response) service.get_payload_formatter().string_to_resource(cmpglobal_cmppolicy_binding_response.class, response); if(result.errorcode != 0) { if (result.errorcode == 444) { service.clear_session(); } if(result.severity != null) { if (result.severity.equals("ERROR")) throw new nitro_exception(result.message,result.errorcode); } else { throw new nitro_exception(result.message,result.errorcode); } } return result.cmpglobal_cmppolicy_binding; } /** * <pre> * Returns the value of object identifier argument * </pre> */ protected String get_object_name() { return null; } public static base_response add(nitro_service client, cmpglobal_cmppolicy_binding resource) throws Exception { cmpglobal_cmppolicy_binding updateresource = new cmpglobal_cmppolicy_binding(); updateresource.policyname = resource.policyname; updateresource.priority = 
resource.priority; updateresource.state = resource.state; updateresource.type = resource.type; return updateresource.update_resource(client); } public static base_responses add(nitro_service client, cmpglobal_cmppolicy_binding resources[]) throws Exception { base_responses result = null; if (resources != null && resources.length > 0) { cmpglobal_cmppolicy_binding updateresources[] = new cmpglobal_cmppolicy_binding[resources.length]; for (int i=0;i<resources.length;i++){ updateresources[i] = new cmpglobal_cmppolicy_binding(); updateresources[i].policyname = resources[i].policyname; updateresources[i].priority = resources[i].priority; updateresources[i].state = resources[i].state; updateresources[i].type = resources[i].type; } result = update_bulk_request(client, updateresources); } return result; } public static base_response delete(nitro_service client, cmpglobal_cmppolicy_binding resource) throws Exception { cmpglobal_cmppolicy_binding deleteresource = new cmpglobal_cmppolicy_binding(); deleteresource.policyname = resource.policyname; deleteresource.type = resource.type; deleteresource.priority = resource.priority; return deleteresource.delete_resource(client); } public static base_responses delete(nitro_service client, cmpglobal_cmppolicy_binding resources[]) throws Exception { base_responses result = null; if (resources != null && resources.length > 0) { cmpglobal_cmppolicy_binding deleteresources[] = new cmpglobal_cmppolicy_binding[resources.length]; for (int i=0;i<resources.length;i++){ deleteresources[i] = new cmpglobal_cmppolicy_binding(); deleteresources[i].policyname = resources[i].policyname; deleteresources[i].type = resources[i].type; deleteresources[i].priority = resources[i].priority; } result = delete_bulk_request(client, deleteresources); } return result; } /** * Use this API to fetch a cmpglobal_cmppolicy_binding resources. 
*/ public static cmpglobal_cmppolicy_binding[] get(nitro_service service) throws Exception{ cmpglobal_cmppolicy_binding obj = new cmpglobal_cmppolicy_binding(); cmpglobal_cmppolicy_binding response[] = (cmpglobal_cmppolicy_binding[]) obj.get_resources(service); return response; } /** * Use this API to fetch filtered set of cmpglobal_cmppolicy_binding resources. * filter string should be in JSON format.eg: "port:80,servicetype:HTTP". */ public static cmpglobal_cmppolicy_binding[] get_filtered(nitro_service service, String filter) throws Exception{ cmpglobal_cmppolicy_binding obj = new cmpglobal_cmppolicy_binding(); options option = new options(); option.set_filter(filter); cmpglobal_cmppolicy_binding[] response = (cmpglobal_cmppolicy_binding[]) obj.getfiltered(service, option); return response; } /** * Use this API to fetch filtered set of cmpglobal_cmppolicy_binding resources. * set the filter parameter values in filtervalue object. */ public static cmpglobal_cmppolicy_binding[] get_filtered(nitro_service service, filtervalue[] filter) throws Exception{ cmpglobal_cmppolicy_binding obj = new cmpglobal_cmppolicy_binding(); options option = new options(); option.set_filter(filter); cmpglobal_cmppolicy_binding[] response = (cmpglobal_cmppolicy_binding[]) obj.getfiltered(service, option); return response; } /** * Use this API to count cmpglobal_cmppolicy_binding resources configued on NetScaler. */ public static long count(nitro_service service) throws Exception{ cmpglobal_cmppolicy_binding obj = new cmpglobal_cmppolicy_binding(); options option = new options(); option.set_count(true); cmpglobal_cmppolicy_binding response[] = (cmpglobal_cmppolicy_binding[]) obj.get_resources(service,option); if (response != null) { return response[0].__count; } return 0; } /** * Use this API to count the filtered set of cmpglobal_cmppolicy_binding resources. * filter string should be in JSON format.eg: "port:80,servicetype:HTTP". 
*/ public static long count_filtered(nitro_service service, String filter) throws Exception{ cmpglobal_cmppolicy_binding obj = new cmpglobal_cmppolicy_binding(); options option = new options(); option.set_count(true); option.set_filter(filter); cmpglobal_cmppolicy_binding[] response = (cmpglobal_cmppolicy_binding[]) obj.getfiltered(service, option); if (response != null) { return response[0].__count; } return 0; } /** * Use this API to count the filtered set of cmpglobal_cmppolicy_binding resources. * set the filter parameter values in filtervalue object. */ public static long count_filtered(nitro_service service, filtervalue[] filter) throws Exception{ cmpglobal_cmppolicy_binding obj = new cmpglobal_cmppolicy_binding(); options option = new options(); option.set_count(true); option.set_filter(filter); cmpglobal_cmppolicy_binding[] response = (cmpglobal_cmppolicy_binding[]) obj.getfiltered(service, option); if (response != null) { return response[0].__count; } return 0; } public static class stateEnum { public static final String ENABLED = "ENABLED"; public static final String DISABLED = "DISABLED"; } public static class typeEnum { public static final String REQ_OVERRIDE = "REQ_OVERRIDE"; public static final String REQ_DEFAULT = "REQ_DEFAULT"; public static final String RES_OVERRIDE = "RES_OVERRIDE"; public static final String RES_DEFAULT = "RES_DEFAULT"; } public static class policytypeEnum { public static final String Classic_Policy = "Classic Policy"; public static final String Advanced_Policy = "Advanced Policy"; } }
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.cosmos;

import com.azure.cosmos.implementation.ISessionToken;
import com.azure.cosmos.implementation.SessionTokenHelper;
import com.azure.cosmos.implementation.VectorSessionToken;
import com.azure.cosmos.implementation.apachecommons.collections.map.UnmodifiableMap;
import com.azure.cosmos.implementation.apachecommons.lang.StringUtils;
import com.azure.cosmos.models.CosmosBatchOperationResult;
import com.azure.cosmos.models.CosmosBatchResponse;
import com.azure.cosmos.models.CosmosItemResponse;
import com.azure.cosmos.models.PartitionKey;
import com.azure.cosmos.rx.TestSuiteBase;
import com.fasterxml.jackson.annotation.JsonProperty;
import io.netty.handler.codec.http.HttpResponseStatus;
import org.assertj.core.api.Assertions;
import org.assertj.core.data.Offset;

import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.util.Objects;
import java.util.Random;
import java.util.UUID;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Shared base class for Cosmos transactional-batch tests. It pre-creates a set
 * of {@link TestDoc} items under a single partition key, and provides helpers
 * to verify items by read, to compare batch responses, and to manipulate
 * session tokens (via reflection into {@link VectorSessionToken}).
 */
public abstract class BatchTestBase extends TestSuiteBase {

    // Randomness for generated document costs; exposed via getRandom().
    private Random random = new Random();
    // The single partition key under which the four pre-created docs live.
    String partitionKey1 = "TBD1";

    // items in partitionKey1
    TestDoc TestDocPk1ExistingA;
    TestDoc TestDocPk1ExistingB;
    TestDoc TestDocPk1ExistingC;
    TestDoc TestDocPk1ExistingD;

    public BatchTestBase(CosmosClientBuilder clientBuilder) {
        super(clientBuilder);
    }

    // Creates the four well-known docs in partitionKey1 (sync container).
    void createJsonTestDocs(CosmosContainer container) {
        this.TestDocPk1ExistingA = this.createJsonTestDoc(container, this.partitionKey1);
        this.TestDocPk1ExistingB = this.createJsonTestDoc(container, this.partitionKey1);
        this.TestDocPk1ExistingC = this.createJsonTestDoc(container, this.partitionKey1);
        this.TestDocPk1ExistingD = this.createJsonTestDoc(container, this.partitionKey1);
    }

    // Creates the four well-known docs in partitionKey1 (async container).
    void createJsonTestDocs(CosmosAsyncContainer container) {
        this.TestDocPk1ExistingA = this.createJsonTestDoc(container, this.partitionKey1);
        this.TestDocPk1ExistingB = this.createJsonTestDoc(container, this.partitionKey1);
        this.TestDocPk1ExistingC = this.createJsonTestDoc(container, this.partitionKey1);
        this.TestDocPk1ExistingD = this.createJsonTestDoc(container, this.partitionKey1);
    }

    // New doc with a random id and a description of the default minimum size (20).
    TestDoc populateTestDoc(String partitionKey) {
        return populateTestDoc(partitionKey, 20);
    }

    // New doc with a fixed cost; note the id is the cost value concatenated
    // with a random UUID (int + String concatenation).
    TestDoc populateTestDoc(String partitionKey, int cost, int minDesiredSize) {
        String description = StringUtils.repeat("x", minDesiredSize);
        return new TestDoc(cost + UUID.randomUUID().toString(), cost, description, partitionKey);
    }

    // New doc with a random cost and a description padded to minDesiredSize.
    TestDoc populateTestDoc(String partitionKey, int minDesiredSize) {
        String description = StringUtils.repeat("x", minDesiredSize);
        return new TestDoc(UUID.randomUUID().toString(), this.random.nextInt(), description, partitionKey);
    }

    // Shallow field-by-field copy (status doubles as the partition key).
    TestDoc getTestDocCopy(TestDoc testDoc) {
        return new TestDoc(testDoc.getId(), testDoc.getCost(), testDoc.getDescription(), testDoc.getStatus());
    }

    void verifyByRead(CosmosAsyncContainer container, TestDoc doc) {
        verifyByRead(container, doc, null);
    }

    // Reads the doc back (async) and asserts 200 OK, content equality, and
    // optionally the ETag.
    void verifyByRead(CosmosAsyncContainer container, TestDoc doc, String eTag) {
        PartitionKey partitionKey = this.getPartitionKey(doc.getStatus());

        CosmosItemResponse<TestDoc> response = container.readItem(doc.getId(), partitionKey, TestDoc.class).block();

        assertThat(response.getStatusCode()).isEqualTo(HttpResponseStatus.OK.code());
        assertThat(response.getItem()).isEqualTo(doc);

        if (eTag != null) {
            assertThat(response.getETag()).isEqualTo(eTag);
        }
    }

    void verifyByRead(CosmosContainer container, TestDoc doc) {
        verifyByRead(container, doc, null);
    }

    // Reads the doc back (sync) and asserts 200 OK, content equality, and
    // optionally the ETag.
    void verifyByRead(CosmosContainer container, TestDoc doc, String eTag) {
        PartitionKey partitionKey = this.getPartitionKey(doc.getStatus());

        CosmosItemResponse<TestDoc> response = container.readItem(doc.getId(), partitionKey, TestDoc.class);

        assertThat(response.getStatusCode()).isEqualTo(HttpResponseStatus.OK.code());
        assertThat(response.getItem()).isEqualTo(doc);

        if (eTag != null) {
            assertThat(response.getETag()).isEqualTo(eTag);
        }
    }

    // Asserts that reading the doc raises a 404 CosmosException.
    void verifyNotFound(CosmosContainer container, TestDoc doc) {
        String id = doc.getId();
        PartitionKey partitionKey = this.getPartitionKey(doc.getStatus());

        try {
            CosmosItemResponse<TestDoc> response = container.readItem(id, partitionKey, TestDoc.class);
            Assertions.fail("Should throw NOT_FOUND exception");
        } catch (CosmosException ex) {
            assertThat(ex.getStatusCode()).isEqualTo(HttpResponseStatus.NOT_FOUND.code());
        }
    }

    PartitionKey getPartitionKey(String partitionKey) {
        return new PartitionKey(partitionKey);
    }

    private TestDoc createJsonTestDoc(CosmosContainer container, String partitionKey) {
        return createJsonTestDoc(container, partitionKey, 20);
    }

    // Creates a doc (sync) and asserts 201 CREATED.
    TestDoc createJsonTestDoc(CosmosContainer container, String partitionKey, int minDesiredSize) {
        TestDoc doc = this.populateTestDoc(partitionKey, minDesiredSize);
        CosmosItemResponse<TestDoc> createResponse = container.createItem(doc, this.getPartitionKey(partitionKey), null);
        assertThat(createResponse.getStatusCode()).isEqualTo(HttpResponseStatus.CREATED.code());
        return doc;
    }

    private TestDoc createJsonTestDoc(CosmosAsyncContainer container, String partitionKey) {
        return createJsonTestDoc(container, partitionKey, 20);
    }

    // Creates a doc (async) and asserts 201 CREATED.
    TestDoc createJsonTestDoc(CosmosAsyncContainer container, String partitionKey, int minDesiredSize) {
        TestDoc doc = this.populateTestDoc(partitionKey, minDesiredSize);
        CosmosItemResponse<TestDoc> createResponse = container.createItem(doc, this.getPartitionKey(partitionKey), null).block();
        assertThat(createResponse.getStatusCode()).isEqualTo(HttpResponseStatus.CREATED.code());
        return doc;
    }

    public Random getRandom() {
        return random;
    }

    // Parses the token part after the "pkRangeId:" prefix of a session token.
    ISessionToken getSessionToken(String sessionToken) {
        String[] tokenParts = org.apache.commons.lang3.StringUtils.split(sessionToken, ':');
        return SessionTokenHelper.parse(tokenParts[1]);
    }

    // Returns the same session token with its global LSN shifted by lsnDifferent.
    String getDifferentLSNToken(String token, long lsnDifferent) throws Exception {
        String[] tokenParts = org.apache.commons.lang3.StringUtils.split(token, ':');
        ISessionToken sessionToken = SessionTokenHelper.parse(tokenParts[1]);
        ISessionToken differentSessionToken = createSessionToken(sessionToken, sessionToken.getLSN() + lsnDifferent);
        return String.format("%s:%s", tokenParts[0], differentSessionToken.convertToString());
    }

    // NOTE(review): this reaches into VectorSessionToken's private "version" and
    // "localLsnByRegion" fields and its private constructor via reflection, so it
    // is tightly coupled to that class's internals and will break if they change.
    @SuppressWarnings({"unchecked", "rawtypes"})
    private static ISessionToken createSessionToken(ISessionToken from, long globalLSN) throws Exception {
        // Creates session token with specified GlobalLSN
        if (from instanceof VectorSessionToken) {
            VectorSessionToken fromSessionToken = (VectorSessionToken) from;
            Field fieldVersion = VectorSessionToken.class.getDeclaredField("version");
            fieldVersion.setAccessible(true);
            Long version = (Long) fieldVersion.get(fromSessionToken);

            Field fieldLocalLsnByRegion = VectorSessionToken.class.getDeclaredField("localLsnByRegion");
            fieldLocalLsnByRegion.setAccessible(true);
            UnmodifiableMap<Integer, Long> localLsnByRegion =
                (UnmodifiableMap<Integer, Long>) fieldLocalLsnByRegion.get(fromSessionToken);

            Constructor<VectorSessionToken> constructor =
                VectorSessionToken.class.getDeclaredConstructor(long.class, long.class, UnmodifiableMap.class);
            constructor.setAccessible(true);
            VectorSessionToken vectorSessionToken = constructor.newInstance(version, globalLSN, localLsnByRegion);
            return vectorSessionToken;
        } else {
            throw new IllegalArgumentException();
        }
    }

    void verifyBatchProcessed(CosmosBatchResponse batchResponse, int numberOfOperations) {
        this.verifyBatchProcessed(batchResponse, numberOfOperations, HttpResponseStatus.OK);
    }

    // Asserts the batch response status, size, positive request charge, and that
    // the total charge matches the sum of per-operation charges within 0.1.
    void verifyBatchProcessed(CosmosBatchResponse batchResponse, int numberOfOperations, HttpResponseStatus expectedStatusCode) {
        assertThat(batchResponse).isNotNull();
        // NOTE(review): AssertJ's as() uses String.format-style substitution, so
        // the "{0}"/"{1}"/"{2}" placeholders are never filled in, and only two
        // args are supplied for three placeholders — confirm and fix separately.
        assertThat(batchResponse.getStatusCode())
            .as(
                "Batch server response had StatusCode {0} instead of {1} expected and had ErrorMessage {2}",
                batchResponse.getStatusCode(),
                expectedStatusCode.code())
            .isEqualTo(expectedStatusCode.code());

        assertThat(batchResponse.size()).isEqualTo(numberOfOperations);
        assertThat(batchResponse.getRequestCharge()).isPositive();
        assertThat(batchResponse.getDiagnostics().toString()).isNotEmpty();

        // Allow a delta since we round both the total charge and the individual operation
        // charges to 2 decimal places.
        assertThat(batchResponse.getRequestCharge())
            .isCloseTo(batchResponse.getResults().stream().mapToDouble(CosmosBatchOperationResult::getRequestCharge).sum(),
                Offset.offset(0.1));
    }

    /**
     * Simple serializable test document; "status" is mapped to the container's
     * partition key path ("/mypk").
     */
    public static class TestDoc {
        public String id;
        public int cost;
        public String description;

        @JsonProperty("mypk")
        public String status;

        public TestDoc() {
        }

        public TestDoc(String id, int cost, String description, String status) {
            this.id = id;
            this.cost = cost;
            this.description = description;
            this.status = status;
        }

        // NOTE(review): missing @Override; compares all four fields and is
        // null-hostile on id/description/status (NPE if any is null).
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }

            if (obj == null || getClass() != obj.getClass()) {
                return false;
            }

            TestDoc testDoc2 = (TestDoc) obj;
            return (this.getId().equals(testDoc2.getId()) && this.getCost() == testDoc2.getCost())
                && this.getDescription().equals(testDoc2.getDescription())
                && this.getStatus().equals(testDoc2.getStatus());
        }

        @Override
        public int hashCode() {
            // Hand-rolled hash consistent with equals (same four fields).
            int hashCode = 1652434776;
            hashCode = (hashCode * -1521134295) + this.id.hashCode();
            hashCode = (hashCode * -1521134295) + this.cost;
            hashCode = (hashCode * -1521134295) + this.description.hashCode();
            hashCode = (hashCode * -1521134295) + this.status.hashCode();
            return hashCode;
        }

        public String getId() {
            return id;
        }

        public void setId(String id) {
            this.id = id;
        }

        public int getCost() {
            return cost;
        }

        public void setCost(int cost) {
            this.cost = cost;
        }

        public String getDescription() {
            return description;
        }

        public void setDescription(String description) {
            this.description = description;
        }

        public String getStatus() {
            return status;
        }

        public void setStatus(String status) {
            this.status = status;
        }

        @Override
        public String toString() {
            return "TestDoc{" +
                "id='" + id + '\'' +
                ", cost=" + cost +
                ", description='" + description + '\'' +
                ", status='" + status + '\'' +
                '}';
        }
    }

    /**
     * Event-style test document; "partitionKey" is mapped to the container's
     * partition key path ("/mypk").
     */
    public static class EventDoc {
        public String id;
        int clicks;
        int views;
        String type;

        @JsonProperty("mypk")
        public String partitionKey;

        public EventDoc() {
        }

        public EventDoc(String id, int clicks, int views, String type, String partitionKey) {
            this.id = id;
            this.clicks = clicks;
            this.views = views;
            this.type = type;
            this.partitionKey = partitionKey;
        }

        public String getId() {
            return id;
        }

        public int getClicks() {
            return clicks;
        }

        public int getViews() {
            return views;
        }

        public String getType() {
            return type;
        }

        public String getPartitionKey() {
            return partitionKey;
        }

        public void setId(String id) {
            this.id = id;
        }

        public void setClicks(int clicks) {
            this.clicks = clicks;
        }

        public void setViews(int views) {
            this.views = views;
        }

        public void setType(String type) {
            this.type = type;
        }

        public void setPartitionKey(String partitionKey) {
            this.partitionKey = partitionKey;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            EventDoc eventDoc = (EventDoc) o;
            return clicks == eventDoc.clicks &&
                views == eventDoc.views &&
                Objects.equals(id, eventDoc.id) &&
                Objects.equals(type, eventDoc.type) &&
                Objects.equals(partitionKey, eventDoc.partitionKey);
        }

        @Override
        public int hashCode() {
            return Objects.hash(id, clicks, views, type, partitionKey);
        }

        @Override
        public String toString() {
            return "EventDoc{" +
                "id='" + id + '\'' +
                ", clicks=" + clicks +
                ", views=" + views +
                ", type='" + type + '\'' +
                ", partitionKey='" + partitionKey + '\'' +
                '}';
        }
    }
}
/**
 * Copyright 2010 The Apache Software Foundation
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.util;

import static java.lang.Integer.rotateLeft;

import java.io.FileInputStream;
import java.io.IOException;

/**
 * Produces 32-bit hash for hash table lookup.
 *
 * <pre>lookup3.c, by Bob Jenkins, May 2006, Public Domain.
 *
 * You can use this free for any purpose.  It's in the public domain.
 * It has no warranty.
 * </pre>
 *
 * @see <a href="http://burtleburtle.net/bob/c/lookup3.c">lookup3.c</a>
 * @see <a href="http://www.ddj.com/184410284">Hash Functions (and how this
 * function compares to others such as CRC, MD?, etc</a>
 * @see <a href="http://burtleburtle.net/bob/hash/doobs.html">Has update on the
 * Dr. Dobbs Article</a>
 */
public class JenkinsHash extends Hash {
  private static final int BYTE_MASK = 0xff;

  private static JenkinsHash _instance = new JenkinsHash();

  public static Hash getInstance() {
    return _instance;
  }

  /**
   * taken from hashlittle() -- hash a variable-length key into a 32-bit value
   *
   * @param key the key (the unaligned variable-length array of bytes)
   * @param off offset of the first byte of the key within the array
   * @param nbytes number of bytes to include in hash
   * @param initval can be any integer value
   * @return a 32-bit value.  Every bit of the key affects every bit of the
   * return value.  Two keys differing by one or two bits will have totally
   * different hash values.
   *
   * <p>The best hash table sizes are powers of 2.  There is no need to do mod
   * a prime (mod is sooo slow!).  If you need less than 32 bits, use a bitmask.
   * For example, if you need only 10 bits, do
   * <code>h = (h &amp; hashmask(10));</code>
   * In which case, the hash table should have hashsize(10) elements.
   *
   * <p>If you are hashing n strings byte[][] k, do it like this:
   * for (int i = 0, h = 0; i &lt; n; ++i) h = hash( k[i], h);
   *
   * <p>By Bob Jenkins, 2006.  bob_jenkins@burtleburtle.net.  You may use this
   * code any way you wish, private, educational, or commercial.  It's free.
   *
   * <p>Use for hash table lookup, or anything where one collision in 2^^32 is
   * acceptable.  Do NOT use for cryptographic purposes.
  */
  @Override
  @SuppressWarnings("fallthrough")
  public int hash(byte[] key, int off, int nbytes, int initval) {
    int length = nbytes;
    int a, b, c;
    a = b = c = 0xdeadbeef + length + initval;
    int offset = off;
    // Consume the key 12 bytes (three little-endian 32-bit words) at a time.
    for (; length > 12; offset += 12, length -= 12) {
      a += (key[offset] & BYTE_MASK);
      a += ((key[offset + 1] & BYTE_MASK) <<  8);
      a += ((key[offset + 2] & BYTE_MASK) << 16);
      a += ((key[offset + 3] & BYTE_MASK) << 24);
      b += (key[offset + 4] & BYTE_MASK);
      b += ((key[offset + 5] & BYTE_MASK) <<  8);
      b += ((key[offset + 6] & BYTE_MASK) << 16);
      b += ((key[offset + 7] & BYTE_MASK) << 24);
      c += (key[offset + 8] & BYTE_MASK);
      c += ((key[offset + 9] & BYTE_MASK) <<  8);
      c += ((key[offset + 10] & BYTE_MASK) << 16);
      c += ((key[offset + 11] & BYTE_MASK) << 24);

      /*
       * mix -- mix 3 32-bit values reversibly.
       * This is reversible, so any information in (a,b,c) before mix() is
       * still in (a,b,c) after mix().
       *
       * Rotate amounts were chosen via Bob Jenkins' avalanche testing
       * (http://burtleburtle.net/bob/hash/avalanche.html).  mix() does not
       * itself achieve full avalanche -- final() below handles that for the
       * last word -- but read-after-writes double the number of bits
       * affected while still allowing some instruction-level parallelism.
       *
       * #define mix(a,b,c) \
       * { \
       *   a -= c;  a ^= rot(c, 4);  c += b; \
       *   b -= a;  b ^= rot(a, 6);  a += c; \
       *   c -= b;  c ^= rot(b, 8);  b += a; \
       *   a -= c;  a ^= rot(c,16);  c += b; \
       *   b -= a;  b ^= rot(a,19);  a += c; \
       *   c -= b;  c ^= rot(b, 4);  b += a; \
       * }
       *
       * mix(a,b,c);
       */
      a -= c; a ^= rotateLeft(c, 4); c += b;
      b -= a; b ^= rotateLeft(a, 6); a += c;
      c -= b; c ^= rotateLeft(b, 8); b += a;
      a -= c; a ^= rotateLeft(c, 16); c += b;
      b -= a; b ^= rotateLeft(a, 19); a += c;
      c -= b; c ^= rotateLeft(b, 4); b += a;
    }

    //-------------------------------- last block: affect all 32 bits of (c)
    switch (length) {                   // all the case statements fall through
    case 12:
      c += ((key[offset + 11] & BYTE_MASK) << 24);
    case 11:
      c += ((key[offset + 10] & BYTE_MASK) << 16);
    case 10:
      c += ((key[offset + 9] & BYTE_MASK) <<  8);
    case  9:
      c += (key[offset + 8] & BYTE_MASK);
    case  8:
      b += ((key[offset + 7] & BYTE_MASK) << 24);
    case  7:
      b += ((key[offset + 6] & BYTE_MASK) << 16);
    case  6:
      b += ((key[offset + 5] & BYTE_MASK) <<  8);
    case  5:
      b += (key[offset + 4] & BYTE_MASK);
    case  4:
      a += ((key[offset + 3] & BYTE_MASK) << 24);
    case  3:
      a += ((key[offset + 2] & BYTE_MASK) << 16);
    case  2:
      a += ((key[offset + 1] & BYTE_MASK) <<  8);
    case  1:
      //noinspection PointlessArithmeticExpression
      a += (key[offset + 0] & BYTE_MASK);
      break;
    case  0:
      return c;
    }
    /*
     * final -- final mixing of 3 32-bit values (a,b,c) into c
     *
     * Pairs of (a,b,c) values differing in only a few bits will usually
     * produce values of c that look totally different (tested by Bob Jenkins
     * over one/two-bit deltas under +, -, ^, ~^ with pseudorandom and
     * near-zero base values; see lookup3.c for the full discussion and the
     * candidate rotate-constant sets).
     *
     * #define final(a,b,c) \
     * {
     *   c ^= b; c -= rot(b,14); \
     *   a ^= c; a -= rot(c,11); \
     *   b ^= a; b -= rot(a,25); \
     *   c ^= b; c -= rot(b,16); \
     *   a ^= c; a -= rot(c,4);  \
     *   b ^= a; b -= rot(a,14); \
     *   c ^= b; c -= rot(b,24); \
     * }
     *
     */
    c ^= b; c -= rotateLeft(b, 14);
    a ^= c; a -= rotateLeft(c, 11);
    b ^= a; b -= rotateLeft(a, 25);
    c ^= b; c -= rotateLeft(b, 16);
    a ^= c; a -= rotateLeft(c, 4);
    b ^= a; b -= rotateLeft(a, 14);
    c ^= b; c -= rotateLeft(b, 24);
    return c;
  }

  /**
   * Compute the hash of the specified file
   * @param args name of file to compute hash of.
   * @throws IOException e
   */
  public static void main(String[] args) throws IOException {
    if (args.length != 1) {
      System.err.println("Usage: JenkinsHash filename");
      System.exit(-1);
    }
    FileInputStream in = new FileInputStream(args[0]);
    try {
      byte[] bytes = new byte[512];
      int value = 0;
      JenkinsHash hash = new JenkinsHash();
      for (int length = in.read(bytes); length > 0; length = in.read(bytes)) {
        value = hash.hash(bytes, length, value);
      }
      System.out.println(Math.abs(value));
    } finally {
      // Fix: the stream was previously never closed (file-handle leak).
      in.close();
    }
  }
}
/* * Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you * may not use this file except in compliance with the License. You * may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. See accompanying * LICENSE file. */ package com.gemstone.gemfire.management.internal.beans; import java.util.Map; import javax.management.NotificationBroadcasterSupport; import javax.management.ObjectName; import com.gemstone.gemfire.management.DiskBackupStatus; import com.gemstone.gemfire.management.DiskMetrics; import com.gemstone.gemfire.management.DistributedSystemMXBean; import com.gemstone.gemfire.management.GemFireProperties; import com.gemstone.gemfire.management.JVMMetrics; import com.gemstone.gemfire.management.NetworkMetrics; import com.gemstone.gemfire.management.OSMetrics; import com.gemstone.gemfire.management.PersistentMemberDetails; /** * Distributed System MBean * * It is provided with one bridge reference which acts as an intermediate for * JMX and GemFire. 
 * @author rishim
 */
public class DistributedSystemMBean extends NotificationBroadcasterSupport
    implements DistributedSystemMXBean {

  /**
   * Injected DistributedSystemBridge. Every attribute/operation below is a
   * one-line delegation to this bridge, which mediates between JMX and GemFire.
   */
  private DistributedSystemBridge bridge;

  public DistributedSystemMBean(DistributedSystemBridge bridge) {
    this.bridge = bridge;
  }

  // ---- backup / alerting -------------------------------------------------

  @Override
  public DiskBackupStatus backupAllMembers(String targetDirPath) throws Exception {
    return bridge.backupAllMembers(targetDirPath);
  }

  @Override
  public String getAlertLevel() { return bridge.getAlertLevel(); }

  // ---- server / client counters -----------------------------------------

  @Override
  public String[] listCacheServers() { return bridge.listCacheServers(); }

  @Override
  public int getNumClients() { return bridge.getNumClients(); }

  @Override
  public long getActiveCQCount() { return bridge.getActiveCQCount(); }

  // ---- disk metrics -------------------------------------------------------

  @Override
  public DiskMetrics showDiskMetrics(String member) throws Exception {
    return bridge.showDiskMetrics(member);
  }

  @Override
  public float getDiskReadsRate() { return bridge.getDiskReadsRate(); }

  @Override
  public float getDiskWritesRate() { return bridge.getDiskWritesRate(); }

  @Override
  public long getDiskFlushAvgLatency() { return bridge.getDiskFlushAvgLatency(); }

  // ---- gateways -----------------------------------------------------------

  @Override
  public String[] listGatewayReceivers() { return bridge.listGatewayReceivers(); }

  @Override
  public String[] listGatewaySenders() {
    // NOTE(review): the bridge method name is misspelled ("Gatway"); the typo
    // lives in DistributedSystemBridge's API, so it cannot be fixed here.
    return bridge.listGatwaySenders();
  }

  // ---- JVM / locators / member configuration ------------------------------

  @Override
  public JVMMetrics showJVMMetrics(String member) throws Exception {
    return bridge.showJVMMetrics(member);
  }

  @Override
  public int getLocatorCount() { return bridge.getLocatorCount(); }

  @Override
  public String[] listLocators() { return bridge.listLocators(); }

  @Override
  public GemFireProperties fetchMemberConfiguration(String member) throws Exception {
    return bridge.fetchMemberConfiguration(member);
  }

  @Override
  public int getDistributedSystemId() { return bridge.getDistributedSystemId(); }

  @Override
  public int getMemberCount() { return bridge.getMemberCount(); }

  @Override
  public Map<String, String[]> listMemberDiskstore() {
    // Presumably keyed by member name with that member's disk store names as
    // values — determined by the bridge; TODO confirm against bridge docs.
    return bridge.getMemberDiskstoreMap();
  }

  // ---- members ------------------------------------------------------------

  @Override
  public long fetchMemberUpTime(String member) throws Exception {
    return bridge.getMemberUpTime(member);
  }

  @Override
  public String[] listMembers() { return bridge.getMembers(); }

  // Not declared on DistributedSystemMXBean in this chunk (no @Override in the
  // original), so it may be an extension beyond the MXBean interface.
  public String[] listLocatorMembers(boolean onlyStandAloneLocators) {
    return bridge.listLocatorMembers(onlyStandAloneLocators);
  }

  @Override
  public String[] listGroups() { return bridge.getGroups(); }

  @Override
  public NetworkMetrics showNetworkMetric(String member) throws Exception {
    return bridge.showNetworkMetric(member);
  }

  @Override
  public int getNumInitialImagesInProgress() {
    return bridge.getNumInitialImagesInProgress();
  }

  @Override
  public OSMetrics showOSMetrics(String member) throws Exception {
    return bridge.showOSMetrics(member);
  }

  // ---- aggregate statistics -----------------------------------------------

  @Override
  public float getQueryRequestRate() { return bridge.getQueryRequestRate(); }

  @Override
  public int getSystemDiskStoreCount() { return bridge.getSystemDiskStoreCount(); }

  @Override
  public int getTotalBackupInProgress() { return bridge.getTotalBackupInProgress(); }

  @Override
  public long getTotalHeapSize() { return bridge.getTotalHeapSize(); }

  @Override
  public int getTotalHitCount() { return bridge.getTotalHitCount(); }

  @Override
  public int getTotalMissCount() { return bridge.getTotalMissCount(); }

  @Override
  public int getTotalRegionCount() { return bridge.getTotalRegionCount(); }

  @Override
  public long getTotalRegionEntryCount() { return bridge.getTotalRegionEntryCount(); }

  // ---- system-wide operations ----------------------------------------------

  @Override
  public void changeAlertLevel(String alertLevel) throws Exception {
    bridge.changeAlertLevel(alertLevel);
  }

  @Override
  public String[] shutDownAllMembers() throws Exception {
    return bridge.shutDownAllMembers();
  }

  @Override
  public String[] listRegions() { return bridge.listAllRegions(); }

  @Override
  public String[] listAllRegionPaths() { return bridge.listAllRegionPaths(); }

  @Override
  public PersistentMemberDetails[] listMissingDiskStores() {
    return bridge.listMissingDiskStores();
  }

  @Override
  public boolean revokeMissingDiskStores(final String diskStoreId) throws Exception {
    return bridge.revokeMissingDiskStores(diskStoreId);
  }

  // ---- ObjectName lookups ---------------------------------------------------

  @Override
  public ObjectName getMemberObjectName() { return bridge.getMemberObjectName(); }

  @Override
  public ObjectName getManagerObjectName() { return bridge.getManagerObjectName(); }

  @Override
  public ObjectName fetchMemberObjectName(String member) throws Exception {
    return bridge.fetchMemberObjectName(member);
  }

  @Override
  public ObjectName[] listMemberObjectNames() {
    return bridge.listMemberObjectNames();
  }

  @Override
  public ObjectName fetchDistributedRegionObjectName(String regionPath)
      throws Exception {
    return bridge.fetchDistributedRegionObjectName(regionPath);
  }

  @Override
  public ObjectName fetchRegionObjectName(String member, String regionPath)
      throws Exception {
    return bridge.fetchRegionObjectName(member, regionPath);
  }

  @Override
  public ObjectName[] fetchRegionObjectNames(ObjectName memberMBeanName)
      throws Exception {
    return bridge.fetchRegionObjectNames(memberMBeanName);
  }

  @Override
  public ObjectName[] listDistributedRegionObjectNames() {
    return bridge.listDistributedRegionObjectNames();
  }

  @Override
  public ObjectName fetchCacheServerObjectName(String member, int port)
      throws Exception {
    return bridge.fetchCacheServerObjectName(member, port);
  }

  @Override
  public ObjectName fetchDiskStoreObjectName(String member, String diskStore)
      throws Exception {
    return bridge.fetchDiskStoreObjectName(member, diskStore);
  }

  @Override
  public ObjectName fetchDistributedLockServiceObjectName(String lockServiceName)
      throws Exception {
    return bridge.fetchDistributedLockServiceObjectName(lockServiceName);
  }

  @Override
  public ObjectName fetchGatewayReceiverObjectName(String member) throws Exception {
    return bridge.fetchGatewayReceiverObjectName(member);
  }

  @Override
  public ObjectName fetchGatewaySenderObjectName(String member, String senderId)
      throws Exception {
    return bridge.fetchGatewaySenderObjectName(member, senderId);
  }

  @Override
  public ObjectName fetchLockServiceObjectName(String member, String lockService)
      throws Exception {
    return bridge.fetchLockServiceObjectName(member, lockService);
  }

  @Override
  public ObjectName[] listCacheServerObjectNames() {
    return bridge.listCacheServerObjectNames();
  }

  @Override
  public ObjectName[] listGatewayReceiverObjectNames() {
    return bridge.listGatewayReceiverObjectNames();
  }

  @Override
  public ObjectName[] listGatewaySenderObjectNames() {
    return bridge.listGatewaySenderObjectNames();
  }

  @Override
  public ObjectName[] listGatewaySenderObjectNames(String member) throws Exception {
    return bridge.listGatewaySenderObjectNames(member);
  }

  // ---- more aggregate statistics --------------------------------------------

  @Override
  public int getNumRunningFunctions() { return bridge.getNumRunningFunctions(); }

  @Override
  public long getRegisteredCQCount() { return bridge.getRegisteredCQCount(); }

  @Override
  public long getTotalDiskUsage() { return bridge.getTotalDiskUsage(); }

  @Override
  public float getAverageReads() { return bridge.getAverageReads(); }

  @Override
  public float getAverageWrites() { return bridge.getAverageWrites(); }

  @Override
  public long getUsedHeapSize() { return bridge.getUsedHeapSize(); }

  @Override
  public int getNumSubscriptions() { return bridge.getNumSubscriptions(); }

  @Override
  public long getGarbageCollectionCount() {
    return bridge.getGarbageCollectionCount();
  }

  @Override
  public Map<String, Boolean> viewRemoteClusterStatus() {
    return bridge.viewRemoteClusterStatus();
  }

  @Override
  public long getJVMPauses() { return bridge.getJVMPauses(); }

  // ---- ad-hoc querying --------------------------------------------------------

  @Override
  public String queryData(String queryString, String members, int limit)
      throws Exception {
    return bridge.queryData(queryString, members, limit);
  }

  @Override
  public byte[] queryDataForCompressedResult(String queryString, String members,
      int limit) throws Exception {
    return bridge.queryDataForCompressedResult(queryString, members, limit);
  }

  // ---- memory ---------------------------------------------------------------

  @Override
  public long getOffHeapFreeMemory() { return bridge.getOffHeapFreeMemory(); }

  @Override
  public long getOffHeapUsedMemory() { return bridge.getOffHeapUsedMemory(); }

  @Override
  public int getTransactionCommitted() { return bridge.getTransactionCommitted(); }

  @Override
  public int getTransactionRolledBack() { return bridge.getTransactionRolledBack(); }

  // getMaxMemory/getUsedMemory are aliases for the heap-size attributes above:
  // both intentionally delegate to the same bridge getters.
  @Override
  public long getMaxMemory() { return bridge.getTotalHeapSize(); }

  @Override
  public long getUsedMemory() { return bridge.getUsedHeapSize(); }
}
/* * Copyright (c) Facebook, Inc. and its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.buck.apple; import com.facebook.buck.apple.toolchain.ApplePlatform; import com.facebook.buck.core.build.buildable.context.BuildableContext; import com.facebook.buck.core.build.context.BuildContext; import com.facebook.buck.core.exceptions.HumanReadableException; import com.facebook.buck.core.filesystems.AbsPath; import com.facebook.buck.core.filesystems.PathWrapper; import com.facebook.buck.core.filesystems.RelPath; import com.facebook.buck.core.model.BuildTarget; import com.facebook.buck.core.model.Flavor; import com.facebook.buck.core.model.InternalFlavor; import com.facebook.buck.core.rulekey.AddToRuleKey; import com.facebook.buck.core.rules.BuildRule; import com.facebook.buck.core.rules.SourcePathRuleFinder; import com.facebook.buck.core.rules.common.BuildableSupport; import com.facebook.buck.core.rules.impl.AbstractBuildRule; import com.facebook.buck.core.sourcepath.ExplicitBuildTargetSourcePath; import com.facebook.buck.core.sourcepath.SourcePath; import com.facebook.buck.core.sourcepath.resolver.SourcePathResolverAdapter; import com.facebook.buck.core.toolchain.tool.Tool; import com.facebook.buck.io.filesystem.BuildCellRelativePath; import com.facebook.buck.io.filesystem.ProjectFilesystem; import com.facebook.buck.io.filesystem.impl.ProjectFilesystemUtils; import com.facebook.buck.rules.modern.DefaultOutputPathResolver; import 
com.facebook.buck.rules.modern.OutputPath;
import com.facebook.buck.rules.modern.OutputPathResolver;
import com.facebook.buck.step.Step;
import com.facebook.buck.step.fs.MakeCleanDirectoryStep;
import com.facebook.buck.step.fs.MkdirStep;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import java.io.IOException;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.SortedSet;
import java.util.function.Supplier;

/**
 * Build rule that compiles a set of {@code .xcassets} asset catalog
 * directories into a single {@code <bundleName>.bundle} output (plus an
 * {@code AssetCatalog.plist}) by invoking Apple's {@code actool}.
 */
public class AppleAssetCatalog extends AbstractBuildRule {

  public static final Flavor FLAVOR = InternalFlavor.of("apple-asset-catalog");

  private static final String BUNDLE_DIRECTORY_EXTENSION = ".bundle";

  // Everything annotated @AddToRuleKey participates in the rule key, so a
  // change to any of these inputs invalidates the cached output.
  @AddToRuleKey private final ApplePlatform applePlatform;

  @AddToRuleKey private final String targetSDKVersion;

  @AddToRuleKey private final Optional<String> deviceFamily;

  @AddToRuleKey private final Optional<String> uiFrameworkFamily;

  @AddToRuleKey private final Tool actool;

  @AddToRuleKey private final ImmutableSortedSet<SourcePath> assetCatalogDirs;

  @AddToRuleKey private final OutputPath outputDirPath;

  @AddToRuleKey private final OutputPath plistOutputPath;

  @AddToRuleKey private final Optional<String> appIcon;

  @AddToRuleKey private final Optional<String> launchImage;

  @AddToRuleKey private final AppleAssetCatalogsCompilationOptions compilationOptions;

  private final Supplier<SortedSet<BuildRule>> buildDepsSupplier;

  @AddToRuleKey private final boolean withDownwardApi;

  private OutputPathResolver outputPathResolver;

  // Asset types whose directories must contain a Contents.json to be valid;
  // used by the STRICT validation path below.
  private static final ImmutableSet<String> TYPES_REQUIRING_CONTENTS_JSON =
      ImmutableSet.of(
          "appiconset",
          "brandassets",
          "cubetextureset",
          "dataset",
          "imageset",
          "imagestack",
          "launchimage",
          "mipmapset",
          "sticker",
          "stickerpack",
          "stickersequence",
          "textureset",
          "complicationset");

  AppleAssetCatalog(
      BuildTarget buildTarget,
      ProjectFilesystem projectFilesystem,
      SourcePathRuleFinder ruleFinder,
      ApplePlatform applePlatform,
      String targetSDKVersion,
      Optional<String> maybeDeviceFamily,
      Optional<String> maybeUIFrameworkFamily,
      Tool actool,
      ImmutableSortedSet<SourcePath> assetCatalogDirs,
      Optional<String> appIcon,
      Optional<String> launchImage,
      AppleAssetCatalogsCompilationOptions compilationOptions,
      String bundleName,
      boolean withDownwardApi) {
    super(buildTarget, projectFilesystem);
    this.applePlatform = applePlatform;
    this.targetSDKVersion = targetSDKVersion;
    this.deviceFamily = maybeDeviceFamily;
    this.uiFrameworkFamily = maybeUIFrameworkFamily;
    this.actool = actool;
    this.assetCatalogDirs = assetCatalogDirs;
    this.withDownwardApi = withDownwardApi;
    // Output bundle is named after the enclosing apple bundle, e.g. "Foo.bundle".
    this.outputDirPath = new OutputPath(bundleName + BUNDLE_DIRECTORY_EXTENSION);
    this.plistOutputPath = new OutputPath("AssetCatalog.plist");
    this.appIcon = appIcon;
    this.launchImage = launchImage;
    this.compilationOptions = compilationOptions;
    this.buildDepsSupplier = BuildableSupport.buildDepsSupplier(this, ruleFinder);
    this.outputPathResolver =
        new DefaultOutputPathResolver(getProjectFilesystem(), getBuildTarget());
  }

  /** Source path of the generated AssetCatalog.plist. */
  public SourcePath getSourcePathToPlist() {
    return ExplicitBuildTargetSourcePath.of(getBuildTarget(), getResolvedPlistPath());
  }

  private RelPath getResolvedPlistPath() {
    return outputPathResolver.resolvePath(plistOutputPath);
  }

  private RelPath getResolvedOutputDirPath() {
    return outputPathResolver.resolvePath(outputDirPath);
  }

  @Override
  public ImmutableList<Step> getBuildSteps(
      BuildContext context, BuildableContext buildableContext) {
    ImmutableList.Builder<Step> stepsBuilder = ImmutableList.builder();

    // Start from a clean output bundle directory on every build.
    RelPath resolvedOutputDirPath = getResolvedOutputDirPath();
    stepsBuilder.addAll(
        MakeCleanDirectoryStep.of(
            BuildCellRelativePath.fromCellRelativePath(
                context.getBuildCellRootPath(), getProjectFilesystem(),
                resolvedOutputDirPath)));

    // Ensure the plist's parent directory exists (mkdir only, not cleaned).
    RelPath resolvedPlistPath = getResolvedPlistPath();
    stepsBuilder.add(
        MkdirStep.of(
            BuildCellRelativePath.fromCellRelativePath(
                context.getBuildCellRootPath(), getProjectFilesystem(),
                resolvedPlistPath.getParent())));

    ImmutableSortedSet<AbsPath> absoluteAssetCatalogDirs =
        context.getSourcePathResolver().getAllAbsolutePaths(assetCatalogDirs);

    // The actual actool invocation that compiles all catalogs into the bundle.
    stepsBuilder.add(
        new ActoolStep(
            getProjectFilesystem().getRootPath(),
            applePlatform,
            targetSDKVersion,
            deviceFamily,
            uiFrameworkFamily,
            actool.getEnvironment(context.getSourcePathResolver()),
            actool.getCommandPrefix(context.getSourcePathResolver()),
            absoluteAssetCatalogDirs.stream()
                .map(PathWrapper::getPath)
                .collect(ImmutableSortedSet.toImmutableSortedSet(Comparator.naturalOrder())),
            getProjectFilesystem().resolve(resolvedOutputDirPath).getPath(),
            getProjectFilesystem().resolve(resolvedPlistPath).getPath(),
            appIcon,
            launchImage,
            compilationOptions,
            ProjectFilesystemUtils.relativize(
                getProjectFilesystem().getRootPath(), context.getBuildCellRootPath()),
            withDownwardApi));

    buildableContext.recordArtifact(resolvedOutputDirPath.getPath());
    buildableContext.recordArtifact(resolvedPlistPath.getPath());

    return stepsBuilder.build();
  }

  @Override
  public SourcePath getSourcePathToOutput() {
    return ExplicitBuildTargetSourcePath.of(getBuildTarget(), getResolvedOutputDirPath());
  }

  /**
   * Validates the given asset catalog directories. All dirs must end in
   * {@code .xcassets}; with {@link ValidationType#STRICT} each catalog is also
   * checked for missing Contents.json files and duplicate image names. All
   * problems are accumulated and thrown together as one HumanReadableException.
   */
  public static void validateAssetCatalogs(
      ImmutableSortedSet<SourcePath> assetCatalogDirs,
      BuildTarget buildTarget,
      ProjectFilesystem projectFilesystem,
      SourcePathResolverAdapter sourcePathResolverAdapter,
      ValidationType validationType)
      throws HumanReadableException {
    // Maps lower-cased image file name -> catalog that first contributed it,
    // to detect cross-catalog collisions.
    HashMap<String, Path> catalogPathsForImageNames = new HashMap<>();
    ArrayList<String> errors = new ArrayList<>();
    for (SourcePath assetCatalogDir : assetCatalogDirs) {
      RelPath catalogPath = sourcePathResolverAdapter.getCellUnsafeRelPath(assetCatalogDir);
      if (!catalogPath.getFileName().toString().endsWith(".xcassets")) {
        errors.add(
            String.format(
                "Target %s had asset catalog dir %s - asset catalog dirs must end with .xcassets",
                buildTarget, catalogPath));
        continue;
      }
      switch (validationType) {
        case XCODE:
          // Xcode-level validation stops at the .xcassets suffix check above.
          continue;
        case STRICT:
          strictlyValidateAssetCatalog(
              catalogPath, catalogPathsForImageNames, errors, projectFilesystem);
      }
    }
    if (!errors.isEmpty()) {
      throw new HumanReadableException(
          String.format("Asset catalogs invalid\n%s", String.join("\n", errors)));
    }
  }

  /*
   * Perform strict validation, guarding against missing Contents.json and
   * duplicate image names. Errors are appended to {@code errors} rather than
   * thrown, so that all catalogs are checked in one pass.
   */
  private static void strictlyValidateAssetCatalog(
      RelPath catalogPath,
      Map<String, Path> catalogPathsForImageNames,
      List<String> errors,
      ProjectFilesystem projectFilesystem)
      throws HumanReadableException {
    try {
      for (Path asset : projectFilesystem.getDirectoryContents(catalogPath.getPath())) {
        String assetName = asset.getFileName().toString();
        if (assetName.equals("Contents.json")) {
          continue;
        }
        // Asset type is taken from the extension, e.g. "AppIcon.appiconset".
        String[] parts = assetName.split("\\.");
        if (parts.length < 2) {
          errors.add(String.format("Unexpected file in %s: '%s'", catalogPath, assetName));
        }
        String assetType = parts[parts.length - 1];
        if (!TYPES_REQUIRING_CONTENTS_JSON.contains(assetType)) {
          continue;
        }
        boolean contentsJsonPresent = false;
        for (Path assetContentPath : projectFilesystem.getDirectoryContents(asset)) {
          String filename = assetContentPath.getFileName().toString();
          if (filename.equals("Contents.json")) {
            contentsJsonPresent = true;
            continue;
          }
          // Duplicate detection only applies to imagesets.
          if (!assetType.equals("imageset")) {
            continue;
          }

          // Lowercase asset name in case we're building on a case sensitive file system.
          String filenameKey = filename.toLowerCase();
          if (catalogPathsForImageNames.containsKey(filenameKey)) {
            Path existingCatalogPath = catalogPathsForImageNames.get(filenameKey);
            if (catalogPath.getPath().equals(existingCatalogPath)) {
              // Same catalog re-using a name is fine; only cross-catalog
              // duplicates collide after merging.
              continue;
            } else {
              // All asset catalogs (.xcassets directories) get merged into a single directory per
              // apple bundle.
              // Imagesets containing images with identical names can overwrite one another, this is
              // especially
              // problematic if two images share a name but are different
              errors.add(
                  String.format(
                      "%s is included by two asset catalogs: '%s' and '%s'",
                      assetContentPath.getFileName(), catalogPath, existingCatalogPath));
            }
          } else {
            catalogPathsForImageNames.put(filenameKey, catalogPath.getPath());
          }
        }
        if (!contentsJsonPresent) {
          errors.add(String.format("%s doesn't have Contents.json", asset));
        }
      }
    } catch (IOException e) {
      throw new HumanReadableException(
          "Failed to process asset catalog at %s: %s", catalogPath, e.getMessage());
    }
  }

  @Override
  public SortedSet<BuildRule> getBuildDeps() {
    return buildDepsSupplier.get();
  }

  public enum ValidationType {
    // Roughly match what Xcode is doing, only check whether the directory ends in .xcassets
    XCODE,
    // Guard against duplicate image names and missing Contents.json files
    STRICT
  }
}
package main; import static org.junit.Assert.*; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import java.util.Arrays; import org.junit.Test; /** * This class shows how the algorithm works and can be called. See * {@link #testFindBusesToExtendBetween()} for more detail * * @author Christopher Olk * */ public class ExampleGridsTestCases { @Test /** * The methods called in this test showcase how the grid extension algorithm * works. The grids are identical to the ones presented in the paper and the * voltages and currents hardcoded here are the result of a loadflow * calculation */ public void testFindBusesToExtendBetween() { testRadialCurrentOverload(); testLoopCurrentOverload(); testMeshedCurrentOverload(); testRadialVoltageDeviation(); testLoopVoltageDeviation(); testMeshedVoltageDeviation(); } // #region Setups private void testRadialCurrentOverload() { Bus slack = mock(Bus.class); Bus b1 = mock(Bus.class); Bus b2 = mock(Bus.class); Bus b3 = mock(Bus.class); Bus b4 = mock(Bus.class); Bus b5 = mock(Bus.class); Bus b6 = mock(Bus.class); Bus b7 = mock(Bus.class); Bus b8 = mock(Bus.class); Bus b9 = mock(Bus.class); Bus b10 = mock(Bus.class); Bus b11 = mock(Bus.class); Bus b12 = mock(Bus.class); GridSection slackb1 = mock(GridSection.class); GridSection slackb7 = mock(GridSection.class); GridSection b1b2 = mock(GridSection.class); GridSection b2b3 = mock(GridSection.class); GridSection b3b4 = mock(GridSection.class); GridSection b4b5 = mock(GridSection.class); GridSection b5b6 = mock(GridSection.class); GridSection b7b8 = mock(GridSection.class); GridSection b8b9 = mock(GridSection.class); GridSection b9b10 = mock(GridSection.class); GridSection b10b11 = mock(GridSection.class); GridSection b11b12 = mock(GridSection.class); when(slackb1.getConnectedBuses()).thenReturn(new Bus[] { slack, b1 }); when(slackb7.getConnectedBuses()).thenReturn(new Bus[] { slack, b7 }); when(b1b2.getConnectedBuses()).thenReturn(new Bus[] { b1, b2 }); 
when(b2b3.getConnectedBuses()).thenReturn(new Bus[] { b2, b3 }); when(b3b4.getConnectedBuses()).thenReturn(new Bus[] { b3, b4 }); when(b4b5.getConnectedBuses()).thenReturn(new Bus[] { b4, b5 }); when(b5b6.getConnectedBuses()).thenReturn(new Bus[] { b5, b6 }); when(b7b8.getConnectedBuses()).thenReturn(new Bus[] { b7, b8 }); when(b8b9.getConnectedBuses()).thenReturn(new Bus[] { b8, b9 }); when(b9b10.getConnectedBuses()).thenReturn(new Bus[] { b9, b10 }); when(b10b11.getConnectedBuses()).thenReturn(new Bus[] { b10, b11 }); when(b11b12.getConnectedBuses()).thenReturn(new Bus[] { b11, b12 }); when(slackb1.getAbsSpecificCurrent()).thenReturn(40.99259916 / 275); when(slackb7.getAbsSpecificCurrent()).thenReturn(73.18490378 / 275); when(b1b2.getAbsSpecificCurrent()).thenReturn(34.26192197 / 275); when(b2b3.getAbsSpecificCurrent()).thenReturn(27.47661157 / 275); when(b3b4.getAbsSpecificCurrent()).thenReturn(20.64691148 / 275); when(b4b5.getAbsSpecificCurrent()).thenReturn(13.78351268 / 275); when(b5b6.getAbsSpecificCurrent()).thenReturn(6.897451658 / 275); when(b7b8.getAbsSpecificCurrent()).thenReturn(61.31659695 / 275); when(b8b9.getAbsSpecificCurrent()).thenReturn(49.27282644 / 275); when(b9b10.getAbsSpecificCurrent()).thenReturn(37.0846535 / 275); when(b10b11.getAbsSpecificCurrent()).thenReturn(24.78574048 / 275); when(b11b12.getAbsSpecificCurrent()).thenReturn(12.41180751 / 275); when(slack.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { slackb1, slackb7 })); when(b1.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { slackb1, b1b2 })); when(b7.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { slackb7, b7b8 })); when(b2.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b1b2, b2b3 })); when(b3.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b2b3, b3b4 })); when(b4.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new 
GridSection[] { b3b4, b4b5 })); when(b5.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b4b5, b5b6 })); when(b6.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b5b6 })); when(b8.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b7b8, b8b9 })); when(b9.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b8b9, b9b10 })); when(b10.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b9b10, b10b11 })); when(b11.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b10b11, b11b12 })); when(b12.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b11b12 })); when(slack.getPuVoltage()).thenReturn(1.0); when(b1.getPuVoltage()).thenReturn(0.990464103); when(b2.getPuVoltage()).thenReturn(0.982502702); when(b3.getPuVoltage()).thenReturn(0.976123884); when(b4.getPuVoltage()).thenReturn(0.971334115); when(b5.getPuVoltage()).thenReturn(0.968138244); when(b6.getPuVoltage()).thenReturn(0.966539502); when(b7.getPuVoltage()).thenReturn(0.982924914); when(b8.getPuVoltage()).thenReturn(0.968646276); when(b9.getPuVoltage()).thenReturn(0.957191007); when(b10.getPuVoltage()).thenReturn(0.94858068); when(b11.getPuVoltage()).thenReturn(0.942831478); when(b12.getPuVoltage()).thenReturn(0.939954184); GridNavigator navigator = mock(GridNavigator.class); when(navigator.getShortestRoute(b12, slack)) .thenReturn(new GridSection[] { b11b12, b10b11, b9b10, b8b9, b7b8, slackb7 }); when(navigator.getShortestRoute(b6, slack)) .thenReturn(new GridSection[] { b5b6, b4b5, b3b4, b2b3, b1b2, slackb1 }); GenericLVGridExtension extender = new GenericLVGridExtension(navigator); extender.reportCurrentOverload(slackb7); Bus[] result = extender.findBusesToExtendBetween(); assertTrue(result[1] == slack); assertTrue(result[0] == b9); } private void testLoopCurrentOverload() { Bus slack = mock(Bus.class); Bus b1 = 
mock(Bus.class); Bus b2 = mock(Bus.class); Bus b3 = mock(Bus.class); Bus b4 = mock(Bus.class); Bus b5 = mock(Bus.class); Bus b6 = mock(Bus.class); Bus b7 = mock(Bus.class); Bus b8 = mock(Bus.class); Bus b9 = mock(Bus.class); Bus b10 = mock(Bus.class); Bus b11 = mock(Bus.class); Bus b12 = mock(Bus.class); GridSection slackb1 = mock(GridSection.class); GridSection slackb7 = mock(GridSection.class); GridSection b1b2 = mock(GridSection.class); GridSection b2b3 = mock(GridSection.class); GridSection b3b4 = mock(GridSection.class); GridSection b4b5 = mock(GridSection.class); GridSection b5b6 = mock(GridSection.class); GridSection b7b8 = mock(GridSection.class); GridSection b8b9 = mock(GridSection.class); GridSection b9b10 = mock(GridSection.class); GridSection b10b11 = mock(GridSection.class); GridSection b11b12 = mock(GridSection.class); GridSection b6b12 = mock(GridSection.class); when(slackb1.getConnectedBuses()).thenReturn(new Bus[] { slack, b1 }); when(slackb7.getConnectedBuses()).thenReturn(new Bus[] { slack, b7 }); when(b1b2.getConnectedBuses()).thenReturn(new Bus[] { b1, b2 }); when(b2b3.getConnectedBuses()).thenReturn(new Bus[] { b2, b3 }); when(b3b4.getConnectedBuses()).thenReturn(new Bus[] { b3, b4 }); when(b4b5.getConnectedBuses()).thenReturn(new Bus[] { b4, b5 }); when(b5b6.getConnectedBuses()).thenReturn(new Bus[] { b5, b6 }); when(b7b8.getConnectedBuses()).thenReturn(new Bus[] { b7, b8 }); when(b8b9.getConnectedBuses()).thenReturn(new Bus[] { b8, b9 }); when(b9b10.getConnectedBuses()).thenReturn(new Bus[] { b9, b10 }); when(b10b11.getConnectedBuses()).thenReturn(new Bus[] { b10, b11 }); when(b11b12.getConnectedBuses()).thenReturn(new Bus[] { b11, b12 }); when(b6b12.getConnectedBuses()).thenReturn(new Bus[] { b6, b12 }); when(slackb1.getAbsSpecificCurrent()).thenReturn(49.77579465 / 275); when(slackb7.getAbsSpecificCurrent()).thenReturn(64.14341095 / 275); when(b1b2.getAbsSpecificCurrent()).thenReturn(43.03129665 / 275); 
when(b2b3.getAbsSpecificCurrent()).thenReturn(36.21758883 / 275); when(b3b4.getAbsSpecificCurrent()).thenReturn(29.34463677 / 275); when(b4b5.getAbsSpecificCurrent()).thenReturn(22.42300371 / 275); when(b5b6.getAbsSpecificCurrent()).thenReturn(15.46375556 / 275); when(b7b8.getAbsSpecificCurrent()).thenReturn(52.30033834 / 275); when(b8b9.getAbsSpecificCurrent()).thenReturn(40.30886681 / 275); when(b9b10.getAbsSpecificCurrent()).thenReturn(28.2007319 / 275); when(b10b11.getAbsSpecificCurrent()).thenReturn(16.00976495 / 275); when(b11b12.getAbsSpecificCurrent()).thenReturn(3.771321928 / 275); when(b6b12.getAbsSpecificCurrent()).thenReturn(8.47835391 / 275); when(slack.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { slackb1, slackb7 })); when(b1.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { slackb1, b1b2 })); when(b7.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { slackb7, b7b8 })); when(b2.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b1b2, b2b3 })); when(b3.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b2b3, b3b4 })); when(b4.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b3b4, b4b5 })); when(b5.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b4b5, b5b6 })); when(b6.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b5b6, b6b12 })); when(b8.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b7b8, b8b9 })); when(b9.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b8b9, b9b10 })); when(b10.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b9b10, b10b11 })); when(b11.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b10b11, b11b12 })); when(b12.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] 
{ b11b12, b6b12 })); when(slack.getPuVoltage()).thenReturn(1.0); when(b1.getPuVoltage()).thenReturn(0.988389006); when(b2.getPuVoltage()).thenReturn(0.978344568); when(b3.getPuVoltage()).thenReturn(0.969885733); when(b4.getPuVoltage()).thenReturn(0.963028838); when(b5.getPuVoltage()).thenReturn(0.957787327); when(b6.getPuVoltage()).thenReturn(0.954171602); when(b7.getPuVoltage()).thenReturn(0.985036178); when(b8.getPuVoltage()).thenReturn(0.972824883); when(b9.getPuVoltage()).thenReturn(0.963406995); when(b10.getPuVoltage()).thenReturn(0.956814725); when(b11.getPuVoltage()).thenReturn(0.953070979); when(b12.getPuVoltage()).thenReturn(0.952188901); GridNavigator navigator = mock(GridNavigator.class); when(navigator.getShortestRoute(b12, slack)) .thenReturn(new GridSection[] { b11b12, b10b11, b9b10, b8b9, b7b8, slackb7 }); when(navigator.getShortestRoute(b6, slack)) .thenReturn(new GridSection[] { b5b6, b4b5, b3b4, b2b3, b1b2, slackb1 }); GenericLVGridExtension extender = new GenericLVGridExtension(navigator); extender.reportCurrentOverload(slackb7); Bus[] result = extender.findBusesToExtendBetween(); assertTrue(result[1] == slack); assertTrue(result[0] == b9); } private void testMeshedCurrentOverload() { Bus slack = mock(Bus.class); Bus b1 = mock(Bus.class); Bus b2 = mock(Bus.class); Bus b3 = mock(Bus.class); Bus b4 = mock(Bus.class); Bus b5 = mock(Bus.class); Bus b6 = mock(Bus.class); Bus b7 = mock(Bus.class); Bus b8 = mock(Bus.class); Bus b9 = mock(Bus.class); Bus b10 = mock(Bus.class); Bus b11 = mock(Bus.class); Bus b12 = mock(Bus.class); GridSection slackb1 = mock(GridSection.class); GridSection slackb7 = mock(GridSection.class); GridSection b1b2 = mock(GridSection.class); GridSection b2b3 = mock(GridSection.class); GridSection b3b4 = mock(GridSection.class); GridSection b4b5 = mock(GridSection.class); GridSection b5b6 = mock(GridSection.class); GridSection b7b8 = mock(GridSection.class); GridSection b8b9 = mock(GridSection.class); GridSection b9b10 = 
mock(GridSection.class); GridSection b10b11 = mock(GridSection.class); GridSection b11b12 = mock(GridSection.class); GridSection b1b8 = mock(GridSection.class); GridSection b2b9 = mock(GridSection.class); GridSection b3b10 = mock(GridSection.class); GridSection b4b11 = mock(GridSection.class); GridSection b5b12 = mock(GridSection.class); when(slackb1.getConnectedBuses()).thenReturn(new Bus[] { slack, b1 }); when(slackb7.getConnectedBuses()).thenReturn(new Bus[] { slack, b7 }); when(b1b2.getConnectedBuses()).thenReturn(new Bus[] { b1, b2 }); when(b2b3.getConnectedBuses()).thenReturn(new Bus[] { b2, b3 }); when(b3b4.getConnectedBuses()).thenReturn(new Bus[] { b3, b4 }); when(b4b5.getConnectedBuses()).thenReturn(new Bus[] { b4, b5 }); when(b5b6.getConnectedBuses()).thenReturn(new Bus[] { b5, b6 }); when(b7b8.getConnectedBuses()).thenReturn(new Bus[] { b7, b8 }); when(b8b9.getConnectedBuses()).thenReturn(new Bus[] { b8, b9 }); when(b9b10.getConnectedBuses()).thenReturn(new Bus[] { b9, b10 }); when(b10b11.getConnectedBuses()).thenReturn(new Bus[] { b10, b11 }); when(b11b12.getConnectedBuses()).thenReturn(new Bus[] { b11, b12 }); when(b1b8.getConnectedBuses()).thenReturn(new Bus[] { b1, b8 }); when(b2b9.getConnectedBuses()).thenReturn(new Bus[] { b2, b9 }); when(b3b10.getConnectedBuses()).thenReturn(new Bus[] { b3, b10 }); when(b4b11.getConnectedBuses()).thenReturn(new Bus[] { b4, b11 }); when(b5b12.getConnectedBuses()).thenReturn(new Bus[] { b5, b12 }); when(slackb1.getAbsSpecificCurrent()).thenReturn(65.78612203 / 275); when(slackb7.getAbsSpecificCurrent()).thenReturn(47.73623652 / 275); when(b1b2.getAbsSpecificCurrent()).thenReturn(46.38849452 / 275); when(b2b3.getAbsSpecificCurrent()).thenReturn(33.77675505 / 275); when(b3b4.getAbsSpecificCurrent()).thenReturn(23.46934726 / 275); when(b4b5.getAbsSpecificCurrent()).thenReturn(14.33610058 / 275); when(b5b6.getAbsSpecificCurrent()).thenReturn(6.975392047 / 275); when(b7b8.getAbsSpecificCurrent()).thenReturn(35.93912163 
// --- tail of the meshed-grid current-overload scenario (the method header is above this
// chunk): remaining per-section currents (each literal is amps divided by 275 — presumably
// the section's rated current, so the stub yields relative loading; TODO confirm),
// bus-to-section adjacency, per-unit bus voltages, shortest-route stubs, and the final
// assertion that reinforcement goes between bus b2 and the slack bus. ---
/ 275); when(b8b9.getAbsSpecificCurrent()).thenReturn(36.66898614 / 275); when(b9b10.getAbsSpecificCurrent()).thenReturn(30.43365246 / 275); when(b10b11.getAbsSpecificCurrent()).thenReturn(21.75063583 / 275); when(b11b12.getAbsSpecificCurrent()).thenReturn(11.79093736 / 275); when(b1b8.getAbsSpecificCurrent()).thenReturn(12.62765834 / 275); when(b2b9.getAbsSpecificCurrent()).thenReturn(5.766775958 / 275); when(b3b10.getAbsSpecificCurrent()).thenReturn(3.406897916 / 275); when(b4b11.getAbsSpecificCurrent()).thenReturn(2.193670902 / 275); when(b5b12.getAbsSpecificCurrent()).thenReturn(0.397094649 / 275); when(slack.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { slackb1, slackb7 })); when(b1.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { slackb1, b1b2, b1b8 })); when(b7.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { slackb7, b7b8 })); when(b2.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b1b2, b2b3, b2b9 })); when(b3.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b2b3, b3b4, b3b10 })); when(b4.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b3b4, b4b5, b4b11 })); when(b5.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b4b5, b5b6, b5b12 })); when(b6.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b5b6 })); when(b8.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b7b8, b8b9, b1b8 })); when(b9.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b8b9, b9b10, b2b9 })); when(b10.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b9b10, b10b11, b3b10 })); when(b11.getConnectedPowerGridSections()) .thenReturn(Arrays.asList(new GridSection[] { b10b11, b11b12, b4b11 })); when(b12.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] {
// per-unit voltages for all 13 buses, route stubs from the two feeder ends (b12, b6) back
// to the slack bus, then: report a current overload on section slack-b1 and assert the
// extension algorithm picks the span b2 <-> slack.
b11b12, b5b12 })); when(slack.getPuVoltage()).thenReturn(1.0); when(b1.getPuVoltage()).thenReturn(0.984680818); when(b2.getPuVoltage()).thenReturn(0.973897989); when(b3.getPuVoltage()).thenReturn(0.966056279); when(b4.getPuVoltage()).thenReturn(0.960612259); when(b5.getPuVoltage()).thenReturn(0.957288653); when(b6.getPuVoltage()).thenReturn(0.955671862); when(b7.getPuVoltage()).thenReturn(0.988886683); when(b8.getPuVoltage()).thenReturn(0.980525585); when(b9.getPuVoltage()).thenReturn(0.972003567); when(b10.getPuVoltage()).thenReturn(0.964938024); when(b11.getPuVoltage()).thenReturn(0.959892528); when(b12.getPuVoltage()).thenReturn(0.957158856); GridNavigator navigator = mock(GridNavigator.class); when(navigator.getShortestRoute(b12, slack)) .thenReturn(new GridSection[] { b11b12, b10b11, b9b10, b8b9, b7b8, slackb7 }); when(navigator.getShortestRoute(b6, slack)) .thenReturn(new GridSection[] { b5b6, b4b5, b3b4, b2b3, b1b2, slackb1 }); GenericLVGridExtension extender = new GenericLVGridExtension(navigator); extender.reportCurrentOverload(slackb1); Bus[] result = extender.findBusesToExtendBetween(); assertTrue(result[1] == slack); assertTrue(result[0] == b2); }
// Radial scenario: 13 mocked buses on two radial feeders (slack-b1-...-b6 and
// slack-b7-...-b12, no cross connections). A voltage overload reported at b12 should make
// the algorithm reinforce between b8 and the slack bus (asserted further below).
private void testRadialVoltageDeviation() { Bus slack = mock(Bus.class); Bus b1 = mock(Bus.class); Bus b2 = mock(Bus.class); Bus b3 = mock(Bus.class); Bus b4 = mock(Bus.class); Bus b5 = mock(Bus.class); Bus b6 = mock(Bus.class); Bus b7 = mock(Bus.class); Bus b8 = mock(Bus.class); Bus b9 = mock(Bus.class); Bus b10 = mock(Bus.class); Bus b11 = mock(Bus.class); Bus b12 = mock(Bus.class); GridSection slackb1 = mock(GridSection.class); GridSection slackb7 = mock(GridSection.class); GridSection b1b2 = mock(GridSection.class); GridSection b2b3 = mock(GridSection.class); GridSection b3b4 = mock(GridSection.class); GridSection b4b5 = mock(GridSection.class); GridSection b5b6 = mock(GridSection.class); GridSection b7b8 = mock(GridSection.class); GridSection b8b9 = mock(GridSection.class); GridSection b9b10 =
// (testRadialVoltageDeviation, continued) wire each section to its two endpoint buses,
// then stub the section loadings (amps / 275 — presumably the rated current; TODO confirm).
mock(GridSection.class); GridSection b10b11 = mock(GridSection.class); GridSection b11b12 = mock(GridSection.class); when(slackb1.getConnectedBuses()).thenReturn(new Bus[] { slack, b1 }); when(slackb7.getConnectedBuses()).thenReturn(new Bus[] { slack, b7 }); when(b1b2.getConnectedBuses()).thenReturn(new Bus[] { b1, b2 }); when(b2b3.getConnectedBuses()).thenReturn(new Bus[] { b2, b3 }); when(b3b4.getConnectedBuses()).thenReturn(new Bus[] { b3, b4 }); when(b4b5.getConnectedBuses()).thenReturn(new Bus[] { b4, b5 }); when(b5b6.getConnectedBuses()).thenReturn(new Bus[] { b5, b6 }); when(b7b8.getConnectedBuses()).thenReturn(new Bus[] { b7, b8 }); when(b8b9.getConnectedBuses()).thenReturn(new Bus[] { b8, b9 }); when(b9b10.getConnectedBuses()).thenReturn(new Bus[] { b9, b10 }); when(b10b11.getConnectedBuses()).thenReturn(new Bus[] { b10, b11 }); when(b11b12.getConnectedBuses()).thenReturn(new Bus[] { b11, b12 }); when(slackb1.getAbsSpecificCurrent()).thenReturn(40.99259916 / 275); when(slackb7.getAbsSpecificCurrent()).thenReturn(62.30298268 / 275); when(b1b2.getAbsSpecificCurrent()).thenReturn(34.26192197 / 275); when(b2b3.getAbsSpecificCurrent()).thenReturn(27.47661157 / 275); when(b3b4.getAbsSpecificCurrent()).thenReturn(20.64691148 / 275); when(b4b5.getAbsSpecificCurrent()).thenReturn(13.78351268 / 275); when(b5b6.getAbsSpecificCurrent()).thenReturn(6.897451658 / 275); when(b7b8.getAbsSpecificCurrent()).thenReturn(52.15625275 / 275); when(b8b9.getAbsSpecificCurrent()).thenReturn(41.88268079 / 275); when(b9b10.getAbsSpecificCurrent()).thenReturn(31.50517837 / 275); when(b10b11.getAbsSpecificCurrent()).thenReturn(21.04825056 / 275); when(b11b12.getAbsSpecificCurrent()).thenReturn(10.53765251 / 275); when(slack.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { slackb1, slackb7 })); when(b1.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { slackb1, b1b2 }));
// bus -> connected-sections adjacency for the remaining buses (pure radial: each inner bus
// touches exactly two sections, feeder ends b6/b12 only one), then the per-unit voltage
// profile along both feeders and the navigator route stubs.
when(b7.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { slackb7, b7b8 })); when(b2.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b1b2, b2b3 })); when(b3.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b2b3, b3b4 })); when(b4.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b3b4, b4b5 })); when(b5.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b4b5, b5b6 })); when(b6.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b5b6 })); when(b8.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b7b8, b8b9 })); when(b9.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b8b9, b9b10 })); when(b10.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b9b10, b10b11 })); when(b11.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b10b11, b11b12 })); when(b12.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b11b12 })); when(slack.getPuVoltage()).thenReturn(1.0); when(b1.getPuVoltage()).thenReturn(0.990455618); when(b2.getPuVoltage()).thenReturn(0.982473953); when(b3.getPuVoltage()).thenReturn(0.976070076); when(b4.getPuVoltage()).thenReturn(0.971256251); when(b5.getPuVoltage()).thenReturn(0.9680418); when(b6.getPuVoltage()).thenReturn(0.966432997); when(b7.getPuVoltage()).thenReturn(0.98545917); when(b8.getPuVoltage()).thenReturn(0.973276432); when(b9.getPuVoltage()).thenReturn(0.963486696); when(b10.getPuVoltage()).thenReturn(0.956118634); when(b11.getPuVoltage()).thenReturn(0.951194195); when(b12.getPuVoltage()).thenReturn(0.94872823); GridNavigator navigator = mock(GridNavigator.class); when(navigator.getShortestRoute(b12, slack)) .thenReturn(new GridSection[] { b11b12, b10b11, b9b10, b8b9, b7b8, slackb7 }); when(navigator.getShortestRoute(b6, slack)) .thenReturn(new
GridSection[] { b5b6, b4b5, b3b4, b2b3, b1b2, slackb1 }); GenericLVGridExtension extender = new GenericLVGridExtension(navigator); extender.reportVoltageOverload(b12); ; Bus[] result = extender.findBusesToExtendBetween(); assertTrue(result[1] == slack); assertTrue(result[0] == b8); } private void testLoopVoltageDeviation() { Bus slack = mock(Bus.class); Bus b1 = mock(Bus.class); Bus b2 = mock(Bus.class); Bus b3 = mock(Bus.class); Bus b4 = mock(Bus.class); Bus b5 = mock(Bus.class); Bus b6 = mock(Bus.class); Bus b7 = mock(Bus.class); Bus b8 = mock(Bus.class); Bus b9 = mock(Bus.class); Bus b10 = mock(Bus.class); Bus b11 = mock(Bus.class); Bus b12 = mock(Bus.class); GridSection slackb1 = mock(GridSection.class); GridSection slackb7 = mock(GridSection.class); GridSection b1b2 = mock(GridSection.class); GridSection b2b3 = mock(GridSection.class); GridSection b3b4 = mock(GridSection.class); GridSection b4b5 = mock(GridSection.class); GridSection b5b6 = mock(GridSection.class); GridSection b7b8 = mock(GridSection.class); GridSection b8b9 = mock(GridSection.class); GridSection b9b10 = mock(GridSection.class); GridSection b10b11 = mock(GridSection.class); GridSection b11b12 = mock(GridSection.class); GridSection b6b12 = mock(GridSection.class); when(slackb1.getConnectedBuses()).thenReturn(new Bus[] { slack, b1 }); when(slackb7.getConnectedBuses()).thenReturn(new Bus[] { slack, b7 }); when(b1b2.getConnectedBuses()).thenReturn(new Bus[] { b1, b2 }); when(b2b3.getConnectedBuses()).thenReturn(new Bus[] { b2, b3 }); when(b3b4.getConnectedBuses()).thenReturn(new Bus[] { b3, b4 }); when(b4b5.getConnectedBuses()).thenReturn(new Bus[] { b4, b5 }); when(b5b6.getConnectedBuses()).thenReturn(new Bus[] { b5, b6 }); when(b7b8.getConnectedBuses()).thenReturn(new Bus[] { b7, b8 }); when(b8b9.getConnectedBuses()).thenReturn(new Bus[] { b8, b9 }); when(b9b10.getConnectedBuses()).thenReturn(new Bus[] { b9, b10 }); when(b10b11.getConnectedBuses()).thenReturn(new Bus[] { b10, b11 }); 
when(b11b12.getConnectedBuses()).thenReturn(new Bus[] { b11, b12 }); when(b6b12.getConnectedBuses()).thenReturn(new Bus[] { b6, b12 }); when(slackb1.getAbsSpecificCurrent()).thenReturn(46.81769445 / 275); when(slackb7.getAbsSpecificCurrent()).thenReturn(56.36144563 / 275); when(b1b2.getAbsSpecificCurrent()).thenReturn(40.07793907 / 275); when(b2b3.getAbsSpecificCurrent()).thenReturn(33.2740025 / 275); when(b3b4.getAbsSpecificCurrent()).thenReturn(26.41595043 / 275); when(b4b5.getAbsSpecificCurrent()).thenReturn(19.51439042 / 275); when(b5b6.getAbsSpecificCurrent()).thenReturn(12.58037464 / 275); when(b7b8.getAbsSpecificCurrent()).thenReturn(46.22903062 / 275); when(b8b9.getAbsSpecificCurrent()).thenReturn(35.98497712 / 275); when(b9b10.getAbsSpecificCurrent()).thenReturn(25.65250215 / 275); when(b10b11.getAbsSpecificCurrent()).thenReturn(15.25616534 / 275); when(b11b12.getAbsSpecificCurrent()).thenReturn(4.821516533 / 275); when(b6b12.getAbsSpecificCurrent()).thenReturn(5.625292141 / 275); when(slack.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { slackb1, slackb7 })); when(b1.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { slackb1, b1b2 })); when(b7.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { slackb7, b7b8 })); when(b2.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b1b2, b2b3 })); when(b3.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b2b3, b3b4 })); when(b4.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b3b4, b4b5 })); when(b5.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b4b5, b5b6 })); when(b6.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b5b6, b6b12 })); when(b8.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b7b8, b8b9 })); 
// (testLoopVoltageDeviation, continued) remaining adjacency, per-unit voltage profile
// around the ring, route stubs, then: a voltage overload reported at b6 must yield
// reinforcement between b8 and the slack bus.
when(b9.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b8b9, b9b10 })); when(b10.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b9b10, b10b11 })); when(b11.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b10b11, b11b12 })); when(b12.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b11b12, b6b12 })); when(slack.getPuVoltage()).thenReturn(1.0); when(b1.getPuVoltage()).thenReturn(0.989086221); when(b2.getPuVoltage()).thenReturn(0.979737679); when(b3.getPuVoltage()).thenReturn(0.971972052); when(b4.getPuVoltage()).thenReturn(0.965804272); when(b5.getPuVoltage()).thenReturn(0.961246362); when(b6.getPuVoltage()).thenReturn(0.958307301); when(b7.getPuVoltage()).thenReturn(0.986860542); when(b8.getPuVoltage()).thenReturn(0.976075209); when(b9.getPuVoltage()).thenReturn(0.967674769); when(b10.getPuVoltage()).thenReturn(0.961683642); when(b11.getPuVoltage()).thenReturn(0.95811949); when(b12.getPuVoltage()).thenReturn(0.95699292); GridNavigator navigator = mock(GridNavigator.class); when(navigator.getShortestRoute(b12, slack)) .thenReturn(new GridSection[] { b11b12, b10b11, b9b10, b8b9, b7b8, slackb7 }); when(navigator.getShortestRoute(b6, slack)) .thenReturn(new GridSection[] { b5b6, b4b5, b3b4, b2b3, b1b2, slackb1 }); GenericLVGridExtension extender = new GenericLVGridExtension(navigator); extender.reportVoltageOverload(b6); Bus[] result = extender.findBusesToExtendBetween(); assertTrue(result[1] == slack); assertTrue(result[0] == b8); }
// Meshed scenario: the two feeders plus five rung sections (b1-b8, b2-b9, b3-b10, b4-b11,
// b5-b12) forming a ladder network. A voltage overload at b12 must yield reinforcement
// between b2 and the slack bus.
private void testMeshedVoltageDeviation() { Bus slack = mock(Bus.class); Bus b1 = mock(Bus.class); Bus b2 = mock(Bus.class); Bus b3 = mock(Bus.class); Bus b4 = mock(Bus.class); Bus b5 = mock(Bus.class); Bus b6 = mock(Bus.class); Bus b7 = mock(Bus.class); Bus b8 = mock(Bus.class); Bus b9 = mock(Bus.class); Bus b10 = mock(Bus.class); Bus b11 = mock(Bus.class); Bus b12 = mock(Bus.class); GridSection slackb1 = mock(GridSection.class); GridSection slackb7 = mock(GridSection.class); GridSection b1b2 = mock(GridSection.class); GridSection b2b3 = mock(GridSection.class); GridSection b3b4 = mock(GridSection.class); GridSection b4b5 = mock(GridSection.class); GridSection b5b6 = mock(GridSection.class); GridSection b7b8 = mock(GridSection.class); GridSection b8b9 = mock(GridSection.class); GridSection b9b10 = mock(GridSection.class); GridSection b10b11 = mock(GridSection.class); GridSection b11b12 = mock(GridSection.class); GridSection b1b8 = mock(GridSection.class); GridSection b2b9 = mock(GridSection.class); GridSection b3b10 = mock(GridSection.class); GridSection b4b11 = mock(GridSection.class); GridSection b5b12 = mock(GridSection.class); when(slackb1.getConnectedBuses()).thenReturn(new Bus[] { slack, b1 }); when(slackb7.getConnectedBuses()).thenReturn(new Bus[] { slack, b7 }); when(b1b2.getConnectedBuses()).thenReturn(new Bus[] { b1, b2 }); when(b2b3.getConnectedBuses()).thenReturn(new Bus[] { b2, b3 }); when(b3b4.getConnectedBuses()).thenReturn(new Bus[] { b3, b4 }); when(b4b5.getConnectedBuses()).thenReturn(new Bus[] { b4, b5 }); when(b5b6.getConnectedBuses()).thenReturn(new Bus[] { b5, b6 }); when(b7b8.getConnectedBuses()).thenReturn(new Bus[] { b7, b8 }); when(b8b9.getConnectedBuses()).thenReturn(new Bus[] { b8, b9 }); when(b9b10.getConnectedBuses()).thenReturn(new Bus[] { b9, b10 }); when(b10b11.getConnectedBuses()).thenReturn(new Bus[] { b10, b11 }); when(b11b12.getConnectedBuses()).thenReturn(new Bus[] { b11, b12 }); when(b1b8.getConnectedBuses()).thenReturn(new Bus[] { b1, b8 }); when(b2b9.getConnectedBuses()).thenReturn(new Bus[] { b2, b9 }); when(b3b10.getConnectedBuses()).thenReturn(new Bus[] { b3, b10 }); when(b4b11.getConnectedBuses()).thenReturn(new Bus[] { b4, b11 }); when(b5b12.getConnectedBuses()).thenReturn(new Bus[] { b5, b12 }); when(slackb1.getAbsSpecificCurrent()).thenReturn(60.08866967 / 275);
// section loadings (amps / 275 — presumably the rated current; TODO confirm) and the
// bus -> connected-sections adjacency for the ladder network.
when(slackb7.getAbsSpecificCurrent()).thenReturn(42.8340611 / 275); when(b1b2.getAbsSpecificCurrent()).thenReturn(42.40127271 / 275); when(b2b3.getAbsSpecificCurrent()).thenReturn(30.94174157 / 275); when(b3b4.getAbsSpecificCurrent()).thenReturn(21.61240689 / 275); when(b4b5.getAbsSpecificCurrent()).thenReturn(13.40418069 / 275); when(b5b6.getAbsSpecificCurrent()).thenReturn(6.949517207 / 275); when(b7b8.getAbsSpecificCurrent()).thenReturn(32.73384303 / 275); when(b8b9.getAbsSpecificCurrent()).thenReturn(33.48177734 / 275); when(b9b10.getAbsSpecificCurrent()).thenReturn(27.85230156 / 275); when(b10b11.getAbsSpecificCurrent()).thenReturn(19.97354626 / 275); when(b11b12.getAbsSpecificCurrent()).thenReturn(10.88833183 / 275); when(b1b8.getAbsSpecificCurrent()).thenReturn(10.92649086 / 275); when(b2b9.getAbsSpecificCurrent()).thenReturn(4.630341629 / 275); when(b3b10.getAbsSpecificCurrent()).thenReturn(2.449534804 / 275); when(b4b11.getAbsSpecificCurrent()).thenReturn(1.292680945 / 275); when(b5b12.getAbsSpecificCurrent()).thenReturn(0.483235252 / 275); when(slack.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { slackb1, slackb7 })); when(b1.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { slackb1, b1b2, b1b8 })); when(b7.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { slackb7, b7b8 })); when(b2.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b1b2, b2b3, b2b9 })); when(b3.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b2b3, b3b4, b3b10 })); when(b4.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b3b4, b4b5, b4b11 })); when(b5.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b4b5, b5b6, b5b12 })); when(b6.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b5b6 })); when(b8.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new
GridSection[] { b7b8, b8b9, b1b8 })); when(b9.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b8b9, b9b10, b2b9 })); when(b10.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b9b10, b10b11, b3b10 })); when(b11.getConnectedPowerGridSections()) .thenReturn(Arrays.asList(new GridSection[] { b10b11, b11b12, b4b11 })); when(b12.getConnectedPowerGridSections()).thenReturn(Arrays.asList(new GridSection[] { b11b12, b5b12 })); when(slack.getPuVoltage()).thenReturn(1.0); when(b1.getPuVoltage()).thenReturn(0.985995263); when(b2.getPuVoltage()).thenReturn(0.976106792); when(b3.getPuVoltage()).thenReturn(0.968886946); when(b4.getPuVoltage()).thenReturn(0.963841894); when(b5.getPuVoltage()).thenReturn(0.96071197); when(b6.getPuVoltage()).thenReturn(0.959088802); when(b7.getPuVoltage()).thenReturn(0.99002215); when(b8.getPuVoltage()).thenReturn(0.982389553); when(b9.getPuVoltage()).thenReturn(0.974578585); when(b10.getPuVoltage()).thenReturn(0.968078141); when(b11.getPuVoltage()).thenReturn(0.963414961); when(b12.getPuVoltage()).thenReturn(0.960872325); GridNavigator navigator = mock(GridNavigator.class); when(navigator.getShortestRoute(b12, slack)) .thenReturn(new GridSection[] { b11b12, b10b11, b9b10, b8b9, b7b8, slackb7 }); when(navigator.getShortestRoute(b6, slack)) .thenReturn(new GridSection[] { b5b6, b4b5, b3b4, b2b3, b1b2, slackb1 }); GenericLVGridExtension extender = new GenericLVGridExtension(navigator); extender.reportVoltageOverload(b12); Bus[] result = extender.findBusesToExtendBetween(); assertTrue(result[1] == slack); assertTrue(result[0] == b2); }
// FIX: in the collapsed original, "// #endregion }" commented out the class-closing brace;
// the brace is restored on its own line below.
// #endregion
}
package org.zendesk.client.v2; import org.junit.After; import org.junit.BeforeClass; import org.junit.Ignore; import org.junit.Test; import org.zendesk.client.v2.model.Audit; import org.zendesk.client.v2.model.Collaborator; import org.zendesk.client.v2.model.Comment; import org.zendesk.client.v2.model.Field; import org.zendesk.client.v2.model.Group; import org.zendesk.client.v2.model.Identity; import org.zendesk.client.v2.model.JobStatus; import org.zendesk.client.v2.model.Organization; import org.zendesk.client.v2.model.Request; import org.zendesk.client.v2.model.Status; import org.zendesk.client.v2.model.SuspendedTicket; import org.zendesk.client.v2.model.Ticket; import org.zendesk.client.v2.model.TicketForm; import org.zendesk.client.v2.model.User; import org.zendesk.client.v2.model.events.Event; import org.zendesk.client.v2.model.hc.Article; import org.zendesk.client.v2.model.hc.Category; import org.zendesk.client.v2.model.hc.Section; import org.zendesk.client.v2.model.hc.Translation; import org.zendesk.client.v2.model.targets.Target; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.List; import java.util.HashMap; import java.util.Properties; import java.util.UUID; import static org.hamcrest.CoreMatchers.anyOf; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.CoreMatchers.nullValue; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.junit.Assume.assumeThat; /** * @author stephenc * @since 04/04/2013 13:57 */ public class RealSmokeTest { private static Properties config; private Zendesk instance; @BeforeClass public static void loadConfig() { config = ZendeskConfig.load(); 
assumeThat("We have a configuration", config, notNullValue()); assertThat("Configuration has an url", config.getProperty("url"), notNullValue()); } public void assumeHaveToken() { assumeThat("We have a username", config.getProperty("username"), notNullValue()); assumeThat("We have a token", config.getProperty("token"), notNullValue()); } public void assumeHavePassword() { assumeThat("We have a username", config.getProperty("username"), notNullValue()); assumeThat("We have a password", config.getProperty("password"), notNullValue()); } public void assumeHaveTokenOrPassword() { assumeThat("We have a username", config.getProperty("username"), notNullValue()); assumeThat("We have a token or password", config.getProperty("token") != null || config.getProperty("password") != null, is( true)); } @After public void closeClient() { if (instance != null) { instance.close(); } instance = null; } @Test public void createClientWithToken() throws Exception { assumeHaveToken(); instance = new Zendesk.Builder(config.getProperty("url")) .setUsername(config.getProperty("username")) .setToken(config.getProperty("token")) .build(); } @Test public void createClientWithTokenOrPassword() throws Exception { assumeHaveTokenOrPassword(); final Zendesk.Builder builder = new Zendesk.Builder(config.getProperty("url")) .setUsername(config.getProperty("username")); if (config.getProperty("token") != null) { builder.setToken(config.getProperty("token")); } else if (config.getProperty("password") != null) { builder.setPassword(config.getProperty("password")); } instance = builder.build(); } @Test public void getTicket() throws Exception { createClientWithTokenOrPassword(); Ticket ticket = instance.getTicket(1); assertThat(ticket, notNullValue()); } @Test @Ignore("Needs specfic ticket form instance") public void getTicketForm() throws Exception { createClientWithTokenOrPassword(); TicketForm ticketForm = instance.getTicketForm(27562); assertThat(ticketForm, notNullValue()); 
assertTrue(ticketForm.isEndUserVisible()); } @Test public void getTicketForms() throws Exception { createClientWithTokenOrPassword(); Iterable<TicketForm> ticketForms = instance.getTicketForms(); assertTrue(ticketForms.iterator().hasNext()); for(TicketForm ticketForm : ticketForms){ assertThat(ticketForm, notNullValue()); } } @Test @Ignore("Needs specfic ticket form instance") public void getTicketFieldsOnForm() throws Exception { createClientWithTokenOrPassword(); TicketForm ticketForm = instance.getTicketForm(27562); for(Integer id :ticketForm.getTicketFieldIds()){ Field f = instance.getTicketField(id); assertNotNull(f); } assertThat(ticketForm, notNullValue()); assertTrue(ticketForm.isEndUserVisible()); } @Test public void getTargets() throws Exception { createClientWithTokenOrPassword(); Long firstTargetId = null; for (Target target : instance.getTargets()) { assertNotNull(target); if (firstTargetId != null) { assertNotEquals(firstTargetId, target.getId()); // check for infinite loop } else { firstTargetId = target.getId(); } } } @Test @Ignore("Needs test data setup correctly") public void getTicketsPagesRequests() throws Exception { createClientWithTokenOrPassword(); int count = 0; for (Ticket t : instance.getTickets()) { assertThat(t.getSubject(), notNullValue()); if (++count > 150) { break; } } assertThat(count, is(151)); } @Test @Ignore("Needs test data setup correctly") public void getRecentTickets() throws Exception { createClientWithTokenOrPassword(); int count = 0; for (Ticket t : instance.getRecentTickets()) { assertThat(t.getSubject(), notNullValue()); if (++count > 150) { break; } } assertThat(count, is(151)); } @Test public void getTicketsById() throws Exception { createClientWithTokenOrPassword(); long count = 1; for (Ticket t : instance.getTickets(1, 6, 11)) { assertThat(t.getSubject(), notNullValue()); assertThat(t.getId(), is(count)); count += 5; } assertThat(count, is(16L)); } @Test public void getTicketsIncrementally() throws Exception { 
createClientWithTokenOrPassword(); int count = 0; for (Ticket t : instance.getTicketsIncrementally(new Date(0L))) { assertThat(t.getId(), notNullValue()); if (++count > 10) { break; } } } @Test public void getTicketAudits() throws Exception { createClientWithTokenOrPassword(); for (Audit a : instance.getTicketAudits(1L)) { assertThat(a, notNullValue()); assertThat(a.getEvents(), not(Collections.<Event>emptyList())); } } @Test public void getTicketFields() throws Exception { createClientWithTokenOrPassword(); int count = 0; for (Field f : instance.getTicketFields()) { assertThat(f, notNullValue()); assertThat(f.getId(), notNullValue()); assertThat(f.getType(), notNullValue()); if (++count > 10) { break; } } } @Test public void createClientWithPassword() throws Exception { assumeHavePassword(); instance = new Zendesk.Builder(config.getProperty("url")) .setUsername(config.getProperty("username")) .setPassword(config.getProperty("password")) .build(); Ticket t = instance.getTicket(1); assertThat(t, notNullValue()); System.out.println(t); } @Test public void createAnonymousClient() { instance = new Zendesk.Builder(config.getProperty("url")) .build(); } @Test @Ignore("Don't spam zendesk") public void createDeleteTicket() throws Exception { createClientWithTokenOrPassword(); assumeThat("Must have a requester email", config.getProperty("requester.email"), notNullValue()); Ticket t = new Ticket( new Ticket.Requester(config.getProperty("requester.name"), config.getProperty("requester.email")), "This is a test", new Comment("Please ignore this ticket")); t.setCollaborators(Arrays.asList(new Collaborator("Bob Example", "bob@example.org"), new Collaborator("Alice Example", "alice@example.org"))); Ticket ticket = instance.createTicket(t); System.out.println(ticket.getId() + " -> " + ticket.getUrl()); assertThat(ticket.getId(), notNullValue()); try { Ticket t2 = instance.getTicket(ticket.getId()); assertThat(t2, notNullValue()); assertThat(t2.getId(), is(ticket.getId())); 
List<User> ticketCollaborators = instance.getTicketCollaborators(ticket.getId()); assertThat("Collaborators", ticketCollaborators.size(), is(2)); assertThat("First Collaborator", ticketCollaborators.get(0).getEmail(), anyOf(is("alice@example.org"), is("bob@example.org"))); } finally { instance.deleteTicket(ticket.getId()); } assertThat(ticket.getSubject(), is(t.getSubject())); assertThat(ticket.getRequester(), nullValue()); assertThat(ticket.getRequesterId(), notNullValue()); assertThat(ticket.getDescription(), is(t.getComment().getBody())); assertThat("Collaborators", ticket.getCollaboratorIds().size(), is(2)); assertThat(instance.getTicket(ticket.getId()), nullValue()); } @Test @Ignore("Don't spam zendesk") public void createSolveTickets() throws Exception { createClientWithTokenOrPassword(); assumeThat("Must have a requester email", config.getProperty("requester.email"), notNullValue()); Ticket ticket; long firstId = Long.MAX_VALUE; do { Ticket t = new Ticket( new Ticket.Requester(config.getProperty("requester.name"), config.getProperty("requester.email")), "This is a test " + UUID.randomUUID().toString(), new Comment("Please ignore this ticket")); ticket = instance.createTicket(t); System.out.println(ticket.getId() + " -> " + ticket.getUrl()); assertThat(ticket.getId(), notNullValue()); Ticket t2 = instance.getTicket(ticket.getId()); assertThat(t2, notNullValue()); assertThat(t2.getId(), is(ticket.getId())); t2.setAssigneeId(instance.getCurrentUser().getId()); t2.setStatus(Status.CLOSED); instance.updateTicket(t2); assertThat(ticket.getSubject(), is(t.getSubject())); assertThat(ticket.getRequester(), nullValue()); assertThat(ticket.getRequesterId(), notNullValue()); assertThat(ticket.getDescription(), is(t.getComment().getBody())); assertThat(instance.getTicket(ticket.getId()), notNullValue()); firstId = Math.min(ticket.getId(), firstId); } while (ticket.getId() < firstId + 200L); // seed enough data for the paging tests } @Test public void lookupUserByEmail() 
throws Exception { createClientWithTokenOrPassword(); String requesterEmail = config.getProperty("requester.email"); assumeThat("Must have a requester email", requesterEmail, notNullValue()); for (User user : instance.lookupUserByEmail(requesterEmail)) { assertThat(user.getEmail(), is(requesterEmail)); } } @Test public void searchUserByEmail() throws Exception { createClientWithTokenOrPassword(); String requesterEmail = config.getProperty("requester.email"); assumeThat("Must have a requester email", requesterEmail, notNullValue()); for (User user : instance.getSearchResults(User.class, "requester:"+requesterEmail)) { assertThat(user.getEmail(), is(requesterEmail)); } } @Test public void lookupUserIdentities() throws Exception { createClientWithTokenOrPassword(); User user = instance.getCurrentUser(); for (Identity i : instance.getUserIdentities(user)) { assertThat(i.getId(), notNullValue()); Identity j = instance.getUserIdentity(user, i); assertThat(j.getId(), is(i.getId())); assertThat(j.getType(), is(i.getType())); assertThat(j.getValue(), is(i.getValue())); } } @Test public void getUserRequests() throws Exception { createClientWithTokenOrPassword(); User user = instance.getCurrentUser(); int count = 5; for (Request r : instance.getUserRequests(user)) { assertThat(r.getId(), notNullValue()); System.out.println(r.getSubject()); for (Comment c : instance.getRequestComments(r)) { assertThat(c.getId(), notNullValue()); System.out.println(" " + c.getBody()); } if (--count < 0) { break; } } } @Test public void getUsers() throws Exception { createClientWithTokenOrPassword(); int count = 0; for (User u : instance.getUsers()) { assertThat(u.getName(), notNullValue()); if (++count > 10) { break; } } } @Test public void getUsersIncrementally() throws Exception { createClientWithTokenOrPassword(); int count = 0; for (User u : instance.getUsersIncrementally(new Date(0L))) { assertThat(u.getName(), notNullValue()); if (++count > 10) { break; } } } @Test public void 
getSuspendedTickets() throws Exception {
    createClientWithTokenOrPassword();
    // Sample only the first few results so the test does not page through the
    // whole remote collection.
    int count = 0;
    for (SuspendedTicket ticket : instance.getSuspendedTickets()) {
        assertThat(ticket.getId(), notNullValue());
        if (++count > 10) {
            break;
        }
    }
}

/**
 * Smoke test: listing organizations yields entries with non-null names.
 */
@Test
public void getOrganizations() throws Exception {
    createClientWithTokenOrPassword();
    int count = 0;
    for (Organization t : instance.getOrganizations()) {
        assertThat(t.getName(), notNullValue());
        if (++count > 10) {
            break;
        }
    }
}

/**
 * Smoke test: incremental organization export starting from the epoch
 * returns named entries.
 */
@Test
public void getOrganizationsIncrementally() throws Exception {
    createClientWithTokenOrPassword();
    int count = 0;
    for (Organization t : instance.getOrganizationsIncrementally(new Date(0L))) {
        assertThat(t.getName(), notNullValue());
        if (++count > 10) {
            break;
        }
    }
}

/**
 * Creates a single organization, verifies the returned fields, then deletes it.
 */
@Test
public void createOrganization() throws Exception {
    createClientWithTokenOrPassword();
    // Clean up to avoid conflicts with leftovers from earlier failed runs
    for (Organization t : instance.getOrganizations()) {
        if ("testorg".equals(t.getExternalId())) {
            instance.deleteOrganization(t);
        }
    }
    Organization org = new Organization();
    org.setExternalId("testorg");
    org.setName("Test Organization");
    Organization result = instance.createOrganization(org);
    assertNotNull(result);
    assertNotNull(result.getId());
    assertEquals("Test Organization", result.getName());
    assertEquals("testorg", result.getExternalId());
    instance.deleteOrganization(result);
}

/**
 * Bulk-creates two organizations, polls the resulting job until completion,
 * then deletes the created entities.
 */
@Test(timeout = 10000)
public void createOrganizations() throws Exception {
    createClientWithTokenOrPassword();
    // Clean up to avoid conflicts with leftovers from earlier failed runs
    for (Organization t : instance.getOrganizations()) {
        if ("testorg1".equals(t.getExternalId()) || "testorg2".equals(t.getExternalId())) {
            instance.deleteOrganization(t);
        }
    }
    Organization org1 = new Organization();
    org1.setExternalId("testorg1");
    org1.setName("Test Organization 1");
    Organization org2 = new Organization();
    org2.setExternalId("testorg2");
    org2.setName("Test Organization 2");
    JobStatus<Organization> result = instance.createOrganizations(org1, org2);
    assertNotNull(result);
    assertNotNull(result.getId());
    assertNotNull(result.getStatus());
    while (result.getStatus() != JobStatus.JobStatusEnum.completed) {
        // Back off between polls instead of busy-waiting against the API;
        // the @Test timeout still bounds the total wait.
        Thread.sleep(100);
        result = instance.getJobStatus(result);
        assertNotNull(result);
        assertNotNull(result.getId());
        assertNotNull(result.getStatus());
    }
    List<Organization> resultOrgs = result.getResults();
    assertEquals(2, resultOrgs.size());
    for (Organization org : resultOrgs) {
        assertNotNull(org.getId());
        instance.deleteOrganization(org);
    }
}

/**
 * Submits two bulk-create jobs concurrently and polls both via
 * getJobStatuses() until completion, then deletes everything created.
 */
@Test(timeout = 10000)
public void bulkCreateMultipleJobs() throws Exception {
    createClientWithTokenOrPassword();
    // Fix: initial capacity was 4, but five organizations are added below.
    List<Organization> orgs = new ArrayList<Organization>(5);
    for (int i = 1; i <= 5; i++) {
        Organization org = new Organization();
        org.setExternalId("testorg" + i);
        org.setName("Test Organization " + i);
        orgs.add(org);
    }
    // Clean up to avoid conflicts with leftovers from earlier failed runs
    for (Organization t : instance.getOrganizations()) {
        for (Organization org : orgs) {
            if (org.getExternalId().equals(t.getExternalId())) {
                instance.deleteOrganization(t);
            }
        }
    }
    // Raw JobStatus is deliberate: getJobStatuses() below returns statuses
    // whose results are raw maps, so the variables are reassigned to a
    // different element type mid-loop.
    JobStatus result1 = instance.createOrganizations(orgs.subList(0, 2));
    JobStatus result2 = instance.createOrganizations(orgs.subList(2, 5));
    while (result1.getStatus() != JobStatus.JobStatusEnum.completed
            || result2.getStatus() != JobStatus.JobStatusEnum.completed) {
        // Back off between polls instead of busy-waiting against the API.
        Thread.sleep(100);
        List<JobStatus<HashMap<String, Object>>> results =
                instance.getJobStatuses(Arrays.asList(result1, result2));
        result1 = results.get(0);
        result2 = results.get(1);
        assertNotNull(result1);
        assertNotNull(result1.getId());
        assertNotNull(result2);
        assertNotNull(result2.getId());
    }
    List<HashMap> resultOrgs1 = result1.getResults();
    assertEquals(2, resultOrgs1.size());
    List<HashMap> resultOrgs2 = result2.getResults();
    assertEquals(3, resultOrgs2.size());
    for (HashMap org : resultOrgs1) {
        assertNotNull(org.get("id"));
        instance.deleteOrganization(((Number) org.get("id")).longValue());
    }
    for (HashMap org : resultOrgs2) {
        assertNotNull(org.get("id"));
        instance.deleteOrganization(((Number) org.get("id")).longValue());
    }
}

/**
 * Smoke test: listing groups yields entries with non-null names.
 */
@Test
public void getGroups() throws Exception {
    createClientWithTokenOrPassword();
    int count = 0;
    for (Group t : instance.getGroups()) {
        assertThat(t.getName(), notNullValue());
        if (++count > 10) {
            break;
        }
    }
}

/**
 * Smoke test: listing Help Center articles yields titled entries.
 */
@Test
public void getArticles() throws Exception {
    createClientWithTokenOrPassword();
    int count = 0;
    for (Article t : instance.getArticles()) {
        assertThat(t.getTitle(), notNullValue());
        if (++count > 40) { // Check enough to pull 2 result pages
            break;
        }
    }
}

/**
 * Verifies article translations expose id, title and body.
 */
@Test
public void getArticleTranslations() throws Exception {
    createClientWithTokenOrPassword();
    int articleCount = 0;
    int translationCount = 0; // Count total translations checked, not per-article
    for (Article art : instance.getArticles()) {
        assertNotNull(art.getId());
        if (++articleCount > 10) {
            break; // Do not overwhelm the getArticles API
        }
        for (Translation t : instance.getArticleTranslations(art.getId())) {
            assertNotNull(t.getId());
            assertNotNull(t.getTitle());
            assertNotNull(t.getBody());
            if (++translationCount > 3) {
                return;
            }
        }
    }
}

/**
 * Verifies section translations expose id, title and body.
 */
@Test
public void getSectionTranslations() throws Exception {
    createClientWithTokenOrPassword();
    int sectionCount = 0;
    int translationCount = 0; // Count total translations checked, not per-section
    for (Section sect : instance.getSections()) {
        assertNotNull(sect.getId());
        if (++sectionCount > 10) {
            break; // Do not overwhelm the getSections API
        }
        for (Translation t : instance.getSectionTranslations(sect.getId())) {
            assertNotNull(t.getId());
            assertNotNull(t.getTitle());
            assertNotNull(t.getBody());
            if (++translationCount > 3) {
                return;
            }
        }
    }
}

/**
 * Verifies category translations expose id, title and body.
 */
@Test
public void getCategoryTranslations() throws Exception {
    createClientWithTokenOrPassword();
    int categoryCount = 0;
    int translationCount = 0; // Count total translations checked, not per-category
    for (Category cat : instance.getCategories()) {
        assertNotNull(cat.getId());
        if (++categoryCount > 10) {
            break; // Do not overwhelm the getCategories API
        }
        for (Translation t : instance.getCategoryTranslations(cat.getId())) {
            assertNotNull(t.getId());
            assertNotNull(t.getTitle());
            assertNotNull(t.getBody());
            if (++translationCount > 3) {
                return;
            }
        }
    }
}
}
/* ***** BEGIN LICENSE BLOCK ***** * JTransforms * Copyright (c) 2007 onward, Piotr Wendykier * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 *
 * ***** END LICENSE BLOCK ***** */
package org.jtransforms.fft;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Random;
import org.jtransforms.utils.CommonUtils;
import pl.edu.icm.jlargearrays.ConcurrencyUtils;
import org.jtransforms.utils.IOUtils;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import pl.edu.icm.jlargearrays.FloatLargeArray;
import pl.edu.icm.jlargearrays.LargeArray;
import static org.apache.commons.math3.util.FastMath.*;

/**
 * This is a test of the class {@link FloatFFT_2D}. In this test, a very crude
 * 2d FFT method is implemented (see {@link #complexForward(float[][])}),
 * assuming that {@link FloatFFT_1D} has been fully tested and validated. This
 * crude (unoptimized) method is then used to establish <em>expected</em> values
 * of <em>direct</em> Fourier transforms.
 * <p>
 * For <em>inverse</em> Fourier transforms, the test assumes that the
 * corresponding <em>direct</em> Fourier transform has been tested and
 * validated.
 * <p>
 * In all cases, the test consists in creating a random array of data, and
 * verifying that expected and actual values of its Fourier transform coincide
 * (L2 norm is zero, within a specified accuracy).
 *
 * @author S&eacute;bastien Brisard
 * @author Piotr Wendykier
 */
@RunWith(value = Parameterized.class)
public class FloatFFT_2DTest {

    /**
     * Base message of all exceptions.
     */
    public static final String DEFAULT_MESSAGE = "%d-threaded FFT of size %dx%d: ";

    /**
     * The constant value of the seed of the random generator.
     */
    public static final int SEED = 20110602;

    // Absolute tolerance used by the scaled / forward tests.
    private static final double EPS = pow(10, -3);

    // Looser tolerance used by the unscaled inverse tests.
    private static final double EPS_UNSCALED = 0.5;

    // Every pairwise combination of the sizes below, each run both
    // single-threaded (1) and multi-threaded (8), with a fixed seed.
    @Parameters
    public static Collection<Object[]> getParameters() {
        final int[] size = {2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 16, 32,
            64, 100, 120, 128, 256, 310, 511, 512, 1024};
        final ArrayList<Object[]> parameters = new ArrayList<Object[]>();
        for (int i = 0; i < size.length; i++) {
            for (int j = 0; j < size.length; j++) {
                parameters.add(new Object[]{size[i], size[j], 1, SEED});
                parameters.add(new Object[]{size[i], size[j], 8, SEED});
            }
        }
        return parameters;
    }

    /**
     * Fourier transform of the columns.
     */
    private final FloatFFT_1D cfft;

    /**
     * The object to be tested.
     */
    private final FloatFFT_2D fft;

    /**
     * Number of columns of the data arrays to be Fourier transformed.
     */
    private final int numCols;

    /**
     * Number of rows of the data arrays to be Fourier transformed.
     */
    private final int numRows;

    /**
     * Fourier transform of the rows.
     */
    private final FloatFFT_1D rfft;

    /**
     * For the generation of the data arrays.
     */
    private final Random random;

    /**
     * The number of threads used.
     */
    private final int numThreads;

    /**
     * Creates a new instance of this test.
     *
     * @param numRows
     *            number of rows
     * @param numColumns
     *            number of columns
     * @param numThreads
     *            the number of threads to be used
     * @param seed
     *            the seed of the random generator
     */
    public FloatFFT_2DTest(final int numRows, final int numColumns,
            final int numThreads, final long seed) {
        this.numRows = numRows;
        this.numCols = numColumns;
        // NOTE(review): presumably forces the LargeArray code paths even for
        // tiny arrays -- confirm against the JLargeArrays documentation.
        LargeArray.setMaxSizeOf32bitArray(1);
        this.rfft = new FloatFFT_1D(numColumns);
        this.cfft = new FloatFFT_1D(numRows);
        this.fft = new FloatFFT_2D(numRows, numColumns);
        this.random = new Random(seed);
        ConcurrencyUtils.setNumberOfThreads(numThreads);
        // Lower the 2D multithreading threshold so that even the small test
        // sizes exercise the threaded code paths.
        CommonUtils.setThreadsBeginN_2D(4);
        this.numThreads = ConcurrencyUtils.getNumberOfThreads();
    }

    /**
     * A crude implementation of 2d complex FFT.
 * Rows are transformed in place with the 1d row FFT, then each column is
     * copied into an interleaved (re, im) scratch buffer, transformed with the
     * 1d column FFT, and copied back. Used to establish expected values.
     *
     * @param a
     *            the data to be transformed
     */
    public void complexForward(final float[][] a) {
        // 1d FFT of each row, in place.
        for (int r = 0; r < numRows; r++) {
            rfft.complexForward(a[r]);
        }
        // 1d FFT of each column via a strided scratch buffer.
        final float[] buffer = new float[2 * numRows];
        for (int c = 0; c < numCols; c++) {
            for (int r = 0; r < numRows; r++) {
                buffer[2 * r] = a[r][2 * c];
                buffer[2 * r + 1] = a[r][2 * c + 1];
            }
            cfft.complexForward(buffer);
            for (int r = 0; r < numRows; r++) {
                a[r][2 * c] = buffer[2 * r];
                a[r][2 * c + 1] = buffer[2 * r + 1];
            }
        }
    }

    /**
     * A test of {@link FloatFFT_2D#complexForward(float[])}.
     */
    @Test
    public void testComplexForward1dInput() {
        final float[] actual = new float[2 * numRows * numCols];
        final float[][] expected0 = new float[numRows][2 * numCols];
        final float[] expected = new float[2 * numRows * numCols];
        // Same random data in both the flat (actual) and 2d (expected0) layouts.
        for (int r = 0; r < numRows; r++) {
            for (int c = 0; c < 2 * numCols; c++) {
                final float rnd = random.nextFloat();
                actual[2 * r * numCols + c] = rnd;
                expected0[r][c] = rnd;
            }
        }
        fft.complexForward(actual);
        complexForward(expected0);
        // Flatten the reference result for comparison.
        for (int r = 0; r < numRows; r++) {
            for (int c = 0; c < 2 * numCols; c++) {
                expected[2 * r * numCols + c] = expected0[r][c];
            }
        }
        double rmse = IOUtils.computeRMSE(actual, expected);
        Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS);
    }

    /**
     * A test of {@link FloatFFT_2D#complexForward(FloatLargeArray)}.
     */
    @Test
    public void testComplexForwardLarge() {
        final FloatLargeArray actual = new FloatLargeArray(2 * numRows * numCols);
        final float[][] expected0 = new float[numRows][2 * numCols];
        final FloatLargeArray expected = new FloatLargeArray(2 * numRows * numCols);
        for (int r = 0; r < numRows; r++) {
            for (int c = 0; c < 2 * numCols; c++) {
                final float rnd = random.nextFloat();
                actual.setDouble(2 * r * numCols + c, rnd);
                expected0[r][c] = rnd;
            }
        }
        fft.complexForward(actual);
        complexForward(expected0);
        for (int r = 0; r < numRows; r++) {
            for (int c = 0; c < 2 * numCols; c++) {
                expected.setDouble(2 * r * numCols + c, expected0[r][c]);
            }
        }
        double rmse = IOUtils.computeRMSE(actual, expected);
        Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS);
    }

    /**
     * A test of {@link FloatFFT_2D#complexForward(float[][])}.
     */
    @Test
    public void testComplexForward2dInput() {
        final float[][] actual = new float[numRows][2 * numCols];
        final float[][] expected = new float[numRows][2 * numCols];
        for (int r = 0; r < numRows; r++) {
            for (int c = 0; c < 2 * numCols; c++) {
                final float rnd = random.nextFloat();
                actual[r][c] = rnd;
                expected[r][c] = rnd;
            }
        }
        fft.complexForward(actual);
        complexForward(expected);
        double rmse = IOUtils.computeRMSE(actual, expected);
        Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS);
    }

    /**
     * A test of {@link FloatFFT_2D#complexInverse(float[], boolean)}, with
     * the second parameter set to <code>true</code>.
     */
    @Test
    public void testComplexInverseScaled1dInput() {
        final float[] expected = new float[2 * numRows * numCols];
        final float[] actual = new float[2 * numRows * numCols];
        for (int i = 0; i < actual.length; i++) {
            final float rnd = random.nextFloat();
            actual[i] = rnd;
            expected[i] = rnd;
        }
        // Scaled forward/inverse round trip should reproduce the input.
        fft.complexForward(actual);
        fft.complexInverse(actual, true);
        double rmse = IOUtils.computeRMSE(actual, expected);
        Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS);
    }

    /**
     * A test of {@link FloatFFT_2D#complexInverse(FloatLargeArray, boolean)}, with
     * the second parameter set to <code>true</code>.
     */
    @Test
    public void testComplexInverseScaledLarge() {
        final FloatLargeArray expected = new FloatLargeArray(2 * numRows * numCols);
        final FloatLargeArray actual = new FloatLargeArray(2 * numRows * numCols);
        for (int i = 0; i < actual.length(); i++) {
            final float rnd = random.nextFloat();
            actual.setDouble(i, rnd);
            expected.setDouble(i, rnd);
        }
        fft.complexForward(actual);
        fft.complexInverse(actual, true);
        double rmse = IOUtils.computeRMSE(actual, expected);
        Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS);
    }

    /**
     * A test of {@link FloatFFT_2D#complexInverse(float[][], boolean)}, with
     * the second parameter set to <code>true</code>.
     */
    @Test
    public void testComplexInverseScaled2dInput() {
        final float[][] expected = new float[numRows][2 * numCols];
        final float[][] actual = new float[numRows][2 * numCols];
        for (int r = 0; r < numRows; r++) {
            for (int c = 0; c < 2 * numCols; c++) {
                final float rnd = random.nextFloat();
                actual[r][c] = rnd;
                expected[r][c] = rnd;
            }
        }
        fft.complexForward(actual);
        fft.complexInverse(actual, true);
        double rmse = IOUtils.computeRMSE(actual, expected);
        Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS);
    }

    /**
     * A test of {@link FloatFFT_2D#complexInverse(float[], boolean)}, with
     * the second parameter set to <code>false</code>.
     */
    @Test
    public void testComplexInverseUnScaled1dInput() {
        final float[] expected = new float[2 * numRows * numCols];
        final float[] actual = new float[2 * numRows * numCols];
        for (int i = 0; i < actual.length; i++) {
            final float rnd = random.nextFloat();
            actual[i] = rnd;
            expected[i] = rnd;
        }
        fft.complexForward(actual);
        fft.complexInverse(actual, false);
        // Unscaled inverse leaves a factor numRows * numCols; undo it manually.
        final float s = numRows * numCols;
        for (int i = 0; i < actual.length; i++) {
            actual[i] = actual[i] / s;
        }
        double rmse = IOUtils.computeRMSE(actual, expected);
        Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS);
    }

    /**
     * A test of {@link FloatFFT_2D#complexInverse(FloatLargeArray, boolean)}, with
     * the second parameter set to <code>false</code>.
     */
    @Test
    public void testComplexInverseUnScaledLarge() {
        final FloatLargeArray expected = new FloatLargeArray(2 * numRows * numCols);
        final FloatLargeArray actual = new FloatLargeArray(2 * numRows * numCols);
        for (int i = 0; i < actual.length(); i++) {
            final float rnd = random.nextFloat();
            actual.setDouble(i, rnd);
            expected.setDouble(i, rnd);
        }
        fft.complexForward(actual);
        fft.complexInverse(actual, false);
        // Unscaled inverse leaves a factor numRows * numCols; undo it manually.
        final float s = numRows * numCols;
        for (int i = 0; i < actual.length(); i++) {
            actual.setDouble(i, actual.getDouble(i) / s);
        }
        double rmse = IOUtils.computeRMSE(actual, expected);
        Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS);
    }

    /**
     * A test of {@link FloatFFT_2D#complexInverse(float[][], boolean)}, with
     * the second parameter set to <code>false</code>.
     */
    @Test
    public void testComplexInverseUnScaled2dInput() {
        final float[][] expected = new float[numRows][2 * numCols];
        final float[][] actual = new float[numRows][2 * numCols];
        for (int r = 0; r < numRows; r++) {
            for (int c = 0; c < 2 * numCols; c++) {
                final float rnd = random.nextFloat();
                expected[r][c] = rnd;
                actual[r][c] = rnd;
            }
        }
        fft.complexForward(actual);
        fft.complexInverse(actual, false);
        // Unscaled inverse leaves a factor numRows * numCols; undo it manually.
        final float s = numRows * numCols;
        for (int r = 0; r < numRows; r++) {
            for (int c = 0; c < 2 * numCols; c++) {
                actual[r][c] = actual[r][c] / s;
            }
        }
        double rmse = IOUtils.computeRMSE(actual, expected);
        Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS);
    }

    // Expands the packed output of realForward(float[]) in place into the
    // full interleaved complex spectrum (2 * rows * columns values), using the
    // conjugate symmetry of the real-input transform. The array must be twice
    // as long as the real data actually transformed.
    private static void fillSymmetric(final float[] a, int rows, int columns) {
        final int twon2 = 2 * columns;
        int idx1, idx2, idx3, idx4;
        int n1d2 = rows / 2;
        // Spread the packed rows out to their positions in the full layout.
        for (int r = (rows - 1); r >= 1; r--) {
            idx1 = r * columns;
            idx2 = 2 * idx1;
            for (int c = 0; c < columns; c += 2) {
                a[idx2 + c] = a[idx1 + c];
                a[idx1 + c] = 0;
                a[idx2 + c + 1] = a[idx1 + c + 1];
                a[idx1 + c + 1] = 0;
            }
        }
        // Unpack the Nyquist-column values stored in the packed format.
        for (int r = 1; r < n1d2; r++) {
            idx2 = r * twon2;
            idx3 = (rows - r) * twon2;
            a[idx2 + columns] = a[idx3 + 1];
            a[idx2 + columns + 1] = -a[idx3];
        }
        // Fill the upper half of each row from the conjugate-symmetric mirror.
        for (int r = 1; r < n1d2; r++) {
            idx2 = r * twon2;
            idx3 = (rows - r + 1) * twon2;
            for (int c = columns + 2; c < twon2; c += 2) {
                a[idx2 + c] = a[idx3 - c];
                a[idx2 + c + 1] = -a[idx3 - c + 1];
            }
        }
        // Mirror the lower half of the spectrum: X[R-r, C-c] = conj(X[r, c]).
        for (int r = 0; r <= rows / 2; r++) {
            idx1 = r * twon2;
            idx4 = ((rows - r) % rows) * twon2;
            for (int c = 0; c < twon2; c += 2) {
                idx2 = idx1 + c;
                idx3 = idx4 + (twon2 - c) % twon2;
                a[idx3] = a[idx2];
                a[idx3 + 1] = -a[idx2 + 1];
            }
        }
        // Fix up the purely-real DC / Nyquist entries.
        a[columns] = -a[1];
        a[1] = 0;
        idx1 = n1d2 * twon2;
        a[idx1 + columns] = -a[idx1 + 1];
        a[idx1 + 1] = 0;
        a[idx1 + columns + 1] = 0;
    }

    /**
     * A test of {@link FloatFFT_2D#realForward(float[])}.
     */
    @Test
    public void testRealForward1dInput() {
        // realForward only supports power-of-two sizes.
        if (!CommonUtils.isPowerOf2(numRows)) {
            return;
        }
        if (!CommonUtils.isPowerOf2(numCols)) {
            return;
        }
        final float[] actual = new float[2 * numRows * numCols];
        final float[] expected = new float[numRows * 2 * numCols];
        for (int r = 0; r < numRows; r++) {
            for (int c = 0; c < numCols; c++) {
                final float rnd = random.nextFloat();
                actual[r * numCols + c] = rnd;
                expected[r * 2 * numCols + 2 * c] = rnd;
            }
        }
        fft.realForward(actual);
        fft.complexForward(expected);
        // Expand the packed real output to the full complex layout before comparing.
        fillSymmetric(actual, numRows, numCols);
        double rmse = IOUtils.computeRMSE(actual, expected);
        Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS);
    }

    // Large-array variant of fillSymmetric(float[], int, int); same algorithm
    // with 64-bit indices and FloatLargeArray accessors.
    private static void fillSymmetric(final FloatLargeArray a, int rowsl, int columnsl) {
        final long twon2 = 2 * columnsl;
        long idx1, idx2, idx3, idx4;
        long n1d2 = rowsl / 2;
        for (long r = (rowsl - 1); r >= 1; r--) {
            idx1 = r * columnsl;
            idx2 = 2 * idx1;
            for (long c = 0; c < columnsl; c += 2) {
                a.setDouble(idx2 + c, a.getDouble(idx1 + c));
                a.setDouble(idx1 + c, 0);
                a.setDouble(idx2 + c + 1, a.getDouble(idx1 + c + 1));
                a.setDouble(idx1 + c + 1, 0);
            }
        }
        for (long r = 1; r < n1d2; r++) {
            idx2 = r * twon2;
            idx3 = (rowsl - r) * twon2;
            a.setDouble(idx2 + columnsl, a.getDouble(idx3 + 1));
            a.setDouble(idx2 + columnsl + 1, -a.getDouble(idx3));
        }
        for (long r = 1; r < n1d2; r++) {
            idx2 = r * twon2;
            idx3 = (rowsl - r + 1) * twon2;
            for (long c = columnsl + 2; c < twon2; c += 2) {
                a.setDouble(idx2 + c, a.getDouble(idx3 - c));
                a.setDouble(idx2 + c + 1, -a.getDouble(idx3 - c + 1));
            }
        }
        for (long r = 0; r <= rowsl / 2; r++) {
            idx1 = r * twon2;
            idx4 = ((rowsl - r) % rowsl) * twon2;
            for (long c = 0; c < twon2; c += 2) {
                idx2 = idx1 + c;
                idx3 = idx4 + (twon2 - c) % twon2;
                a.setDouble(idx3, a.getDouble(idx2));
                a.setDouble(idx3 + 1, -a.getDouble(idx2 + 1));
            }
        }
        a.setDouble(columnsl, -a.getDouble(1));
        a.setDouble(1, 0);
        idx1 = n1d2 * twon2;
        a.setDouble(idx1 + columnsl, -a.getDouble(idx1 + 1));
        a.setDouble(idx1 + 1, 0);
        a.setDouble(idx1 + columnsl + 1, 0);
    }

    /**
     * A test of {@link FloatFFT_2D#realForward(FloatLargeArray)}.
     */
    @Test
    public void testRealForwardLarge() {
        // realForward only supports power-of-two sizes.
        if (!CommonUtils.isPowerOf2(numRows)) {
            return;
        }
        if (!CommonUtils.isPowerOf2(numCols)) {
            return;
        }
        final FloatLargeArray actual = new FloatLargeArray(2 * numRows * numCols);
        final FloatLargeArray expected = new FloatLargeArray(numRows * 2 * numCols);
        for (int r = 0; r < numRows; r++) {
            for (int c = 0; c < numCols; c++) {
                final float rnd = random.nextFloat();
                actual.setDouble(r * numCols + c, rnd);
                expected.setDouble(r * 2 * numCols + 2 * c, rnd);
            }
        }
        fft.realForward(actual);
        fft.complexForward(expected);
        fillSymmetric(actual, numRows, numCols);
        double rmse = IOUtils.computeRMSE(actual, expected);
        Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS);
    }

    // 2d-array variant of fillSymmetric: expands the packed output of
    // realForward(float[][]) in place using conjugate symmetry.
    private void fillSymmetric(final float[][] a, int rows, int columns) {
        final int newn2 = 2 * columns;
        int n1d2 = rows / 2;
        for (int r = 1; r < n1d2; r++) {
            int idx1 = rows - r;
            a[r][columns] = a[idx1][1];
            a[r][columns + 1] = -a[idx1][0];
        }
        for (int r = 1; r < n1d2; r++) {
            int idx1 = rows - r;
            for (int c = columns + 2; c < newn2; c += 2) {
                int idx2 = newn2 - c;
                a[r][c] = a[idx1][idx2];
                a[r][c + 1] = -a[idx1][idx2 + 1];
            }
        }
        for (int r = 0; r <= rows / 2; r++) {
            int idx1 = (rows - r) % rows;
            for (int c = 0; c < newn2; c += 2) {
                int idx2 = (newn2 - c) % newn2;
                a[idx1][idx2] = a[r][c];
                a[idx1][idx2 + 1] = -a[r][c + 1];
            }
        }
        a[0][columns] = -a[0][1];
        a[0][1] = 0;
        a[n1d2][columns] = -a[n1d2][1];
        a[n1d2][1] = 0;
        a[n1d2][columns + 1] = 0;
    }

    /**
     * A test of {@link FloatFFT_2D#realForward(float[][])}.
     */
    @Test
    public void testRealForward2dInput() {
        // realForward only supports power-of-two sizes.
        if (!CommonUtils.isPowerOf2(numRows)) {
            return;
        }
        if (!CommonUtils.isPowerOf2(numCols)) {
            return;
        }
        final float[][] actual = new float[numRows][2 * numCols];
        final float[][] expected = new float[numRows][2 * numCols];
        for (int r = 0; r < numRows; r++) {
            for (int c = 0; c < numCols; c++) {
                final float rnd = random.nextFloat();
                actual[r][c] = rnd;
                expected[r][2 * c] = rnd;
            }
        }
        fft.realForward(actual);
        complexForward(expected);
        fillSymmetric(actual, numRows, numCols);
        double rmse = IOUtils.computeRMSE(actual, expected);
        Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS);
    }

    /**
     * A test of {@link FloatFFT_2D#realForwardFull(float[])}.
 */
    @Test
    public void testRealForwardFull1dInput() {
        final float[] actual = new float[2 * numRows * numCols];
        final float[][] expected0 = new float[numRows][2 * numCols];
        final float[] expected = new float[numRows * 2 * numCols];
        // Real input goes into 'actual'; same values as the real parts of a
        // complex reference array.
        for (int r = 0; r < numRows; r++) {
            for (int c = 0; c < numCols; c++) {
                final float rnd = random.nextFloat();
                actual[r * numCols + c] = rnd;
                expected0[r][2 * c] = rnd;
            }
        }
        fft.realForwardFull(actual);
        complexForward(expected0);
        // Flatten the reference result for comparison.
        for (int r = 0; r < numRows; r++) {
            for (int c = 0; c < 2 * numCols; c++) {
                expected[2 * r * numCols + c] = expected0[r][c];
            }
        }
        double rmse = IOUtils.computeRMSE(actual, expected);
        Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS);
    }

    /**
     * A test of {@link FloatFFT_2D#realForwardFull(FloatLargeArray)}.
     */
    @Test
    public void testRealForwardFullLarge() {
        final FloatLargeArray actual = new FloatLargeArray(2 * numRows * numCols);
        final float[][] expected0 = new float[numRows][2 * numCols];
        final FloatLargeArray expected = new FloatLargeArray(numRows * 2 * numCols);
        for (int r = 0; r < numRows; r++) {
            for (int c = 0; c < numCols; c++) {
                final float rnd = random.nextFloat();
                actual.setDouble(r * numCols + c, rnd);
                expected0[r][2 * c] = rnd;
            }
        }
        fft.realForwardFull(actual);
        complexForward(expected0);
        for (int r = 0; r < numRows; r++) {
            for (int c = 0; c < 2 * numCols; c++) {
                expected.setDouble(2 * r * numCols + c, expected0[r][c]);
            }
        }
        double rmse = IOUtils.computeRMSE(actual, expected);
        Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS);
    }

    /**
     * A test of {@link FloatFFT_2D#realForwardFull(float[][])}.
     */
    @Test
    public void testRealForwardFull2dInput() {
        final float[][] actual = new float[numRows][2 * numCols];
        final float[][] expected = new float[numRows][2 * numCols];
        for (int r = 0; r < numRows; r++) {
            for (int c = 0; c < numCols; c++) {
                final float rnd = random.nextFloat();
                actual[r][c] = rnd;
                expected[r][2 * c] = rnd;
            }
        }
        fft.realForwardFull(actual);
        complexForward(expected);
        double rmse = IOUtils.computeRMSE(actual, expected);
        Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS);
    }

    /**
     * A test of {@link FloatFFT_2D#realInverseFull(float[], boolean)}, with
     * the second parameter set to <code>true</code>.
     */
    @Test
    public void testRealInverseFullScaled1dInput() {
        final float[] actual = new float[2 * numRows * numCols];
        final float[] expected = new float[2 * numRows * numCols];
        for (int r = 0; r < numRows; r++) {
            for (int c = 0; c < numCols; c++) {
                final float rnd = random.nextFloat();
                final int index = r * numCols + c;
                actual[index] = rnd;
                expected[2 * index] = rnd;
            }
        }
        // TODO If the two following lines are permuted, this causes an array
        // index out of bounds exception.
        fft.complexInverse(expected, true);
        fft.realInverseFull(actual, true);
        double rmse = IOUtils.computeRMSE(actual, expected);
        Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS);
    }

    /**
     * A test of {@link FloatFFT_2D#realInverseFull(FloatLargeArray, boolean)}, with
     * the second parameter set to <code>true</code>.
     */
    @Test
    public void testRealInverseFullScaledLarge() {
        final FloatLargeArray actual = new FloatLargeArray(2 * numRows * numCols);
        final FloatLargeArray expected = new FloatLargeArray(2 * numRows * numCols);
        for (int r = 0; r < numRows; r++) {
            for (int c = 0; c < numCols; c++) {
                final float rnd = random.nextFloat();
                final int index = r * numCols + c;
                actual.setDouble(index, rnd);
                expected.setDouble(2 * index, rnd);
            }
        }
        // TODO If the two following lines are permuted, this causes an array
        // index out of bounds exception.
        fft.complexInverse(expected, true);
        fft.realInverseFull(actual, true);
        double rmse = IOUtils.computeRMSE(actual, expected);
        Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS);
    }

    /**
     * A test of {@link FloatFFT_2D#realInverseFull(float[][], boolean)}, with
     * the second parameter set to <code>true</code>.
     */
    @Test
    public void testRealInverseFullScaled2dInput() {
        final float[][] actual = new float[numRows][2 * numCols];
        final float[][] expected = new float[numRows][2 * numCols];
        for (int r = 0; r < numRows; r++) {
            for (int c = 0; c < numCols; c++) {
                final float rnd = random.nextFloat();
                actual[r][c] = rnd;
                expected[r][2 * c] = rnd;
            }
        }
        fft.realInverseFull(actual, true);
        fft.complexInverse(expected, true);
        double rmse = IOUtils.computeRMSE(actual, expected);
        Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS);
    }

    /**
     * A test of {@link FloatFFT_2D#realInverseFull(float[], boolean)}, with
     * the second parameter set to <code>false</code>.
*/ @Test public void testRealInverseFullUnscaled1dInput() { final float[] actual = new float[2 * numRows * numCols]; final float[] expected = new float[2 * numRows * numCols]; for (int r = 0; r < numRows; r++) { for (int c = 0; c < numCols; c++) { final float rnd = random.nextFloat(); final int index = r * numCols + c; actual[index] = rnd; expected[2 * index] = rnd; } } // TODO If the two following lines are permuted, this causes an array // index out of bounds exception. fft.complexInverse(expected, false); fft.realInverseFull(actual, false); double rmse = IOUtils.computeRMSE(actual, expected); Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS_UNSCALED); } /** * A test of {@link FloatFFT_2D#realInverseFull(FloatLargeArray, boolean)}, with * the second parameter set to <code>false</code>. */ @Test public void testRealInverseFullUnscaledLarge() { final FloatLargeArray actual = new FloatLargeArray(2 * numRows * numCols); final FloatLargeArray expected = new FloatLargeArray(2 * numRows * numCols); for (int r = 0; r < numRows; r++) { for (int c = 0; c < numCols; c++) { final float rnd = random.nextFloat(); final int index = r * numCols + c; actual.set(index, rnd); expected.setDouble(2 * index, rnd); } } // TODO If the two following lines are permuted, this causes an array // index out of bounds exception. fft.complexInverse(expected, false); fft.realInverseFull(actual, false); double rmse = IOUtils.computeRMSE(actual, expected); Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS_UNSCALED); } /** * A test of {@link FloatFFT_2D#realInverseFull(float[][], boolean)}, with * the second parameter set to <code>false</code>. 
*/ @Test public void testRealInverseFullUnscaled2dInput() { final float[][] actual = new float[numRows][2 * numCols]; final float[][] expected = new float[numRows][2 * numCols]; for (int r = 0; r < numRows; r++) { for (int c = 0; c < numCols; c++) { final float rnd = random.nextFloat(); actual[r][c] = rnd; expected[r][2 * c] = rnd; } } fft.realInverseFull(actual, false); fft.complexInverse(expected, false); double rmse = IOUtils.computeRMSE(actual, expected); Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS_UNSCALED); } /** * A test of {@link FloatFFT_2D#realInverse(float[], boolean)}, with the * second parameter set to <code>true</code>. */ @Test public void testRealInverseScaled1dInput() { if (!CommonUtils.isPowerOf2(numRows)) { return; } if (!CommonUtils.isPowerOf2(numCols)) { return; } final float[] actual = new float[numRows * numCols]; final float[] expected = new float[actual.length]; for (int i = 0; i < actual.length; i++) { final float rnd = random.nextFloat(); actual[i] = rnd; expected[i] = rnd; } fft.realForward(actual); fft.realInverse(actual, true); double rmse = IOUtils.computeRMSE(actual, expected); Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS); } /** * A test of {@link FloatFFT_2D#realInverse(FloatLargeArray, boolean)}, with the * second parameter set to <code>true</code>. 
*/ @Test public void testRealInverseScaledLarge() { if (!CommonUtils.isPowerOf2(numRows)) { return; } if (!CommonUtils.isPowerOf2(numCols)) { return; } final FloatLargeArray actual = new FloatLargeArray(numRows * numCols); final FloatLargeArray expected = new FloatLargeArray(actual.length()); for (int i = 0; i < actual.length(); i++) { final float rnd = random.nextFloat(); actual.setDouble(i, rnd); expected.setDouble(i, rnd); } fft.realForward(actual); fft.realInverse(actual, true); double rmse = IOUtils.computeRMSE(actual, expected); Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS); } /** * A test of {@link FloatFFT_2D#realInverse(float[][], boolean)}, with the * second parameter set to <code>true</code>. */ @Test public void testRealInverseScaled2dInput() { if (!CommonUtils.isPowerOf2(numRows)) { return; } if (!CommonUtils.isPowerOf2(numCols)) { return; } final float[][] actual = new float[numRows][numCols]; final float[][] expected = new float[numRows][numCols]; for (int r = 0; r < numRows; r++) { for (int c = 0; c < numCols; c++) { final float rnd = random.nextFloat(); actual[r][c] = rnd; expected[r][c] = rnd; } } fft.realForward(actual); fft.realInverse(actual, true); double rmse = IOUtils.computeRMSE(actual, expected); Assert.assertEquals(String.format(DEFAULT_MESSAGE, numThreads, numRows, numCols) + ", rmse = " + rmse, 0.0, rmse, EPS); } }
/* * Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.cache.merge; import com.hazelcast.cache.ICache; import com.hazelcast.config.Config; import com.hazelcast.config.InMemoryFormat; import com.hazelcast.core.HazelcastInstance; import com.hazelcast.spi.merge.DiscardMergePolicy; import com.hazelcast.spi.merge.HigherHitsMergePolicy; import com.hazelcast.spi.merge.LatestAccessMergePolicy; import com.hazelcast.spi.merge.LatestUpdateMergePolicy; import com.hazelcast.spi.merge.PassThroughMergePolicy; import com.hazelcast.spi.merge.PutIfAbsentMergePolicy; import com.hazelcast.spi.merge.SplitBrainMergePolicy; import com.hazelcast.test.HazelcastParallelParametersRunnerFactory; import com.hazelcast.test.HazelcastParametrizedRunner; import com.hazelcast.test.SplitBrainTestSupport; import com.hazelcast.test.annotation.ParallelJVMTest; import com.hazelcast.test.annotation.QuickTest; import com.hazelcast.test.backup.BackupAccessor; import com.hazelcast.test.backup.TestBackupUtils; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.Parameterized.Parameter; import org.junit.runners.Parameterized.Parameters; import org.junit.runners.Parameterized.UseParametersRunnerFactory; import java.util.Collection; import static com.hazelcast.config.InMemoryFormat.BINARY; import static com.hazelcast.config.InMemoryFormat.OBJECT; import static 
com.hazelcast.test.backup.TestBackupUtils.assertBackupEntryEqualsEventually;
import static com.hazelcast.test.backup.TestBackupUtils.assertBackupEntryNullEventually;
import static com.hazelcast.test.backup.TestBackupUtils.assertBackupSizeEventually;
import static java.util.Arrays.asList;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.fail;

/**
 * Tests different split-brain scenarios for {@link com.hazelcast.cache.ICache}.
 * <p>
 * Most merge policies are tested with {@link InMemoryFormat#BINARY} only, since they don't check the value.
 * <p>
 * The {@link MergeIntegerValuesMergePolicy} is tested with both in-memory formats, since it's using the value to merge.
 * <p>
 * The {@link DiscardMergePolicy}, {@link PassThroughMergePolicy} and {@link PutIfAbsentMergePolicy} are also
 * tested with a data structure, which is only created in the smaller cluster.
 */
@RunWith(HazelcastParametrizedRunner.class)
@UseParametersRunnerFactory(HazelcastParallelParametersRunnerFactory.class)
@Category({QuickTest.class, ParallelJVMTest.class})
@SuppressWarnings("WeakerAccess")
public class CacheSplitBrainTest extends SplitBrainTestSupport {

    // NOTE(review): LatestUpdateMergePolicy is imported and handled in the
    // dispatch chains below but is absent from this parameter list, so those
    // branches never run -- confirm whether it should be added here.
    @Parameters(name = "format:{0}, mergePolicy:{1}")
    public static Collection<Object[]> parameters() {
        return asList(new Object[][]{
                {BINARY, DiscardMergePolicy.class},
                {BINARY, HigherHitsMergePolicy.class},
                {BINARY, LatestAccessMergePolicy.class},
                {BINARY, PassThroughMergePolicy.class},
                {BINARY, PutIfAbsentMergePolicy.class},
                {BINARY, MergeIntegerValuesMergePolicy.class},
                {OBJECT, MergeIntegerValuesMergePolicy.class},
        });
    }

    // In-memory format applied to both caches under test.
    @Parameter
    public InMemoryFormat inMemoryFormat;

    // Merge policy under test; selects the scenario in the callbacks below.
    @Parameter(value = 1)
    public Class<? extends SplitBrainMergePolicy> mergePolicyClass;

    // cacheA* exist in both brains; cacheB* are only created in the smaller brain.
    protected String cacheNameA = randomMapName("cacheA-");
    protected String cacheNameB = randomMapName("cacheB-");
    protected ICache<Object, Object> cacheA1;
    protected ICache<Object, Object> cacheA2;
    protected ICache<Object, Object> cacheB1;
    protected ICache<Object, Object> cacheB2;
    protected BackupAccessor<Object, Object> backupCacheA;
    protected BackupAccessor<Object, Object> backupCacheB;
    protected MergeLifecycleListener mergeLifecycleListener;

    /**
     * Configures both caches with one sync backup and the parameterized
     * in-memory format and merge policy.
     */
    @Override
    protected Config config() {
        Config config = super.config();
        config.getCacheConfig(cacheNameA)
                .setInMemoryFormat(inMemoryFormat)
                .setBackupCount(1)
                .setAsyncBackupCount(0)
                .setStatisticsEnabled(false)
                .getMergePolicyConfig().setPolicy(mergePolicyClass.getName());
        config.getCacheConfig(cacheNameB)
                .setInMemoryFormat(inMemoryFormat)
                .setBackupCount(1)
                .setAsyncBackupCount(0)
                .setStatisticsEnabled(false)
                .getMergePolicyConfig().setPolicy(mergePolicyClass.getName());
        return config;
    }

    @Override
    protected void onBeforeSplitBrainCreated(HazelcastInstance[] instances) {
        waitAllForSafeState(instances);
        BackupAccessor<Object, Object> accessor = TestBackupUtils.newCacheAccessor(instances, cacheNameA);
        assertEquals("backupCache should contain 0 entries", 0, accessor.size());
    }

    /**
     * After the split, registers the merge-lifecycle listener on the smaller
     * brain and populates the caches according to the policy under test.
     */
    @Override
    protected void onAfterSplitBrainCreated(HazelcastInstance[] firstBrain, HazelcastInstance[] secondBrain) {
        mergeLifecycleListener = new MergeLifecycleListener(secondBrain.length);
        for (HazelcastInstance instance : secondBrain) {
            instance.getLifecycleService().addLifecycleListener(mergeLifecycleListener);
        }
        cacheA1 = firstBrain[0].getCacheManager().getCache(cacheNameA);
        cacheA2 = secondBrain[0].getCacheManager().getCache(cacheNameA);
        // cacheB is created only in the second (smaller) brain.
        cacheB2 = secondBrain[0].getCacheManager().getCache(cacheNameB);
        if (mergePolicyClass == DiscardMergePolicy.class) {
            afterSplitDiscardMergePolicy();
        } else if (mergePolicyClass == HigherHitsMergePolicy.class) {
            afterSplitHigherHitsMergePolicy();
        } else if (mergePolicyClass == LatestAccessMergePolicy.class) {
            afterSplitLatestAccessMergePolicy();
        } else if (mergePolicyClass == LatestUpdateMergePolicy.class) {
            afterSplitLatestUpdateMergePolicy();
        } else if (mergePolicyClass == PassThroughMergePolicy.class) {
            afterSplitPassThroughMergePolicy();
        } else if (mergePolicyClass == PutIfAbsentMergePolicy.class) {
            afterSplitPutIfAbsentMergePolicy();
        } else if (mergePolicyClass == MergeIntegerValuesMergePolicy.class) {
            afterSplitCustomMergePolicy();
        } else {
            fail();
        }
    }

    /**
     * After healing, waits for the merge to complete, then verifies the
     * policy-specific post-merge state.
     */
    @Override
    protected void onAfterSplitBrainHealed(HazelcastInstance[] instances) {
        // wait until merge completes
        mergeLifecycleListener.await();
        cacheB1 = instances[0].getCacheManager().getCache(cacheNameB);
        backupCacheA = TestBackupUtils.newCacheAccessor(instances, cacheNameA);
        backupCacheB = TestBackupUtils.newCacheAccessor(instances, cacheNameB);
        if (mergePolicyClass == DiscardMergePolicy.class) {
            afterMergeDiscardMergePolicy();
        } else if (mergePolicyClass == HigherHitsMergePolicy.class) {
            afterMergeHigherHitsMergePolicy();
        } else if (mergePolicyClass == LatestAccessMergePolicy.class) {
            afterMergeLatestAccessMergePolicy();
        } else if (mergePolicyClass == LatestUpdateMergePolicy.class) {
            afterMergeLatestUpdateMergePolicy();
        } else if (mergePolicyClass == PassThroughMergePolicy.class) {
            afterMergePassThroughMergePolicy();
        } else if (mergePolicyClass == PutIfAbsentMergePolicy.class) {
            afterMergePutIfAbsentMergePolicy();
        } else if (mergePolicyClass == MergeIntegerValuesMergePolicy.class) {
            afterMergeCustomMergePolicy();
        } else {
            fail();
        }
    }

    // Discard policy: entries from the smaller brain should be dropped on merge.
    private void afterSplitDiscardMergePolicy() {
        cacheA1.put("key1", "value1");
        cacheA2.put("key1", "DiscardedValue1");
        cacheA2.put("key2", "DiscardedValue2");
        cacheB2.put("key", "DiscardedValue");
    }

    private void afterMergeDiscardMergePolicy() {
        assertEquals("value1", cacheA1.get("key1"));
        assertEquals("value1", cacheA2.get("key1"));
        assertBackupEntryEqualsEventually("key1", "value1", backupCacheA);
        assertNull(cacheA1.get("key2"));
assertNull(cacheA2.get("key2")); assertBackupEntryNullEventually("key2", backupCacheA); assertEquals(1, cacheA1.size()); assertEquals(1, cacheA2.size()); assertBackupSizeEventually(1, backupCacheA); assertNull(cacheB1.get("key")); assertNull(cacheB2.get("key")); assertBackupEntryNullEventually("key", backupCacheB); assertEquals(0, cacheB1.size()); assertEquals(0, cacheB2.size()); assertBackupSizeEventually(0, backupCacheB); } private void afterSplitHigherHitsMergePolicy() { cacheA1.put("key1", "higherHitsValue1"); cacheA1.put("key2", "value2"); // increase hits number assertEquals("higherHitsValue1", cacheA1.get("key1")); assertEquals("higherHitsValue1", cacheA1.get("key1")); cacheA2.put("key1", "value1"); cacheA2.put("key2", "higherHitsValue2"); // increase hits number assertEquals("higherHitsValue2", cacheA2.get("key2")); assertEquals("higherHitsValue2", cacheA2.get("key2")); } private void afterMergeHigherHitsMergePolicy() { assertEquals("higherHitsValue1", cacheA1.get("key1")); assertEquals("higherHitsValue1", cacheA2.get("key1")); assertBackupEntryEqualsEventually("key1", "higherHitsValue1", backupCacheA); assertEquals("higherHitsValue2", cacheA1.get("key2")); assertEquals("higherHitsValue2", cacheA2.get("key2")); assertBackupEntryEqualsEventually("key2", "higherHitsValue2", backupCacheA); assertEquals(2, cacheA1.size()); assertEquals(2, cacheA2.size()); assertBackupSizeEventually(2, backupCacheA); } private void afterSplitLatestAccessMergePolicy() { cacheA1.put("key1", "value1"); // access to record assertEquals("value1", cacheA1.get("key1")); // prevent updating at the same time sleepAtLeastMillis(100); cacheA2.put("key1", "LatestAccessedValue1"); // access to record assertEquals("LatestAccessedValue1", cacheA2.get("key1")); cacheA2.put("key2", "value2"); // access to record assertEquals("value2", cacheA2.get("key2")); // prevent updating at the same time sleepAtLeastMillis(100); cacheA1.put("key2", "LatestAccessedValue2"); // access to record 
assertEquals("LatestAccessedValue2", cacheA1.get("key2")); } private void afterMergeLatestAccessMergePolicy() { assertEquals("LatestAccessedValue1", cacheA1.get("key1")); assertEquals("LatestAccessedValue1", cacheA2.get("key1")); assertBackupEntryEqualsEventually("key1", "LatestAccessedValue1", backupCacheA); assertEquals("LatestAccessedValue2", cacheA1.get("key2")); assertEquals("LatestAccessedValue2", cacheA2.get("key2")); assertBackupEntryEqualsEventually("key2", "LatestAccessedValue2", backupCacheA); assertEquals(2, cacheA1.size()); assertEquals(2, cacheA2.size()); assertBackupSizeEventually(2, backupCacheA); } private void afterSplitLatestUpdateMergePolicy() { cacheA1.put("key1", "value1"); // prevent updating at the same time sleepAtLeastMillis(100); cacheA2.put("key1", "LatestUpdatedValue1"); cacheA2.put("key2", "value2"); // prevent updating at the same time sleepAtLeastMillis(100); cacheA1.put("key2", "LatestUpdatedValue2"); } private void afterMergeLatestUpdateMergePolicy() { assertEquals("LatestUpdatedValue1", cacheA1.get("key1")); assertEquals("LatestUpdatedValue1", cacheA2.get("key1")); assertBackupEntryEqualsEventually("key1", "LatestUpdatedValue1", backupCacheA); assertEquals("LatestUpdatedValue2", cacheA1.get("key2")); assertEquals("LatestUpdatedValue2", cacheA2.get("key2")); assertBackupEntryEqualsEventually("key2", "LatestUpdatedValue2", backupCacheA); assertEquals(2, cacheA1.size()); assertEquals(2, cacheA2.size()); assertBackupSizeEventually(2, backupCacheA); } private void afterSplitPassThroughMergePolicy() { cacheA1.put("key1", "value1"); cacheA2.put("key1", "PassThroughValue1"); cacheA2.put("key2", "PassThroughValue2"); cacheB2.put("key", "PutIfAbsentValue"); } private void afterMergePassThroughMergePolicy() { assertEquals("PassThroughValue1", cacheA1.get("key1")); assertEquals("PassThroughValue1", cacheA2.get("key1")); assertBackupEntryEqualsEventually("key1", "PassThroughValue1", backupCacheA); assertEquals("PassThroughValue2", 
cacheA1.get("key2")); assertEquals("PassThroughValue2", cacheA2.get("key2")); assertBackupEntryEqualsEventually("key2", "PassThroughValue2", backupCacheA); assertEquals(2, cacheA1.size()); assertEquals(2, cacheA2.size()); assertBackupSizeEventually(2, backupCacheA); assertEquals("PutIfAbsentValue", cacheB1.get("key")); assertEquals("PutIfAbsentValue", cacheB2.get("key")); assertBackupEntryEqualsEventually("key", "PutIfAbsentValue", backupCacheB); assertEquals(1, cacheB1.size()); assertEquals(1, cacheB2.size()); assertBackupSizeEventually(1, backupCacheB); } private void afterSplitPutIfAbsentMergePolicy() { cacheA1.put("key1", "PutIfAbsentValue1"); cacheA2.put("key1", "value"); cacheA2.put("key2", "PutIfAbsentValue2"); cacheB2.put("key", "PutIfAbsentValue"); } private void afterMergePutIfAbsentMergePolicy() { assertEquals("PutIfAbsentValue1", cacheA1.get("key1")); assertEquals("PutIfAbsentValue1", cacheA2.get("key1")); assertBackupEntryEqualsEventually("key1", "PutIfAbsentValue1", backupCacheA); assertEquals("PutIfAbsentValue2", cacheA1.get("key2")); assertEquals("PutIfAbsentValue2", cacheA2.get("key2")); assertBackupEntryEqualsEventually("key2", "PutIfAbsentValue2", backupCacheA); assertEquals(2, cacheA1.size()); assertEquals(2, cacheA2.size()); assertBackupSizeEventually(2, backupCacheA); assertEquals("PutIfAbsentValue", cacheB1.get("key")); assertEquals("PutIfAbsentValue", cacheB2.get("key")); assertBackupEntryEqualsEventually("key", "PutIfAbsentValue", backupCacheB); assertEquals(1, cacheB1.size()); assertEquals(1, cacheB2.size()); assertBackupSizeEventually(1, backupCacheB); } private void afterSplitCustomMergePolicy() { cacheA1.put("key", "value"); cacheA2.put("key", 1); } private void afterMergeCustomMergePolicy() { assertEquals(1, cacheA1.get("key")); assertEquals(1, cacheA2.get("key")); assertBackupEntryEqualsEventually("key", 1, backupCacheA); assertEquals(1, cacheA1.size()); assertEquals(1, cacheA2.size()); assertBackupSizeEventually(1, backupCacheA); } }
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.cluster.service;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchIllegalStateException;
import org.elasticsearch.cluster.*;
import org.elasticsearch.cluster.ClusterState.Builder;
import org.elasticsearch.cluster.block.ClusterBlock;
import org.elasticsearch.cluster.block.ClusterBlocks;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.metadata.ProcessClusterEventTimeoutException;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.operation.OperationRouting;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.text.StringText;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.*;
import org.elasticsearch.discovery.Discovery;
import org.elasticsearch.discovery.DiscoveryService;
import org.elasticsearch.node.settings.NodeSettingsService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.*;

import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadFactory;

/**
 * Default {@link ClusterService} implementation. Cluster-state update tasks are
 * submitted to a single-threaded prioritized executor, so updates are applied one
 * at a time; listeners are notified in three tiers (priority, regular, last).
 * A periodic background task reconnects to known nodes.
 */
public class InternalClusterService extends AbstractLifecycleComponent<ClusterService> implements ClusterService {

    private final ThreadPool threadPool;

    private final DiscoveryService discoveryService;

    private final OperationRouting operationRouting;

    private final TransportService transportService;

    private final NodeSettingsService nodeSettingsService;

    // interval for the ReconnectToNodes background task (setting "reconnect_interval", default 10s)
    private final TimeValue reconnectInterval;

    // single-threaded, priority-ordered executor; created in doStart()
    private volatile PrioritizedEsThreadPoolExecutor updateTasksExecutor;

    /**
     * Listener tiers, notified in this order on every cluster-state change:
     * priority listeners first, then regular, then "last" listeners.
     */
    private final List<ClusterStateListener> priorityClusterStateListeners = new CopyOnWriteArrayList<>();
    private final List<ClusterStateListener> clusterStateListeners = new CopyOnWriteArrayList<>();
    private final List<ClusterStateListener> lastClusterStateListeners = new CopyOnWriteArrayList<>();
    private final LocalNodeMasterListeners localNodeMasterListeners;

    // timeouts scheduled for TimeoutClusterStateListeners; cancelled on stop/remove
    private final Queue<NotifyTimeout> onGoingTimeouts = ConcurrentCollections.newQueue();

    // the current, most recently applied cluster state; written only by the update-task thread
    private volatile ClusterState clusterState;

    private final ClusterBlocks.Builder initialBlocks = ClusterBlocks.builder().addGlobalBlock(Discovery.NO_MASTER_BLOCK);

    private volatile ScheduledFuture reconnectToNodes;

    @Inject
    public InternalClusterService(Settings settings, DiscoveryService discoveryService, OperationRouting operationRouting, TransportService transportService,
                                  NodeSettingsService nodeSettingsService, ThreadPool threadPool, ClusterName clusterName) {
        super(settings);
        this.operationRouting = operationRouting;
        this.transportService = transportService;
        this.discoveryService = discoveryService;
        this.threadPool = threadPool;
        this.nodeSettingsService = nodeSettingsService;

        // start from an empty state for this cluster name; doStart() adds the initial blocks
        this.clusterState = ClusterState.builder(clusterName).build();

        this.nodeSettingsService.setClusterService(this);

        this.reconnectInterval = componentSettings.getAsTime("reconnect_interval", TimeValue.timeValueSeconds(10));

        localNodeMasterListeners = new LocalNodeMasterListeners(threadPool);
    }

    public NodeSettingsService settingsService() {
        return this.nodeSettingsService;
    }

    /**
     * Adds a global block to the initial cluster state. Only allowed before the
     * service is started.
     */
    public void addInitialStateBlock(ClusterBlock block) throws ElasticsearchIllegalStateException {
        if (lifecycle.started()) {
            throw new ElasticsearchIllegalStateException("can't set initial block when started");
        }
        initialBlocks.addGlobalBlock(block);
    }

    /**
     * Removes a global block from the initial cluster state. Only allowed before
     * the service is started.
     */
    @Override
    public void removeInitialStateBlock(ClusterBlock block) throws ElasticsearchIllegalStateException {
        if (lifecycle.started()) {
            throw new ElasticsearchIllegalStateException("can't set initial block when started");
        }
        initialBlocks.removeGlobalBlock(block);
    }

    @Override
    protected void doStart() throws ElasticsearchException {
        add(localNodeMasterListeners);
        this.clusterState = ClusterState.builder(clusterState).blocks(initialBlocks).build();
        this.updateTasksExecutor = EsExecutors.newSinglePrioritizing(daemonThreadFactory(settings, "clusterService#updateTask"));
        this.reconnectToNodes = threadPool.schedule(reconnectInterval, ThreadPool.Names.GENERIC, new ReconnectToNodes());
    }

    @Override
    protected void doStop() throws ElasticsearchException {
        this.reconnectToNodes.cancel(true);
        // cancel pending listener timeouts and tell their listeners the service is closing
        for (NotifyTimeout onGoingTimeout : onGoingTimeouts) {
            onGoingTimeout.cancel();
            onGoingTimeout.listener.onClose();
        }
        updateTasksExecutor.shutdown();
        try {
            updateTasksExecutor.awaitTermination(10, TimeUnit.SECONDS);
        } catch (InterruptedException e) {
            // ignore
        }
        remove(localNodeMasterListeners);
    }

    @Override
    protected void doClose() throws ElasticsearchException {
    }

    @Override
    public DiscoveryNode localNode() {
        return discoveryService.localNode();
    }

    @Override
    public OperationRouting operationRouting() {
        return operationRouting;
    }

    /** Returns the current cluster state (last applied). */
    public ClusterState state() {
        return this.clusterState;
    }

    public void addFirst(ClusterStateListener listener) {
        priorityClusterStateListeners.add(listener);
    }

    public void addLast(ClusterStateListener listener) {
        lastClusterStateListeners.add(listener);
    }

    public void add(ClusterStateListener listener) {
        clusterStateListeners.add(listener);
    }

    /**
     * Removes the listener from all tiers and cancels any timeout scheduled for it.
     */
    public void remove(ClusterStateListener listener) {
        clusterStateListeners.remove(listener);
        priorityClusterStateListeners.remove(listener);
        lastClusterStateListeners.remove(listener);
        for (Iterator<NotifyTimeout> it = onGoingTimeouts.iterator(); it.hasNext(); ) {
            NotifyTimeout timeout = it.next();
            if (timeout.listener.equals(listener)) {
                timeout.cancel();
                it.remove();
            }
        }
    }

    @Override
    public void add(LocalNodeMasterListener listener) {
        localNodeMasterListeners.add(listener);
    }

    @Override
    public void remove(LocalNodeMasterListener listener) {
        localNodeMasterListeners.remove(listener);
    }

    /**
     * Adds a listener that is automatically notified (onTimeout) after the given
     * timeout, unless removed first. Registration happens on the update-task
     * thread so postAdded() is ordered with respect to state updates.
     */
    public void add(final TimeValue timeout, final TimeoutClusterStateListener listener) {
        if (lifecycle.stoppedOrClosed()) {
            listener.onClose();
            return;
        }
        // call the post added notification on the same event thread
        try {
            updateTasksExecutor.execute(new PrioritizedRunnable(Priority.HIGH) {
                @Override
                public void run() {
                    NotifyTimeout notifyTimeout = new NotifyTimeout(listener, timeout);
                    notifyTimeout.future = threadPool.schedule(timeout, ThreadPool.Names.GENERIC, notifyTimeout);
                    onGoingTimeouts.add(notifyTimeout);
                    clusterStateListeners.add(listener);
                    listener.postAdded();
                }
            });
        } catch (EsRejectedExecutionException e) {
            if (lifecycle.stoppedOrClosed()) {
                listener.onClose();
            } else {
                throw e;
            }
        }
    }

    public void submitStateUpdateTask(final String source, final ClusterStateUpdateTask updateTask) {
        submitStateUpdateTask(source, Priority.NORMAL, updateTask);
    }

    /**
     * Queues a cluster-state update task on the single-threaded executor. For
     * {@link TimeoutClusterStateUpdateTask}s, onFailure is invoked (on the generic
     * pool) if the task is still queued when its timeout elapses.
     */
    public void submitStateUpdateTask(final String source, Priority priority, final ClusterStateUpdateTask updateTask) {
        if (!lifecycle.started()) {
            return;
        }
        try {
            final UpdateTask task = new UpdateTask(source, priority, updateTask);
            if (updateTask instanceof TimeoutClusterStateUpdateTask) {
                final TimeoutClusterStateUpdateTask timeoutUpdateTask = (TimeoutClusterStateUpdateTask) updateTask;
                updateTasksExecutor.execute(task, threadPool.scheduler(), timeoutUpdateTask.timeout(), new Runnable() {
                    @Override
                    public void run() {
                        threadPool.generic().execute(new Runnable() {
                            @Override
                            public void run() {
                                timeoutUpdateTask.onFailure(task.source, new ProcessClusterEventTimeoutException(timeoutUpdateTask.timeout(), task.source));
                            }
                        });
                    }
                });
            } else {
                updateTasksExecutor.execute(task);
            }
        } catch (EsRejectedExecutionException e) {
            // ignore cases where we are shutting down..., there is really nothing interesting
            // to be done here...
            if (!lifecycle.stoppedOrClosed()) {
                throw e;
            }
        }
    }

    /**
     * Snapshot of the tasks currently queued on the update executor, with the time
     * each has spent in the queue.
     */
    @Override
    public List<PendingClusterTask> pendingTasks() {
        long now = System.currentTimeMillis();
        PrioritizedEsThreadPoolExecutor.Pending[] pendings = updateTasksExecutor.getPending();
        List<PendingClusterTask> pendingClusterTasks = new ArrayList<>(pendings.length);
        for (PrioritizedEsThreadPoolExecutor.Pending pending : pendings) {
            final String source;
            final long timeInQueue;
            if (pending.task instanceof UpdateTask) {
                UpdateTask updateTask = (UpdateTask) pending.task;
                source = updateTask.source;
                timeInQueue = now - updateTask.addedAt;
            } else {
                // e.g. the HIGH-priority registration runnable from add(timeout, listener)
                source = "unknown";
                timeInQueue = -1;
            }
            pendingClusterTasks.add(new PendingClusterTask(pending.insertionOrder, pending.priority, new StringText(source), timeInQueue, pending.executing));
        }
        return pendingClusterTasks;
    }

    /**
     * Wraps a {@link ClusterStateUpdateTask}: computes the new state, then applies
     * it (connect to added nodes, publish if master, notify listeners, disconnect
     * from removed nodes, ack handling). Runs on the single update-task thread.
     */
    class UpdateTask extends PrioritizedRunnable {

        public final String source;
        public final ClusterStateUpdateTask updateTask;
        public final long addedAt = System.currentTimeMillis();

        UpdateTask(String source, Priority priority, ClusterStateUpdateTask updateTask) {
            super(priority);
            this.source = source;
            this.updateTask = updateTask;
        }

        @Override
        public void run() {
            if (!lifecycle.started()) {
                logger.debug("processing [{}]: ignoring, cluster_service not started", source);
                return;
            }
            logger.debug("processing [{}]: execute", source);
            ClusterState previousClusterState = clusterState;
            ClusterState newClusterState;
            try {
                newClusterState = updateTask.execute(previousClusterState);
            } catch (Throwable e) {
                if (logger.isTraceEnabled()) {
                    StringBuilder sb = new StringBuilder("failed to execute cluster state update, state:\nversion [").append(previousClusterState.version()).append("], source [").append(source).append("]\n");
                    sb.append(previousClusterState.nodes().prettyPrint());
                    sb.append(previousClusterState.routingTable().prettyPrint());
                    sb.append(previousClusterState.readOnlyRoutingNodes().prettyPrint());
                    logger.trace(sb.toString(), e);
                }
                updateTask.onFailure(source, e);
                return;
            }

            if (previousClusterState == newClusterState) {
                // identity comparison: tasks signal "no change" by returning the same instance
                logger.debug("processing [{}]: no change in cluster_state", source);
                if (updateTask instanceof AckedClusterStateUpdateTask) {
                    //no need to wait for ack if nothing changed, the update can be counted as acknowledged
                    ((AckedClusterStateUpdateTask) updateTask).onAllNodesAcked(null);
                }
                if (updateTask instanceof ProcessedClusterStateUpdateTask) {
                    ((ProcessedClusterStateUpdateTask) updateTask).clusterStateProcessed(source, previousClusterState, newClusterState);
                }
                return;
            }

            try {
                Discovery.AckListener ackListener = new NoOpAckListener();
                if (newClusterState.nodes().localNodeMaster()) {
                    // only the master controls the version numbers
                    Builder builder = ClusterState.builder(newClusterState).version(newClusterState.version() + 1);
                    if (previousClusterState.routingTable() != newClusterState.routingTable()) {
                        builder.routingTable(RoutingTable.builder(newClusterState.routingTable()).version(newClusterState.routingTable().version() + 1));
                    }
                    if (previousClusterState.metaData() != newClusterState.metaData()) {
                        builder.metaData(MetaData.builder(newClusterState.metaData()).version(newClusterState.metaData().version() + 1));
                    }
                    newClusterState = builder.build();

                    if (updateTask instanceof AckedClusterStateUpdateTask) {
                        final AckedClusterStateUpdateTask ackedUpdateTask = (AckedClusterStateUpdateTask) updateTask;
                        if (ackedUpdateTask.ackTimeout() == null || ackedUpdateTask.ackTimeout().millis() == 0) {
                            ackedUpdateTask.onAckTimeout();
                        } else {
                            try {
                                ackListener = new AckCountDownListener(ackedUpdateTask, newClusterState.version(), newClusterState.nodes(), threadPool);
                            } catch (EsRejectedExecutionException ex) {
                                if (logger.isDebugEnabled()) {
                                    logger.debug("Couldn't schedule timeout thread - node might be shutting down", ex);
                                }
                                //timeout straightaway, otherwise we could wait forever as the timeout thread has not started
                                ackedUpdateTask.onAckTimeout();
                            }
                        }
                    }
                } else {
                    if (previousClusterState.blocks().hasGlobalBlock(Discovery.NO_MASTER_BLOCK) && !newClusterState.blocks().hasGlobalBlock(Discovery.NO_MASTER_BLOCK)) {
                        // force an update, its a fresh update from the master as we transition from a start of not having a master to having one
                        // have a fresh instances of routing and metadata to remove the chance that version might be the same
                        Builder builder = ClusterState.builder(newClusterState);
                        builder.routingTable(RoutingTable.builder(newClusterState.routingTable()));
                        builder.metaData(MetaData.builder(newClusterState.metaData()));
                        newClusterState = builder.build();
                        logger.debug("got first state from fresh master [{}]", newClusterState.nodes().masterNodeId());
                    } else if (newClusterState.version() < previousClusterState.version()) {
                        // we got a cluster state with older version, when we are *not* the master, let it in since it might be valid
                        // we check on version where applicable, like at ZenDiscovery#handleNewClusterStateFromMaster
                        logger.debug("got smaller cluster state when not master [" + newClusterState.version() + "<" + previousClusterState.version() + "] from source [" + source + "]");
                    }
                }

                newClusterState.status(ClusterState.ClusterStateStatus.BEING_APPLIED);

                if (logger.isTraceEnabled()) {
                    StringBuilder sb = new StringBuilder("cluster state updated, source [").append(source).append("]\n");
                    sb.append(newClusterState.prettyPrint());
                    logger.trace(sb.toString());
                } else if (logger.isDebugEnabled()) {
                    logger.debug("cluster state updated, version [{}], source [{}]", newClusterState.version(), source);
                }

                ClusterChangedEvent clusterChangedEvent = new ClusterChangedEvent(source, newClusterState, previousClusterState);
                // new cluster state, notify all listeners
                final DiscoveryNodes.Delta nodesDelta = clusterChangedEvent.nodesDelta();
                if (nodesDelta.hasChanges() && logger.isInfoEnabled()) {
                    String summary = nodesDelta.shortSummary();
                    if (summary.length() > 0) {
                        logger.info("{}, reason: {}", summary, source);
                    }
                }

                // TODO, do this in parallel (and wait)
                for (DiscoveryNode node : nodesDelta.addedNodes()) {
                    if (!nodeRequiresConnection(node)) {
                        continue;
                    }
                    try {
                        transportService.connectToNode(node);
                    } catch (Throwable e) {
                        // the fault detection will detect it as failed as well
                        logger.warn("failed to connect to node [" + node + "]", e);
                    }
                }

                // if we are the master, publish the new state to all nodes
                // we publish here before we send a notification to all the listeners, since if it fails
                // we don't want to notify
                if (newClusterState.nodes().localNodeMaster()) {
                    logger.debug("publishing cluster state version {}", newClusterState.version());
                    discoveryService.publish(newClusterState, ackListener);
                }

                // update the current cluster state
                clusterState = newClusterState;
                logger.debug("set local cluster state to version {}", newClusterState.version());

                // notify listeners in tier order: priority, regular, last
                for (ClusterStateListener listener : priorityClusterStateListeners) {
                    listener.clusterChanged(clusterChangedEvent);
                }
                for (ClusterStateListener listener : clusterStateListeners) {
                    listener.clusterChanged(clusterChangedEvent);
                }
                for (ClusterStateListener listener : lastClusterStateListeners) {
                    listener.clusterChanged(clusterChangedEvent);
                }

                // disconnect from removed nodes asynchronously, after listeners have seen the change
                if (!nodesDelta.removedNodes().isEmpty()) {
                    threadPool.generic().execute(new Runnable() {
                        @Override
                        public void run() {
                            for (DiscoveryNode node : nodesDelta.removedNodes()) {
                                transportService.disconnectFromNode(node);
                            }
                        }
                    });
                }

                newClusterState.status(ClusterState.ClusterStateStatus.APPLIED);

                //manual ack only from the master at the end of the publish
                if (newClusterState.nodes().localNodeMaster()) {
                    try {
                        ackListener.onNodeAck(localNode(), null);
                    } catch (Throwable t) {
                        logger.debug("error while processing ack for master node [{}]", t, newClusterState.nodes().localNode());
                    }
                }

                if (updateTask instanceof ProcessedClusterStateUpdateTask) {
                    ((ProcessedClusterStateUpdateTask) updateTask).clusterStateProcessed(source, previousClusterState, newClusterState);
                }

                logger.debug("processing [{}]: done applying updated cluster_state (version: {})", source, newClusterState.version());
            } catch (Throwable t) {
                // NOTE(review): failures during *apply* are only logged here, unlike failures
                // during execute() which call updateTask.onFailure — see the TODO below
                StringBuilder sb = new StringBuilder("failed to apply updated cluster state:\nversion [").append(newClusterState.version()).append("], source [").append(source).append("]\n");
                sb.append(newClusterState.nodes().prettyPrint());
                sb.append(newClusterState.routingTable().prettyPrint());
                sb.append(newClusterState.readOnlyRoutingNodes().prettyPrint());
                logger.warn(sb.toString(), t);
                // TODO: do we want to call updateTask.onFailure here?
            }
        }
    }

    /**
     * Scheduled callback that fires onTimeout (or onClose if stopped) for a
     * {@link TimeoutClusterStateListener} registered via add(timeout, listener).
     */
    class NotifyTimeout implements Runnable {
        final TimeoutClusterStateListener listener;
        final TimeValue timeout;
        ScheduledFuture future;

        NotifyTimeout(TimeoutClusterStateListener listener, TimeValue timeout) {
            this.listener = listener;
            this.timeout = timeout;
        }

        public void cancel() {
            future.cancel(false);
        }

        @Override
        public void run() {
            if (future.isCancelled()) {
                return;
            }
            if (lifecycle.stoppedOrClosed()) {
                listener.onClose();
            } else {
                listener.onTimeout(this.timeout);
            }
            // note, we rely on the listener to remove itself in case of timeout if needed
        }
    }

    /**
     * Periodic task that verifies transport connections to all known nodes and
     * reconnects if needed, logging every 6th consecutive failure per node.
     * Reschedules itself while the service is started.
     */
    private class ReconnectToNodes implements Runnable {

        private ConcurrentMap<DiscoveryNode, Integer> failureCount = ConcurrentCollections.newConcurrentMap();

        @Override
        public void run() {
            // master node will check against all nodes if its alive with certain discoveries implementations,
            // but we can't rely on that, so we check on it as well
            for (DiscoveryNode node : clusterState.nodes()) {
                if (lifecycle.stoppedOrClosed()) {
                    return;
                }
                if (!nodeRequiresConnection(node)) {
                    continue;
                }
                if (clusterState.nodes().nodeExists(node.id())) { // we double check existence of node since connectToNode might take time...
                    if (!transportService.nodeConnected(node)) {
                        try {
                            transportService.connectToNode(node);
                        } catch (Exception e) {
                            if (lifecycle.stoppedOrClosed()) {
                                return;
                            }
                            if (clusterState.nodes().nodeExists(node.id())) { // double check here as well, maybe its gone?
                                Integer nodeFailureCount = failureCount.get(node);
                                if (nodeFailureCount == null) {
                                    nodeFailureCount = 1;
                                } else {
                                    nodeFailureCount = nodeFailureCount + 1;
                                }
                                // log every 6th failure
                                if ((nodeFailureCount % 6) == 0) {
                                    // reset the failure count...
                                    nodeFailureCount = 0;
                                    logger.warn("failed to reconnect to node {}", e, node);
                                }
                                failureCount.put(node, nodeFailureCount);
                            }
                        }
                    }
                }
            }
            // go over and remove failed nodes that have been removed
            DiscoveryNodes nodes = clusterState.nodes();
            for (Iterator<DiscoveryNode> failedNodesIt = failureCount.keySet().iterator(); failedNodesIt.hasNext(); ) {
                DiscoveryNode failedNode = failedNodesIt.next();
                if (!nodes.nodeExists(failedNode.id())) {
                    failedNodesIt.remove();
                }
            }
            if (lifecycle.started()) {
                reconnectToNodes = threadPool.schedule(reconnectInterval, ThreadPool.Names.GENERIC, this);
            }
        }
    }

    // delegates the connect-or-not decision to the local node (e.g. client/data roles)
    private boolean nodeRequiresConnection(DiscoveryNode node) {
        return localNode().shouldConnectTo(node);
    }

    /**
     * Tracks local-master transitions and notifies {@link LocalNodeMasterListener}s
     * (onMaster/offMaster) on each listener's own executor.
     */
    private static class LocalNodeMasterListeners implements ClusterStateListener {

        private final List<LocalNodeMasterListener> listeners = new CopyOnWriteArrayList<>();
        private final ThreadPool threadPool;
        private volatile boolean master = false;

        private LocalNodeMasterListeners(ThreadPool threadPool) {
            this.threadPool = threadPool;
        }

        @Override
        public void clusterChanged(ClusterChangedEvent event) {
            if (!master && event.localNodeMaster()) {
                master = true;
                for (LocalNodeMasterListener listener : listeners) {
                    Executor executor = threadPool.executor(listener.executorName());
                    executor.execute(new OnMasterRunnable(listener));
                }
                return;
            }
            if (master && !event.localNodeMaster()) {
                master = false;
                for (LocalNodeMasterListener listener : listeners) {
                    Executor executor = threadPool.executor(listener.executorName());
                    executor.execute(new OffMasterRunnable(listener));
                }
            }
        }

        private void add(LocalNodeMasterListener listener) {
            listeners.add(listener);
        }

        private void remove(LocalNodeMasterListener listener) {
            listeners.remove(listener);
        }

        private void clear() {
            listeners.clear();
        }
    }

    private static class OnMasterRunnable implements Runnable {

        private final LocalNodeMasterListener listener;

        private OnMasterRunnable(LocalNodeMasterListener listener) {
            this.listener = listener;
        }

        @Override
        public void run() {
            listener.onMaster();
        }
    }

    private static class OffMasterRunnable implements Runnable {

        private final LocalNodeMasterListener listener;

        private OffMasterRunnable(LocalNodeMasterListener listener) {
            this.listener = listener;
        }

        @Override
        public void run() {
            listener.offMaster();
        }
    }

    /** Ack listener used when the task does not require acknowledgements. */
    private static class NoOpAckListener implements Discovery.AckListener {
        @Override
        public void onNodeAck(DiscoveryNode node, @Nullable Throwable t) {
        }

        @Override
        public void onTimeout() {
        }
    }

    /**
     * Counts down node acks for an {@link AckedClusterStateUpdateTask} and fires
     * onAllNodesAcked when all expected nodes (always at least the master) have
     * acked, or onAckTimeout when the scheduled timeout elapses first.
     */
    private static class AckCountDownListener implements Discovery.AckListener {

        private static final ESLogger logger = Loggers.getLogger(AckCountDownListener.class);

        private final AckedClusterStateUpdateTask ackedUpdateTask;
        private final CountDown countDown;
        private final DiscoveryNodes nodes;
        private final long clusterStateVersion;
        private final Future<?> ackTimeoutCallback;
        private Throwable lastFailure;

        AckCountDownListener(AckedClusterStateUpdateTask ackedUpdateTask, long clusterStateVersion, DiscoveryNodes nodes, ThreadPool threadPool) {
            this.ackedUpdateTask = ackedUpdateTask;
            this.clusterStateVersion = clusterStateVersion;
            this.nodes = nodes;
            int countDown = 0;
            for (DiscoveryNode node : nodes) {
                if (ackedUpdateTask.mustAck(node)) {
                    countDown++;
                }
            }
            //we always wait for at least 1 node (the master)
            countDown = Math.max(1, countDown);
            logger.trace("expecting {} acknowledgements for cluster_state update (version: {})", countDown, clusterStateVersion);
            this.countDown = new CountDown(countDown);
            this.ackTimeoutCallback = threadPool.schedule(ackedUpdateTask.ackTimeout(), ThreadPool.Names.GENERIC, new Runnable() {
                @Override
                public void run() {
                    onTimeout();
                }
            });
        }

        @Override
        public void onNodeAck(DiscoveryNode node, @Nullable Throwable t) {
            if (!ackedUpdateTask.mustAck(node)) {
                //we always wait for the master ack anyway
                if (!node.equals(nodes.masterNode())) {
                    return;
                }
            }
            if (t == null) {
                logger.trace("ack received from node [{}], cluster_state update (version: {})", node, clusterStateVersion);
            } else {
                this.lastFailure = t;
                logger.debug("ack received from node [{}], cluster_state update (version: {})", t, node, clusterStateVersion);
            }

            if (countDown.countDown()) {
                logger.trace("all expected nodes acknowledged cluster_state update (version: {})", clusterStateVersion);
                ackTimeoutCallback.cancel(true);
                ackedUpdateTask.onAllNodesAcked(lastFailure);
            }
        }

        @Override
        public void onTimeout() {
            if (countDown.fastForward()) {
                logger.trace("timeout waiting for acknowledgement for cluster_state update (version: {})", clusterStateVersion);
                ackedUpdateTask.onAckTimeout();
            }
        }
    }
}
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.support.v7.util;

import java.lang.reflect.Array;

/**
 * A Sorted list implementation that can keep items in order and also notify for changes in the
 * list
 * such that it can be bound to a {@link android.support.v7.widget.RecyclerView.Adapter
 * RecyclerView.Adapter}.
 * <p>
 * It keeps items ordered using the {@link Callback#compare(Object, Object)} method and uses
 * binary search to retrieve items. If the sorting criteria of your items may change, make sure you
 * call appropriate methods while editing them to avoid data inconsistencies.
 * <p>
 * You can control the order of items and change notifications via the {@link Callback} parameter.
 */
@SuppressWarnings("unchecked")
public class SortedList<T> {

    /**
     * Used by {@link #indexOf(Object)} when the item cannot be found in the list.
     */
    public static final int INVALID_POSITION = -1;

    private static final int MIN_CAPACITY = 10;
    private static final int CAPACITY_GROWTH = MIN_CAPACITY;

    // Reasons passed to findIndexOf(); they control what the search returns when
    // an item that compares equal (but is not "the same") is encountered.
    private static final int INSERTION = 1;
    private static final int DELETION = 1 << 1;
    private static final int LOOKUP = 1 << 2;

    // Backing array; only the first mSize slots hold live items.
    T[] mData;

    /**
     * The callback instance that controls the behavior of the SortedList and get notified when
     * changes happen.
     */
    private Callback mCallback;

    // Lazily-created wrapper installed while a batched-update transaction is active.
    private BatchedCallback mBatchedCallback;

    private int mSize;

    // Kept so the backing array can be re-allocated with the correct component type.
    private final Class<T> mTClass;

    /**
     * Creates a new SortedList of type T.
     *
     * @param klass    The class of the contents of the SortedList.
     * @param callback The callback that controls the behavior of SortedList.
     */
    public SortedList(Class<T> klass, Callback<T> callback) {
        this(klass, callback, MIN_CAPACITY);
    }

    /**
     * Creates a new SortedList of type T.
     *
     * @param klass           The class of the contents of the SortedList.
     * @param callback        The callback that controls the behavior of SortedList.
     * @param initialCapacity The initial capacity to hold items.
     */
    public SortedList(Class<T> klass, Callback<T> callback, int initialCapacity) {
        mTClass = klass;
        mData = (T[]) Array.newInstance(klass, initialCapacity);
        mCallback = callback;
        mSize = 0;
    }

    /**
     * The number of items in the list.
     *
     * @return The number of items in the list.
     */
    public int size() {
        return mSize;
    }

    /**
     * Adds the given item to the list. If this is a new item, SortedList calls
     * {@link Callback#onInserted(int, int)}.
     * <p>
     * If the item already exists in the list and its sorting criteria is not changed, it is
     * replaced with the existing Item. SortedList uses
     * {@link Callback#areItemsTheSame(Object, Object)} to check if two items are the same item
     * and uses {@link Callback#areContentsTheSame(Object, Object)} to decide whether it should
     * call {@link Callback#onChanged(int, int)} or not. In both cases, it always removes the
     * reference to the old item and puts the new item into the backing array even if
     * {@link Callback#areContentsTheSame(Object, Object)} returns false.
     * <p>
     * If the sorting criteria of the item is changed, SortedList won't be able to find
     * its duplicate in the list which will result in having a duplicate of the Item in the list.
     * If you need to update sorting criteria of an item that already exists in the list,
     * use {@link #updateItemAt(int, Object)}. You can find the index of the item using
     * {@link #indexOf(Object)} before you update the object.
     *
     * @param item The item to be added into the list.
     * @return The index of the newly added item.
     * @see Callback#compare(Object, Object)
     * @see Callback#areItemsTheSame(Object, Object)
     * @see Callback#areContentsTheSame(Object, Object)
     */
    public int add(T item) {
        return add(item, true);
    }

    /**
     * Batches adapter updates that happen between calling this method until calling
     * {@link #endBatchedUpdates()}. For example, if you add multiple items in a loop
     * and they are placed into consecutive indices, SortedList calls
     * {@link Callback#onInserted(int, int)} only once with the proper item count. If an event
     * cannot be merged with the previous event, the previous event is dispatched
     * to the callback instantly.
     * <p>
     * After running your data updates, you <b>must</b> call {@link #endBatchedUpdates()}
     * which will dispatch any deferred data change event to the current callback.
     * <p>
     * A sample implementation may look like this:
     * <pre>
     *     mSortedList.beginBatchedUpdates();
     *     try {
     *         mSortedList.add(item1)
     *         mSortedList.add(item2)
     *         mSortedList.remove(item3)
     *         ...
     *     } finally {
     *         mSortedList.endBatchedUpdates();
     *     }
     * </pre>
     * <p>
     * Instead of using this method to batch calls, you can use a Callback that extends
     * {@link BatchedCallback}. In that case, you must make sure that you are manually calling
     * {@link BatchedCallback#dispatchLastEvent()} right after you complete your data changes.
     * Failing to do so may create data inconsistencies with the Callback.
     * <p>
     * If the current Callback in an instance of {@link BatchedCallback}, calling this method
     * has no effect.
     */
    public void beginBatchedUpdates() {
        // already batching (either a previous begin or a user-supplied BatchedCallback)
        if (mCallback instanceof BatchedCallback) {
            return;
        }
        if (mBatchedCallback == null) {
            mBatchedCallback = new BatchedCallback(mCallback);
        }
        mCallback = mBatchedCallback;
    }

    /**
     * Ends the update transaction and dispatches any remaining event to the callback.
     */
    public void endBatchedUpdates() {
        if (mCallback instanceof BatchedCallback) {
            // flush any merged-but-undispatched event
            ((BatchedCallback) mCallback).dispatchLastEvent();
        }
        if (mCallback == mBatchedCallback) {
            // restore the user's original callback
            mCallback = mBatchedCallback.mWrappedCallback;
        }
    }

    private int add(T item, boolean notify) {
        int index = findIndexOf(item, INSERTION);
        if (index == INVALID_POSITION) {
            // defensive: findIndexOf never returns INVALID_POSITION for INSERTION
            // (it falls back to the binary-search insertion point), but guard anyway
            index = 0;
        } else if (index < mSize) {
            T existing = mData[index];
            if (mCallback.areItemsTheSame(existing, item)) {
                if (mCallback.areContentsTheSame(existing, item)) {
                    //no change but still replace the item
                    mData[index] = item;
                    return index;
                } else {
                    mData[index] = item;
                    mCallback.onChanged(index, 1);
                    return index;
                }
            }
        }
        addToData(index, item);
        if (notify) {
            mCallback.onInserted(index, 1);
        }
        return index;
    }

    /**
     * Removes the provided item from the list and calls {@link Callback#onRemoved(int, int)}.
     *
     * @param item The item to be removed from the list.
     * @return True if item is removed, false if item cannot be found in the list.
     */
    public boolean remove(T item) {
        return remove(item, true);
    }

    /**
     * Removes the item at the given index and calls {@link Callback#onRemoved(int, int)}.
     *
     * @param index The index of the item to be removed.
     * @return The removed item.
     */
    public T removeItemAt(int index) {
        T item = get(index);
        removeItemAtIndex(index, true);
        return item;
    }

    private boolean remove(T item, boolean notify) {
        int index = findIndexOf(item, DELETION);
        if (index == INVALID_POSITION) {
            return false;
        }
        removeItemAtIndex(index, notify);
        return true;
    }

    private void removeItemAtIndex(int index, boolean notify) {
        // shift the tail left by one and clear the vacated slot so the item can be GCed
        System.arraycopy(mData, index + 1, mData, index, mSize - index - 1);
        mSize--;
        mData[mSize] = null;
        if (notify) {
            mCallback.onRemoved(index, 1);
        }
    }

    /**
     * Updates the item at the given index and calls {@link Callback#onChanged(int, int)} and/or
     * {@link Callback#onMoved(int, int)} if necessary.
     * <p>
     * You can use this method if you need to change an existing Item such that its position in the
     * list may change.
     * <p>
     * If the new object is a different object (<code>get(index) != item</code>) and
     * {@link Callback#areContentsTheSame(Object, Object)} returns <code>true</code>, SortedList
     * avoids calling {@link Callback#onChanged(int, int)} otherwise it calls
     * {@link Callback#onChanged(int, int)}.
     * <p>
     * If the new position of the item is different than the provided <code>index</code>,
     * SortedList
     * calls {@link Callback#onMoved(int, int)}.
     *
     * @param index The index of the item to replace
     * @param item  The item to replace the item at the given Index.
     * @see #add(Object)
     */
    public void updateItemAt(int index, T item) {
        final T existing = get(index);
        // assume changed if the same object is given back
        boolean contentsChanged = existing == item || !mCallback.areContentsTheSame(existing, item);
        if (existing != item) {
            // different items, we can use comparison and may avoid lookup
            final int cmp = mCallback.compare(existing, item);
            if (cmp == 0) {
                // same sort position: replace in place, no move needed
                mData[index] = item;
                if (contentsChanged) {
                    mCallback.onChanged(index, 1);
                }
                return;
            }
        }
        if (contentsChanged) {
            mCallback.onChanged(index, 1);
        }
        // TODO this done in 1 pass to avoid shifting twice.
        removeItemAtIndex(index, false);
        int newIndex = add(item, false);
        if (index != newIndex) {
            mCallback.onMoved(index, newIndex);
        }
    }

    /**
     * This method can be used to recalculate the position of the item at the given index, without
     * triggering an {@link Callback#onChanged(int, int)} callback.
     * <p>
     * If you are editing objects in the list such that their position in the list may change but
     * you don't want to trigger an onChange animation, you can use this method to re-position it.
     * If the item changes position, SortedList will call {@link Callback#onMoved(int, int)}
     * without
     * calling {@link Callback#onChanged(int, int)}.
     * <p>
     * A sample usage may look like:
     *
     * <pre>
     *     final int position = mSortedList.indexOf(item);
     *     item.incrementPriority(); // assume items are sorted by priority
     *     mSortedList.recalculatePositionOfItemAt(position);
     * </pre>
     * In the example above, because the sorting criteria of the item has been changed,
     * mSortedList.indexOf(item) will not be able to find the item. This is why the code above
     * first
     * gets the position before editing the item, edits it and informs the SortedList that item
     * should be repositioned.
     *
     * @param index The current index of the Item whose position should be re-calculated.
     * @see #updateItemAt(int, Object)
     * @see #add(Object)
     */
    public void recalculatePositionOfItemAt(int index) {
        // TODO can be improved
        final T item = get(index);
        removeItemAtIndex(index, false);
        int newIndex = add(item, false);
        if (index != newIndex) {
            mCallback.onMoved(index, newIndex);
        }
    }

    /**
     * Returns the item at the given index.
     *
     * @param index The index of the item to retrieve.
     * @return The item at the given index.
     * @throws java.lang.IndexOutOfBoundsException if provided index is negative or larger than the
     *                                             size of the list.
     */
    public T get(int index) throws IndexOutOfBoundsException {
        if (index >= mSize || index < 0) {
            throw new IndexOutOfBoundsException("Asked to get item at " + index + " but size is "
                    + mSize);
        }
        return mData[index];
    }

    /**
     * Returns the position of the provided item.
     *
     * @param item The item to query for position.
     * @return The position of the provided item or {@link #INVALID_POSITION} if item is not in the
     * list.
     */
    public int indexOf(T item) {
        return findIndexOf(item, LOOKUP);
    }

    /**
     * Binary search for the given item. Because multiple distinct items may compare as equal,
     * a hit on the comparator falls back to {@link #linearEqualitySearch} to find the exact
     * same item via {@link Callback#areItemsTheSame(Object, Object)}.
     */
    private int findIndexOf(T item, int reason) {
        int left = 0;
        int right = mSize;
        while (left < right) {
            // unsigned shift instead of (left + right) / 2 to avoid int overflow
            // on very large lists (classic binary-search midpoint bug)
            final int middle = (left + right) >>> 1;
            T myItem = mData[middle];
            final int cmp = mCallback.compare(myItem, item);
            if (cmp < 0) {
                left = middle + 1;
            } else if (cmp == 0) {
                if (mCallback.areItemsTheSame(myItem, item)) {
                    return middle;
                } else {
                    int exact = linearEqualitySearch(item, middle, left, right);
                    if (reason == INSERTION) {
                        // for insertion, an equal-but-different neighbor position is acceptable
                        return exact == INVALID_POSITION ? middle : exact;
                    } else {
                        return exact;
                    }
                }
            } else {
                right = middle;
            }
        }
        // not found: insertion point for INSERTION, otherwise a miss
        return reason == INSERTION ? left : INVALID_POSITION;
    }

    private int linearEqualitySearch(T item, int middle, int left, int right) {
        // go left
        for (int next = middle - 1; next >= left; next--) {
            T nextItem = mData[next];
            int cmp = mCallback.compare(nextItem, item);
            if (cmp != 0) {
                break;
            }
            if (mCallback.areItemsTheSame(nextItem, item)) {
                return next;
            }
        }
        // go right
        for (int next = middle + 1; next < right; next++) {
            T nextItem = mData[next];
            int cmp = mCallback.compare(nextItem, item);
            if (cmp != 0) {
                break;
            }
            if (mCallback.areItemsTheSame(nextItem, item)) {
                return next;
            }
        }
        return INVALID_POSITION;
    }

    private void addToData(int index, T item) {
        if (index > mSize) {
            throw new IndexOutOfBoundsException(
                    "cannot add item to " + index + " because size is " + mSize);
        }
        if (mSize == mData.length) {
            // we are at the limit enlarge
            T[] newData = (T[]) Array.newInstance(mTClass, mData.length + CAPACITY_GROWTH);
            System.arraycopy(mData, 0, newData, 0, index);
            newData[index] = item;
            System.arraycopy(mData, index, newData, index + 1, mSize - index);
            mData = newData;
        } else {
            // just shift, we fit
            System.arraycopy(mData, index, mData, index + 1, mSize - index);
            mData[index] = item;
        }
        mSize++;
    }

    /**
     * The class that controls the behavior of the {@link SortedList}.
     * <p>
     * It defines how items should be sorted and how duplicates should be handled.
     * <p>
     * SortedList calls the callback methods on this class to notify changes about the underlying
     * data.
     */
    public static abstract class Callback<T2> {

        /**
         * Similar to {@link java.util.Comparator#compare(Object, Object)}, should compare two and
         * return how they should be ordered.
         *
         * @param o1 The first object to compare.
         * @param o2 The second object to compare.
         * @return a negative integer, zero, or a positive integer as the
         * first argument is less than, equal to, or greater than the
         * second.
         */
        abstract public int compare(T2 o1, T2 o2);

        /**
         * Called by the SortedList when an item is inserted at the given position.
         *
         * @param position The position of the new item.
         * @param count    The number of items that have been added.
         */
        abstract public void onInserted(int position, int count);

        /**
         * Called by the SortedList when an item is removed from the given position.
         *
         * @param position The position of the item which has been removed.
         * @param count    The number of items which have been removed.
         */
        abstract public void onRemoved(int position, int count);

        /**
         * Called by the SortedList when an item changes its position in the list.
         *
         * @param fromPosition The previous position of the item before the move.
         * @param toPosition   The new position of the item.
         */
        abstract public void onMoved(int fromPosition, int toPosition);

        /**
         * Called by the SortedList when the item at the given position is updated.
         *
         * @param position The position of the item which has been updated.
         * @param count    The number of items which has changed.
         */
        abstract public void onChanged(int position, int count);

        /**
         * Called by the SortedList when it wants to check whether two items have the same data
         * or not. SortedList uses this information to decide whether it should call
         * {@link #onChanged(int, int)} or not.
         * <p>
         * SortedList uses this method to check equality instead of {@link Object#equals(Object)}
         * so
         * that you can change its behavior depending on your UI.
         * <p>
         * For example, if you are using SortedList with a {@link android.support.v7.widget.RecyclerView.Adapter
         * RecyclerView.Adapter}, you should
         * return whether the items' visual representations are the same or not.
         *
         * @param oldItem The previous representation of the object.
         * @param newItem The new object that replaces the previous one.
         * @return True if the contents of the items are the same or false if they are different.
         */
        abstract public boolean areContentsTheSame(T2 oldItem, T2 newItem);

        /**
         * Called by the SortedList to decide whether two object represent the same Item or not.
         * <p>
         * For example, if your items have unique ids, this method should check their equality.
         *
         * @param item1 The first item to check.
         * @param item2 The second item to check.
         * @return True if the two items represent the same object or false if they are different.
         */
        abstract public boolean areItemsTheSame(T2 item1, T2 item2);
    }

    /**
     * A callback implementation that can batch notify events dispatched by the SortedList.
     * <p>
     * This class can be useful if you want to do multiple operations on a SortedList but don't
     * want to dispatch each event one by one, which may result in a performance issue.
     * <p>
     * For example, if you are going to add multiple items to a SortedList, BatchedCallback call
     * convert individual <code>onInserted(index, 1)</code> calls into one
     * <code>onInserted(index, N)</code> if items are added into consecutive indices. This change
     * can help RecyclerView resolve changes much more easily.
     * <p>
     * If consecutive changes in the SortedList are not suitable for batching, BatchingCallback
     * dispatches them as soon as such case is detected. After your edits on the SortedList is
     * complete, you <b>must</b> always call {@link BatchedCallback#dispatchLastEvent()} to flush
     * all changes to the Callback.
     */
    public static class BatchedCallback<T2> extends Callback<T2> {

        private final Callback<T2> mWrappedCallback;

        static final int TYPE_NONE = 0;
        static final int TYPE_ADD = 1;
        static final int TYPE_REMOVE = 2;
        static final int TYPE_CHANGE = 3;
        static final int TYPE_MOVE = 4;

        // The pending (not yet dispatched) merged event.
        int mLastEventType = TYPE_NONE;
        int mLastEventPosition = -1;
        int mLastEventCount = -1;

        /**
         * Creates a new BatchedCallback that wraps the provided Callback.
         *
         * @param wrappedCallback The Callback which should received the data change callbacks.
         *                        Other method calls (e.g. {@link #compare(Object, Object)} from
         *                        the SortedList are directly forwarded to this Callback.
         */
        public BatchedCallback(Callback<T2> wrappedCallback) {
            mWrappedCallback = wrappedCallback;
        }

        @Override
        public int compare(T2 o1, T2 o2) {
            return mWrappedCallback.compare(o1, o2);
        }

        @Override
        public void onInserted(int position, int count) {
            // merge with the pending add if the new range touches or overlaps it
            if (mLastEventType == TYPE_ADD && position >= mLastEventPosition
                    && position <= mLastEventPosition + mLastEventCount) {
                mLastEventCount += count;
                mLastEventPosition = Math.min(position, mLastEventPosition);
                return;
            }
            dispatchLastEvent();
            mLastEventPosition = position;
            mLastEventCount = count;
            mLastEventType = TYPE_ADD;
        }

        @Override
        public void onRemoved(int position, int count) {
            // consecutive removals at the same index collapse into one event
            if (mLastEventType == TYPE_REMOVE && mLastEventPosition == position) {
                mLastEventCount += count;
                return;
            }
            dispatchLastEvent();
            mLastEventPosition = position;
            mLastEventCount = count;
            mLastEventType = TYPE_REMOVE;
        }

        @Override
        public void onMoved(int fromPosition, int toPosition) {
            dispatchLastEvent();//moves are not merged
            mWrappedCallback.onMoved(fromPosition, toPosition);
        }

        @Override
        public void onChanged(int position, int count) {
            if (mLastEventType == TYPE_CHANGE &&
                    !(position > mLastEventPosition + mLastEventCount
                            || position + count < mLastEventPosition)) {
                // take potential overlap into account
                int previousEnd = mLastEventPosition + mLastEventCount;
                mLastEventPosition = Math.min(position, mLastEventPosition);
                mLastEventCount = Math.max(previousEnd, position + count) - mLastEventPosition;
                return;
            }
            dispatchLastEvent();
            mLastEventPosition = position;
            mLastEventCount = count;
            mLastEventType = TYPE_CHANGE;
        }

        @Override
        public boolean areContentsTheSame(T2 oldItem, T2 newItem) {
            return mWrappedCallback.areContentsTheSame(oldItem, newItem);
        }

        @Override
        public boolean areItemsTheSame(T2 item1, T2 item2) {
            return mWrappedCallback.areItemsTheSame(item1, item2);
        }

        /**
         * This method dispatches any pending event notifications to the wrapped Callback.
         * You <b>must</b> always call this method after you are done with editing the SortedList.
         */
        public void dispatchLastEvent() {
            if (mLastEventType == TYPE_NONE) {
                return;
            }
            switch (mLastEventType) {
                case TYPE_ADD:
                    mWrappedCallback.onInserted(mLastEventPosition, mLastEventCount);
                    break;
                case TYPE_REMOVE:
                    mWrappedCallback.onRemoved(mLastEventPosition, mLastEventCount);
                    break;
                case TYPE_CHANGE:
                    mWrappedCallback.onChanged(mLastEventPosition, mLastEventCount);
                    break;
            }
            mLastEventType = TYPE_NONE;
        }
    }
}
package org.apereo.cas.gauth.token; import org.apereo.cas.authentication.OneTimeToken; import org.apereo.cas.configuration.model.support.mfa.gauth.DynamoDbGoogleAuthenticatorMultifactorProperties; import org.apereo.cas.dynamodb.DynamoDbQueryBuilder; import org.apereo.cas.dynamodb.DynamoDbTableUtils; import org.apereo.cas.util.CollectionUtils; import org.apereo.cas.util.serialization.JacksonObjectMapperFactory; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import lombok.Getter; import lombok.RequiredArgsConstructor; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import lombok.val; import software.amazon.awssdk.services.dynamodb.DynamoDbClient; import software.amazon.awssdk.services.dynamodb.model.AttributeDefinition; import software.amazon.awssdk.services.dynamodb.model.AttributeValue; import software.amazon.awssdk.services.dynamodb.model.ComparisonOperator; import software.amazon.awssdk.services.dynamodb.model.DeleteItemRequest; import software.amazon.awssdk.services.dynamodb.model.KeySchemaElement; import software.amazon.awssdk.services.dynamodb.model.KeyType; import software.amazon.awssdk.services.dynamodb.model.PutItemRequest; import software.amazon.awssdk.services.dynamodb.model.ScalarAttributeType; import software.amazon.awssdk.services.dynamodb.model.ScanRequest; import java.time.LocalDateTime; import java.time.ZoneOffset; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; /** * This is {@link GoogleAuthenticatorDynamoDbTokenRepositoryFacilitator}. 
* * @author Misagh Moayyed * @since 6.5.0 */ @RequiredArgsConstructor @Slf4j public class GoogleAuthenticatorDynamoDbTokenRepositoryFacilitator { private static final ObjectMapper MAPPER = JacksonObjectMapperFactory.builder() .defaultTypingEnabled(false).build().toObjectMapper(); private final DynamoDbGoogleAuthenticatorMultifactorProperties dynamoDbProperties; private final DynamoDbClient amazonDynamoDBClient; @SneakyThrows private static Map<String, AttributeValue> buildTableAttributeValuesMap(final OneTimeToken record) { val values = new HashMap<String, AttributeValue>(); values.put(ColumnNames.ID.getColumnName(), AttributeValue.builder().n(String.valueOf(record.getId())).build()); values.put(ColumnNames.USERID.getColumnName(), AttributeValue.builder().s(record.getUserId().toLowerCase()).build()); values.put(ColumnNames.TOKEN.getColumnName(), AttributeValue.builder().n(String.valueOf(record.getToken()).toLowerCase()).build()); val time = record.getIssuedDateTime().toEpochSecond(ZoneOffset.UTC); values.put(ColumnNames.CREATION_TIME.getColumnName(), AttributeValue.builder().n(String.valueOf(time)).build()); values.put(ColumnNames.BODY.getColumnName(), AttributeValue.builder().s(MAPPER.writeValueAsString(record)).build()); LOGGER.debug("Created attribute values [{}] based on [{}]", values, record); return values; } @SneakyThrows private static GoogleAuthenticatorToken extractAttributeValuesFrom(final Map<String, AttributeValue> item) { return MAPPER.readValue(item.get(ColumnNames.BODY.getColumnName()).s(), new TypeReference<>() { }); } /** * Create table. 
* * @param deleteTables delete existing tables */ @SneakyThrows public void createTable(final boolean deleteTables) { DynamoDbTableUtils.createTable(amazonDynamoDBClient, dynamoDbProperties, dynamoDbProperties.getTokenTableName(), deleteTables, List.of(AttributeDefinition.builder() .attributeName(ColumnNames.ID.getColumnName()) .attributeType(ScalarAttributeType.N).build()), List.of(KeySchemaElement.builder() .attributeName(ColumnNames.ID.getColumnName()) .keyType(KeyType.HASH).build())); } /** * Find. * * @param uid the uid * @param otp the otp * @return the google authenticator token */ public GoogleAuthenticatorToken find(final String uid, final Integer otp) { val query = List.of( DynamoDbQueryBuilder.builder() .key(ColumnNames.USERID.getColumnName()) .attributeValue(List.of(AttributeValue.builder().s(uid.toLowerCase()).build())) .operator(ComparisonOperator.EQ) .build(), DynamoDbQueryBuilder.builder() .key(ColumnNames.TOKEN.getColumnName()) .attributeValue(List.of(AttributeValue.builder().n(String.valueOf(otp)).build())) .operator(ComparisonOperator.EQ) .build()); val results = getRecordsByKeys(query); return results.isEmpty() ? null : results.iterator().next(); } /** * Store. * * @param token the token */ public void store(final OneTimeToken token) { val values = buildTableAttributeValuesMap(token); val putItemRequest = PutItemRequest.builder().tableName(dynamoDbProperties.getTokenTableName()).item(values).build(); LOGGER.debug("Submitting put request [{}] for record [{}]", putItemRequest, token); val putItemResult = amazonDynamoDBClient.putItem(putItemRequest); LOGGER.debug("Record added with result [{}]", putItemResult); } /** * Count. 
* * @return the long */ public long count() { val scan = ScanRequest.builder().tableName(dynamoDbProperties.getTokenTableName()).build(); LOGGER.debug("Scanning table with request [{}] to count items", scan); val result = this.amazonDynamoDBClient.scan(scan); LOGGER.debug("Scanned table with result [{}]", scan); return result.count(); } /** * Count. * * @param uid the uid * @return the long */ public long count(final String uid) { val query = List.of( DynamoDbQueryBuilder.builder() .key(ColumnNames.USERID.getColumnName()) .attributeValue(List.of(AttributeValue.builder().s(uid.toLowerCase()).build())) .operator(ComparisonOperator.EQ) .build()); return getRecordsByKeys(query).size(); } /** * Remove all. */ public void removeAll() { createTable(true); } /** * Remove. * * @param otp the otp */ public void remove(final Integer otp) { val query = List.of( DynamoDbQueryBuilder.builder() .key(ColumnNames.TOKEN.getColumnName()) .attributeValue(List.of(AttributeValue.builder().n(String.valueOf(otp)).build())) .operator(ComparisonOperator.EQ) .build()); val records = getRecordsByKeys(query); records.forEach(record -> { val del = DeleteItemRequest.builder() .tableName(dynamoDbProperties.getTokenTableName()) .key(CollectionUtils.wrap(ColumnNames.ID.getColumnName(), AttributeValue.builder().n(String.valueOf(record.getId())).build())) .build(); LOGGER.debug("Submitting delete request [{}] for [{}]", del, record.getId()); val res = amazonDynamoDBClient.deleteItem(del); LOGGER.debug("Delete request came back with result [{}]", res); }); } /** * Remove. 
* * @param uid the uid */ public void remove(final String uid) { val query = List.of( DynamoDbQueryBuilder.builder() .key(ColumnNames.USERID.getColumnName()) .attributeValue(List.of(AttributeValue.builder().s(String.valueOf(uid)).build())) .operator(ComparisonOperator.EQ) .build()); val records = getRecordsByKeys(query); records.forEach(record -> { val del = DeleteItemRequest.builder() .tableName(dynamoDbProperties.getTokenTableName()) .key(CollectionUtils.wrap(ColumnNames.ID.getColumnName(), AttributeValue.builder().n(String.valueOf(record.getId())).build())) .build(); LOGGER.debug("Submitting delete request [{}] for [{}]", del, record.getId()); val res = amazonDynamoDBClient.deleteItem(del); LOGGER.debug("Delete request came back with result [{}]", res); }); } /** * Remove. * * @param uid the uid * @param otp the otp */ public void remove(final String uid, final Integer otp) { val query = List.of( DynamoDbQueryBuilder.builder() .key(ColumnNames.USERID.getColumnName()) .attributeValue(List.of(AttributeValue.builder().s(String.valueOf(uid)).build())) .operator(ComparisonOperator.EQ) .build(), DynamoDbQueryBuilder.builder() .key(ColumnNames.TOKEN.getColumnName()) .attributeValue(List.of(AttributeValue.builder().n(String.valueOf(otp)).build())) .operator(ComparisonOperator.EQ) .build()); val records = getRecordsByKeys(query); records.forEach(record -> { val del = DeleteItemRequest.builder() .tableName(dynamoDbProperties.getTokenTableName()) .key(CollectionUtils.wrap(ColumnNames.ID.getColumnName(), AttributeValue.builder().n(String.valueOf(record.getId())).build())) .build(); LOGGER.debug("Submitting delete request [{}] for [{}]", del, record.getId()); val res = amazonDynamoDBClient.deleteItem(del); LOGGER.debug("Delete request came back with result [{}]", res); }); } /** * Remove from. 
* * @param time the time */ public void removeFrom(final LocalDateTime time) { val epoch = time.toEpochSecond(ZoneOffset.UTC); val query = List.of( DynamoDbQueryBuilder.builder() .key(ColumnNames.CREATION_TIME.getColumnName()) .attributeValue(List.of(AttributeValue.builder().n(String.valueOf(epoch)).build())) .operator(ComparisonOperator.GE) .build()); val records = getRecordsByKeys(query); records.forEach(record -> { val del = DeleteItemRequest.builder() .tableName(dynamoDbProperties.getTokenTableName()) .key(CollectionUtils.wrap(ColumnNames.ID.getColumnName(), AttributeValue.builder().n(String.valueOf(record.getId())).build())) .build(); LOGGER.debug("Submitting delete request [{}] since [{}]", del, epoch); val res = amazonDynamoDBClient.deleteItem(del); LOGGER.debug("Delete request came back with result [{}]", res); }); } /** * The column names. */ @Getter @RequiredArgsConstructor public enum ColumnNames { /** * User id column. */ ID("id"), /** * User id column. */ USERID("userid"), /** * id column. */ TOKEN("token"), /** * creation time column. */ CREATION_TIME("creationTime"), /** * properties column. */ BODY("body"); private final String columnName; } private Set<GoogleAuthenticatorToken> getRecordsByKeys(final List<DynamoDbQueryBuilder> queries) { return DynamoDbTableUtils.getRecordsByKeys(amazonDynamoDBClient, dynamoDbProperties.getTokenTableName(), queries, GoogleAuthenticatorDynamoDbTokenRepositoryFacilitator::extractAttributeValuesFrom) .collect(Collectors.toSet()); } }
/** * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. * * @author Wei Zhang, Language Technology Institute, School of Computer Science, Carnegie-Mellon University. * email: wei.zhang@cs.cmu.edu * */ package edu.cmu.geoparser.resource.gazindexing; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.Field.Index; import org.apache.lucene.document.Field.TermVector; import org.apache.lucene.document.NumericField; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.NumericRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.TopDocs; import edu.cmu.geoparser.common.StringUtil; import edu.cmu.geoparser.io.GetReader; import edu.cmu.geoparser.io.GetWriter; /* * This is used for indexing the Gazetteer for misspelling checking. * * @Input: Gaz entries * * @Output: Index of Gaz * * Features used for each word: (e.g. Chilee) 1. c,h,i,l,e,e 2. ch,hi,il,le,ee * 3. 
c_0,h_1,i_2,l_3,e_4,e_5 */ public class GazIndexerForAlternativeNames { static String f_unigram, f_bigram, f_trigram, f_positionunigram; static String getUnigram() { return f_unigram; } static String getBigram() { return f_bigram; } static String getTrigram() { return f_trigram; } static String getPositionUnigram() { return f_positionunigram; } static void getIndexFeatures(String phrase) { // prepare for indexing char[] locchars = StringUtil.getDeAccentLoweredChars(phrase); String[] bigramloc = StringUtil.getBigram(locchars); String[] trigramloc = StringUtil.getTrigram(locchars); String[] positionloc = StringUtil.getPosition(locchars); f_unigram = StringUtil.factorize(locchars); f_bigram = StringUtil.factorize(bigramloc); f_trigram = StringUtil.factorize(trigramloc); f_positionunigram = StringUtil.factorize(positionloc); } // main method for indexing gazatteer into index. void indexGazatteer(BufferedReader br, IndexWriter iw) throws IOException, InterruptedException { Document d = new Document(); NumericField nfid = new NumericField("ID", Field.Store.YES, true); NumericField nflong = new NumericField("LONGTITUDE", Field.Store.YES, true); NumericField nfla = new NumericField("LATITUDE", Field.Store.YES, true); NumericField nfpop = new NumericField("POPULATION", Field.Store.YES, true); Field sforigin = new Field("ORIGIN", false, "", Field.Store.YES, Index.ANALYZED, TermVector.NO); Field normws = new Field("NORM-WS", false, "", Field.Store.YES, Index.NOT_ANALYZED, TermVector.NO); Field normnws = new Field("NORM-NO-WS", false, "", Field.Store.YES, Index.NOT_ANALYZED, TermVector.NO); Field sfotherlang = new Field("OTHERLANG", false, "", Field.Store.YES, Index.NOT_ANALYZED, TermVector.NO); Field sfunigram = new Field("UNIGRAM", false, "", Field.Store.YES, Index.ANALYZED, TermVector.NO); Field sfbigram = new Field("BIGRAM", false, "", Field.Store.YES, Index.ANALYZED, TermVector.NO); Field sftrigram = new Field("TRIGRAM", false, "", Field.Store.YES, Index.ANALYZED, 
TermVector.NO); Field sfposition = new Field("POSITION", false, "", Field.Store.YES, Index.ANALYZED, TermVector.NO); Field sfcountrystate = new Field("COUNTRYSTATE", false, "", Field.Store.YES, Index.NOT_ANALYZED, TermVector.NO); Field sffeature = new Field("FEATURE", false, "", Field.Store.YES, Index.NOT_ANALYZED, TermVector.NO); Field sftimezone = new Field("TIMEZONE", false, "", Field.Store.YES, Index.NOT_ANALYZED, TermVector.NO); d.add(nfid); d.add(nflong); d.add(nfla); d.add(nfpop); d.add(sforigin); d.add(normws); d.add(normnws); d.add(sfotherlang); d.add(sfunigram); d.add(sfbigram); d.add(sftrigram); d.add(sfposition); d.add(sfcountrystate); d.add(sffeature); d.add(sftimezone); String line; int linen = 0; while ((line = br.readLine()) != null) { if (linen++ % 10000 == 0) System.out.println(linen + "\n" + line); String[] column = line.trim().split("\t"); // get other columns except for the location words String id = column[0]; long lid = Long.parseLong(id); String phrase = column[1]; String otherlang = column[3]; String latitude = column[4]; String longtitude = column[5]; double dlong, dla; if (latitude == null) { dlong = 999; dla = 999; } else { dlong = Double.parseDouble(longtitude); dla = Double.parseDouble(latitude); } String featureclass = column[6]; String feature = column[7]; String country = column[8]; String state = column[10] + "_" + column[11] + "_" + column[12] + "_" + column[13]; String population = column[14]; long longpop; if (population == null) longpop = -1l; longpop = Long.parseLong(population); String timezone = column[17]; // To Do: set values to document d, and index it nfid.setLongValue(lid);// 1 nflong.setDoubleValue(dlong); nfla.setDoubleValue(dla); nfpop.setLongValue(longpop); sforigin.setValue(phrase);// 5 normws.setValue(StringUtil.getDeAccentLoweredString(phrase)); normnws.setValue(StringUtil.getDeAccentLoweredString(phrase).replaceAll(" ", "")); sfotherlang.setValue(otherlang); getIndexFeatures(phrase); 
sfunigram.setValue(getUnigram()); sfbigram.setValue(getBigram()); sftrigram.setValue(getTrigram()); sfposition.setValue(getPositionUnigram());// 10 sfcountrystate.setValue(country + "_" + state); sffeature.setValue(featureclass + "_" + feature); sftimezone.setValue(timezone);// 13 // add this new document. iw.addDocument(d); String[]otherlangs = otherlang.split(","); if(otherlangs.length<2)continue; for (String ph: otherlangs){ nfid.setLongValue(lid);// 1 nflong.setDoubleValue(dlong); nfla.setDoubleValue(dla); nfpop.setLongValue(longpop); sforigin.setValue(ph);// 5 normws.setValue(StringUtil.getDeAccentLoweredString(ph)); normnws.setValue(StringUtil.getDeAccentLoweredString(ph).replaceAll(" ", "")); sfotherlang.setValue(""); getIndexFeatures(ph); sfunigram.setValue(getUnigram()); sfbigram.setValue(getBigram()); sftrigram.setValue(getTrigram()); sfposition.setValue(getPositionUnigram());// 10 sfcountrystate.setValue(country + "_" + state); sffeature.setValue(featureclass + "_" + feature); sftimezone.setValue(timezone);// 13 // add this new document. iw.addDocument(d); } } } public static void main(String argv[]) throws Exception { GazIndexerForAlternativeNames gi = new GazIndexerForAlternativeNames(); argv[0]="-write"; argv[1] ="GeoNames/EG.txt";argv[2]="EG"; String mode = argv[0]; if (mode.equals("-write")) { BufferedReader br = GetReader.getUTF8FileReader(argv[1]); IndexWriter iw = GetWriter.getIndexWriter(argv[2]); iw.deleteAll(); gi.indexGazatteer(br, iw); iw.optimize(); iw.close(); br.close(); } if (mode.equals("-read")) { System.out.println("input id. Output basic information. For debugging."); // query first two fields. 
IndexSearcher is = GetReader.getIndexSearcher(argv[1]); BufferedReader r = new BufferedReader(new InputStreamReader(System.in, "utf-8")); String line; while ((line = r.readLine()) != null) { long id; try { id = Long.parseLong(line); } catch (Exception e) { System.err.println("number wrong."); continue; } Query q = NumericRangeQuery.newLongRange("ID", id, id, true, true); long start = System.currentTimeMillis(); TopDocs docs = is.search(q, 1); if (docs == null) { System.err.println("Not found."); continue; } if (docs.scoreDocs.length == 0) { System.err.println("Not found."); continue; } ScoreDoc sd = docs.scoreDocs[0]; Document d = is.doc(sd.doc); long end = System.currentTimeMillis(); System.out.println(d.get("ID")); System.out.println(d.get("ORIGIN")); System.out.println(d.get("LONGTITUDE") + " " + d.get("LATITUDE")); System.out.println("lookup time: " + (end - start)); } } } }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.indices.recovery; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.ShardRoutingHelper; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.engine.NoOpEngine; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.seqno.SeqNoStats; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShardTestCase; import 
org.elasticsearch.index.store.Store;
import org.elasticsearch.index.store.StoreFileMetadata;
import org.elasticsearch.index.translog.Translog;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CyclicBarrier;
import java.util.stream.Collectors;
import java.util.stream.LongStream;

import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.sameInstance;

/**
 * Unit tests for the peer-recovery target side: concurrent file-chunk writes and
 * the pre-recovery local translog replay / start-request reset logic.
 */
public class PeerRecoveryTargetServiceTests extends IndexShardTestCase {

    /**
     * Streams all store files of a source shard to a recovery target as randomly-sized,
     * shuffled chunks written by several concurrent threads, then verifies the target's
     * store is byte-identical (empty recovery diff) to the source.
     */
    public void testWriteFileChunksConcurrently() throws Exception {
        IndexShard sourceShard = newStartedShard(true);
        int numDocs = between(20, 100);
        for (int i = 0; i < numDocs; i++) {
            indexDoc(sourceShard, "_doc", Integer.toString(i));
        }
        sourceShard.flush(new FlushRequest());
        Store.MetadataSnapshot sourceSnapshot = sourceShard.store().getMetadata(null);
        List<StoreFileMetadata> mdFiles = new ArrayList<>();
        for (StoreFileMetadata md : sourceSnapshot) {
            mdFiles.add(md);
        }
        final IndexShard targetShard = newShard(false);
        final DiscoveryNode pNode = getFakeDiscoNode(sourceShard.routingEntry().currentNodeId());
        final DiscoveryNode rNode = getFakeDiscoNode(targetShard.routingEntry().currentNodeId());
        targetShard.markAsRecovering("test-peer-recovery", new RecoveryState(targetShard.routingEntry(), rNode, pNode));
        final RecoveryTarget recoveryTarget = new RecoveryTarget(targetShard, null, null);
        // Announce the file names/lengths to the target before sending any chunk.
        final PlainActionFuture<Void> receiveFileInfoFuture = new PlainActionFuture<>();
        recoveryTarget.receiveFileInfo(
            mdFiles.stream().map(StoreFileMetadata::name).collect(Collectors.toList()),
            mdFiles.stream().map(StoreFileMetadata::length).collect(Collectors.toList()),
            Collections.emptyList(), Collections.emptyList(), 0,
            receiveFileInfoFuture
        );
        receiveFileInfoFuture.actionGet();
        // Slice every source file into randomly-sized chunk requests.
        List<RecoveryFileChunkRequest> requests = new ArrayList<>();
        long seqNo = 0;
        for (StoreFileMetadata md : mdFiles) {
            try (IndexInput in = sourceShard.store().directory().openInput(md.name(), IOContext.READONCE)) {
                int pos = 0;
                while (pos < md.length()) {
                    int length = between(1, Math.toIntExact(md.length() - pos));
                    byte[] buffer = new byte[length];
                    in.readBytes(buffer, 0, length);
                    requests.add(new RecoveryFileChunkRequest(0, seqNo++, sourceShard.shardId(), md, pos, new BytesArray(buffer),
                        pos + length == md.length(), 1, 1));
                    pos += length;
                }
            }
        }
        // Deliver the chunks out of order, from multiple sender threads started together.
        Randomness.shuffle(requests);
        BlockingQueue<RecoveryFileChunkRequest> queue = new ArrayBlockingQueue<>(requests.size());
        queue.addAll(requests);
        Thread[] senders = new Thread[between(1, 4)];
        CyclicBarrier barrier = new CyclicBarrier(senders.length);
        for (int i = 0; i < senders.length; i++) {
            senders[i] = new Thread(() -> {
                try {
                    barrier.await();
                    RecoveryFileChunkRequest r;
                    while ((r = queue.poll()) != null) {
                        recoveryTarget.writeFileChunk(r.metadata(), r.position(), r.content(), r.lastChunk(), r.totalTranslogOps(),
                            ActionListener.wrap(ignored -> {}, e -> {
                                throw new AssertionError(e);
                            }));
                    }
                } catch (Exception e) {
                    throw new AssertionError(e);
                }
            });
            senders[i].start();
        }
        for (Thread sender : senders) {
            sender.join();
        }
        // Finalize the file transfer and verify the copied store matches the source.
        PlainActionFuture<Void> cleanFilesFuture = new PlainActionFuture<>();
        recoveryTarget.cleanFiles(0, Long.parseLong(sourceSnapshot.getCommitUserData().get(SequenceNumbers.MAX_SEQ_NO)),
            sourceSnapshot, cleanFilesFuture);
        cleanFilesFuture.actionGet();
        recoveryTarget.decRef();
        Store.MetadataSnapshot targetSnapshot = targetShard.snapshotStoreMetadata();
        Store.RecoveryDiff diff = sourceSnapshot.recoveryDiff(targetSnapshot);
        assertThat(diff.different, empty());
        closeShards(sourceShard, targetShard);
    }

    /**
     * Indexes 100 docs at shuffled sequence numbers on the given replica shard (with
     * occasional random flushes), then moves its global checkpoint to a random value
     * at most the local checkpoint and returns the resulting sequence-number stats.
     */
    private SeqNoStats populateRandomData(IndexShard shard) throws IOException {
        List<Long> seqNos = LongStream.range(0, 100).boxed().collect(Collectors.toList());
        Randomness.shuffle(seqNos);
        for (long seqNo : seqNos) {
            shard.applyIndexOperationOnReplica(seqNo, 1, shard.getOperationPrimaryTerm(), IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
                false, new SourceToParse(shard.shardId().getIndexName(), UUIDs.randomBase64UUID(), new BytesArray("{}"),
                XContentType.JSON));
            if (randomInt(100) < 5) {
                shard.flush(new FlushRequest().waitIfOngoing(true));
            }
        }
        shard.sync();
        long globalCheckpoint = randomLongBetween(SequenceNumbers.NO_OPS_PERFORMED, shard.getLocalCheckpoint());
        shard.updateGlobalCheckpointOnReplica(globalCheckpoint, "test");
        shard.sync();
        return shard.seqNoStats();
    }

    /**
     * Exercises local (pre-network) recovery for four shard states: an empty copy,
     * a good copy, a copy with a corrupted store, and a copy whose translog was
     * replaced by an empty one.
     */
    public void testPrepareIndexForPeerRecovery() throws Exception {
        DiscoveryNode localNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(),
            Collections.emptyMap(), Collections.emptySet(), Version.CURRENT);

        // empty copy: nothing to recover locally.
        IndexShard shard = newShard(false);
        shard.markAsRecovering("for testing", new RecoveryState(shard.routingEntry(), localNode, localNode));
        shard.prepareForIndexRecovery();
        assertThat(shard.recoverLocallyUpToGlobalCheckpoint(), equalTo(UNASSIGNED_SEQ_NO));
        assertThat(shard.recoveryState().getTranslog().totalLocal(), equalTo(RecoveryState.Translog.UNKNOWN));
        assertThat(shard.recoveryState().getTranslog().recoveredOperations(), equalTo(0));
        assertThat(shard.getLastKnownGlobalCheckpoint(), equalTo(UNASSIGNED_SEQ_NO));
        closeShards(shard);

        // good copy: ops between the safe commit and the global checkpoint replay locally.
        shard = newStartedShard(false);
        long globalCheckpoint = populateRandomData(shard).getGlobalCheckpoint();
        Optional<SequenceNumbers.CommitInfo> safeCommit = shard.store().findSafeIndexCommit(globalCheckpoint);
        assertTrue(safeCommit.isPresent());
        // Count the translog ops that local recovery is expected to replay.
        int expectedTotalLocal = 0;
        if (safeCommit.get().localCheckpoint < globalCheckpoint) {
            try (Translog.Snapshot snapshot = getTranslog(shard).newSnapshot(safeCommit.get().localCheckpoint + 1, globalCheckpoint)) {
                Translog.Operation op;
                while ((op = snapshot.next()) != null) {
                    if (op.seqNo() <= globalCheckpoint) {
                        expectedTotalLocal++;
                    }
                }
            }
        }
        IndexShard replica = reinitShard(shard, ShardRoutingHelper.initWithSameId(shard.routingEntry(),
            RecoverySource.PeerRecoverySource.INSTANCE));
        replica.markAsRecovering("for testing", new RecoveryState(replica.routingEntry(), localNode, localNode));
        replica.prepareForIndexRecovery();
        assertThat(replica.recoverLocallyUpToGlobalCheckpoint(), equalTo(globalCheckpoint + 1));
        assertThat(replica.recoveryState().getTranslog().totalLocal(), equalTo(expectedTotalLocal));
        assertThat(replica.recoveryState().getTranslog().recoveredOperations(), equalTo(expectedTotalLocal));
        assertThat(replica.getLastKnownGlobalCheckpoint(), equalTo(UNASSIGNED_SEQ_NO));
        closeShards(replica);

        // corrupted copy: local recovery is abandoned entirely.
        shard = newStartedShard(false);
        if (randomBoolean()) {
            populateRandomData(shard);
        }
        shard.store().markStoreCorrupted(new IOException("test"));
        replica = reinitShard(shard, ShardRoutingHelper.initWithSameId(shard.routingEntry(),
            RecoverySource.PeerRecoverySource.INSTANCE));
        replica.markAsRecovering("for testing", new RecoveryState(replica.routingEntry(), localNode, localNode));
        replica.prepareForIndexRecovery();
        assertThat(replica.recoverLocallyUpToGlobalCheckpoint(), equalTo(UNASSIGNED_SEQ_NO));
        assertThat(replica.recoveryState().getTranslog().totalLocal(), equalTo(RecoveryState.Translog.UNKNOWN));
        assertThat(replica.recoveryState().getTranslog().recoveredOperations(), equalTo(0));
        assertThat(replica.getLastKnownGlobalCheckpoint(), equalTo(UNASSIGNED_SEQ_NO));
        closeShards(replica);

        // copy with truncated translog: recovery can only reach the safe commit, if any.
        shard = newStartedShard(false);
        globalCheckpoint = populateRandomData(shard).getGlobalCheckpoint();
        replica = reinitShard(shard, ShardRoutingHelper.initWithSameId(shard.routingEntry(),
            RecoverySource.PeerRecoverySource.INSTANCE));
        String translogUUID = Translog.createEmptyTranslog(replica.shardPath().resolveTranslog(), globalCheckpoint,
            replica.shardId(), replica.getPendingPrimaryTerm());
        replica.store().associateIndexWithNewTranslog(translogUUID);
        safeCommit = replica.store().findSafeIndexCommit(globalCheckpoint);
        replica.markAsRecovering("for testing", new RecoveryState(replica.routingEntry(), localNode, localNode));
        replica.prepareForIndexRecovery();
        if (safeCommit.isPresent()) {
            assertThat(replica.recoverLocallyUpToGlobalCheckpoint(), equalTo(safeCommit.get().localCheckpoint + 1));
            assertThat(replica.recoveryState().getTranslog().totalLocal(), equalTo(0));
        } else {
            assertThat(replica.recoverLocallyUpToGlobalCheckpoint(), equalTo(UNASSIGNED_SEQ_NO));
            assertThat(replica.recoveryState().getTranslog().totalLocal(), equalTo(RecoveryState.Translog.UNKNOWN));
        }
        assertThat(replica.recoveryState().getTranslog().recoveredOperations(), equalTo(0));
        assertThat(replica.getLastKnownGlobalCheckpoint(), equalTo(UNASSIGNED_SEQ_NO));
        closeShards(replica);
    }

    /**
     * A closed (or write-blocked) index must not replay the translog locally: recovery
     * starts right after the safe commit with zero local operations.
     */
    public void testClosedIndexSkipsLocalRecovery() throws Exception {
        DiscoveryNode localNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(),
            Collections.emptyMap(), Collections.emptySet(), Version.CURRENT);
        IndexShard shard = newStartedShard(false);
        long globalCheckpoint = populateRandomData(shard).getGlobalCheckpoint();
        Optional<SequenceNumbers.CommitInfo> safeCommit = shard.store().findSafeIndexCommit(globalCheckpoint);
        assertTrue(safeCommit.isPresent());
        // Either actually close the index or just block writes; both skip local replay.
        final IndexMetadata indexMetadata;
        if (randomBoolean()) {
            indexMetadata = IndexMetadata.builder(shard.indexSettings().getIndexMetadata())
                .settings(shard.indexSettings().getSettings())
                .state(IndexMetadata.State.CLOSE).build();
        } else {
            indexMetadata = IndexMetadata.builder(shard.indexSettings().getIndexMetadata())
                .settings(Settings.builder().put(shard.indexSettings().getSettings())
                    .put(IndexMetadata.SETTING_BLOCKS_WRITE, true)).build();
        }
        IndexShard replica = reinitShard(shard, ShardRoutingHelper.initWithSameId(shard.routingEntry(),
            RecoverySource.PeerRecoverySource.INSTANCE), indexMetadata, NoOpEngine::new);
        replica.markAsRecovering("for testing", new RecoveryState(replica.routingEntry(), localNode, localNode));
        replica.prepareForIndexRecovery();
        assertThat(replica.recoverLocallyUpToGlobalCheckpoint(), equalTo(safeCommit.get().localCheckpoint + 1));
        assertThat(replica.recoveryState().getTranslog().totalLocal(), equalTo(0));
        assertThat(replica.recoveryState().getTranslog().recoveredOperations(), equalTo(0));
        assertThat(replica.getLastKnownGlobalCheckpoint(), equalTo(UNASSIGNED_SEQ_NO));
        closeShards(replica);
    }

    /**
     * If the store becomes corrupted after local recovery computed a starting seq no,
     * the start-recovery request must fall back to a full file-based recovery
     * (UNASSIGNED_SEQ_NO and an empty metadata snapshot).
     */
    public void testResetStartingSeqNoIfLastCommitCorrupted() throws Exception {
        IndexShard shard = newStartedShard(false);
        populateRandomData(shard);
        DiscoveryNode pNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(),
            Collections.emptyMap(), Collections.emptySet(), Version.CURRENT);
        DiscoveryNode rNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(),
            Collections.emptyMap(), Collections.emptySet(), Version.CURRENT);
        shard = reinitShard(shard, ShardRoutingHelper.initWithSameId(shard.routingEntry(),
            RecoverySource.PeerRecoverySource.INSTANCE));
        shard.markAsRecovering("peer recovery", new RecoveryState(shard.routingEntry(), pNode, rNode));
        shard.prepareForIndexRecovery();
        long startingSeqNo = shard.recoverLocallyUpToGlobalCheckpoint();
        shard.store().markStoreCorrupted(new IOException("simulated"));
        RecoveryTarget recoveryTarget = new RecoveryTarget(shard, null, null);
        StartRecoveryRequest request = PeerRecoveryTargetService.getStartRecoveryRequest(logger, rNode, recoveryTarget, startingSeqNo);
        assertThat(request.startingSeqNo(), equalTo(UNASSIGNED_SEQ_NO));
        assertThat(request.metadataSnapshot().size(), equalTo(0));
        recoveryTarget.decRef();
        closeShards(shard);
    }

    /**
     * If the translog is mismatched, truncated, or deleted, the start-recovery request
     * must likewise reset to a full recovery with an empty metadata snapshot.
     */
    public void testResetStartRequestIfTranslogIsCorrupted() throws Exception {
        DiscoveryNode pNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(),
            Collections.emptyMap(), Collections.emptySet(), Version.CURRENT);
        DiscoveryNode rNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(),
            Collections.emptyMap(), Collections.emptySet(), Version.CURRENT);
        IndexShard shard = newStartedShard(false);
        final SeqNoStats seqNoStats = populateRandomData(shard);
        shard.close("test", false);
        // Break the translog in one of three ways: detach it, replace it, or delete it.
        if (randomBoolean()) {
            shard.store().associateIndexWithNewTranslog(UUIDs.randomBase64UUID());
        } else if (randomBoolean()) {
            Translog.createEmptyTranslog(
                shard.shardPath().resolveTranslog(), seqNoStats.getGlobalCheckpoint(), shard.shardId(),
                shard.getOperationPrimaryTerm());
        } else {
            IOUtils.rm(shard.shardPath().resolveTranslog());
        }
        shard = reinitShard(shard, ShardRoutingHelper.initWithSameId(shard.routingEntry(),
            RecoverySource.PeerRecoverySource.INSTANCE));
        shard.markAsRecovering("peer recovery", new RecoveryState(shard.routingEntry(), pNode, rNode));
        shard.prepareForIndexRecovery();
        RecoveryTarget recoveryTarget = new RecoveryTarget(shard, null, null);
        StartRecoveryRequest request = PeerRecoveryTargetService.getStartRecoveryRequest(
            logger, rNode, recoveryTarget, randomNonNegativeLong());
        assertThat(request.startingSeqNo(), equalTo(UNASSIGNED_SEQ_NO));
        assertThat(request.metadataSnapshot(), sameInstance(Store.MetadataSnapshot.EMPTY));
        recoveryTarget.decRef();
        closeShards(shard);
    }
}
/*
 * Copyright 2014 Google Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.flatbuffers;

import static com.google.flatbuffers.Constants.*;

import java.nio.CharBuffer;
import java.nio.charset.CharacterCodingException;
import java.nio.charset.CharsetEncoder;
import java.nio.charset.CoderResult;
import java.util.Arrays;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.charset.Charset;

/// @file
/// @addtogroup flatbuffers_java_api
/// @{

/**
 * Class that helps you build a FlatBuffer.  See the section
 * "Use in Java/C#" in the main FlatBuffers documentation.
 *
 * The buffer is built back-to-front: data is always written at decreasing
 * offsets, which is why `space` counts down and the payload ends up at the
 * end of the backing {@link ByteBuffer}.
 */
public class FlatBufferBuilder {
    /// @cond FLATBUFFERS_INTERNAL
    ByteBuffer bb;                  // Where we construct the FlatBuffer.
    int space;                      // Remaining space in the ByteBuffer.
    static final Charset utf8charset = Charset.forName("UTF-8"); // The UTF-8 character set used by FlatBuffers.
    int minalign = 1;               // Minimum alignment encountered so far.
    int[] vtable = null;            // The vtable for the current table.
    int vtable_in_use = 0;          // The amount of fields we're actually using.
    boolean nested = false;         // Whether we are currently serializing a table.
    boolean finished = false;       // Whether the buffer is finished.
    int object_start;               // Starting offset of the current struct/table.
    int[] vtables = new int[16];    // List of offsets of all vtables.
    int num_vtables = 0;            // Number of entries in `vtables` in use.
    int vector_num_elems = 0;       // For the current vector being built.
    boolean force_defaults = false; // False omits default values from the serialized data.
    CharsetEncoder encoder = utf8charset.newEncoder(); // Reused UTF-8 encoder for createString(CharSequence).
    ByteBuffer dst;                 // Scratch buffer for string encoding, grown on demand.
    /// @endcond

    /**
     * Start with a buffer of size `initial_size`, then grow as required.
     *
     * @param initial_size The initial size of the internal buffer to use.
     */
    public FlatBufferBuilder(int initial_size) {
        if (initial_size <= 0) initial_size = 1;
        space = initial_size;
        bb = newByteBuffer(initial_size);
    }

    /**
     * Start with a buffer of 1KiB, then grow as required.
     */
    public FlatBufferBuilder() {
        this(1024);
    }

    /**
     * Alternative constructor allowing reuse of {@link ByteBuffer}s.  The builder
     * can still grow the buffer as necessary.  User classes should make sure
     * to call {@link #dataBuffer()} to obtain the resulting encoded message.
     *
     * @param existing_bb The byte buffer to reuse.
     */
    public FlatBufferBuilder(ByteBuffer existing_bb) {
        init(existing_bb);
    }

    /**
     * Alternative initializer that allows reusing this object on an existing
     * `ByteBuffer`. This method resets the builder's internal state, but keeps
     * objects that have been allocated for temporary storage.
     *
     * @param existing_bb The byte buffer to reuse.
     * @return Returns `this`.
     */
    public FlatBufferBuilder init(ByteBuffer existing_bb){
        bb = existing_bb;
        bb.clear();
        bb.order(ByteOrder.LITTLE_ENDIAN); // FlatBuffers are always little-endian.
        minalign = 1;
        space = bb.capacity();
        vtable_in_use = 0;
        nested = false;
        finished = false;
        object_start = 0;
        num_vtables = 0;
        vector_num_elems = 0;
        return this;
    }

    /// @cond FLATBUFFERS_INTERNAL
    /**
     * Create a `ByteBuffer` with a given capacity.
     *
     * @param capacity The size of the `ByteBuffer` to allocate.
     * @return Returns the new `ByteBuffer` that was allocated.
     */
    static ByteBuffer newByteBuffer(int capacity) {
        ByteBuffer newbb = ByteBuffer.allocate(capacity);
        newbb.order(ByteOrder.LITTLE_ENDIAN);
        return newbb;
    }

    /**
     * Doubles the size of the backing {@link ByteBuffer} and copies the old data towards the
     * end of the new buffer (since we build the buffer backwards).
     *
     * @param bb The current buffer with the existing data.
     * @return A new byte buffer with the old data copied to it.  The data is
     * located at the end of the buffer.
     */
    static ByteBuffer growByteBuffer(ByteBuffer bb) {
        int old_buf_size = bb.capacity();
        if ((old_buf_size & 0xC0000000) != 0)  // Ensure we don't grow beyond what fits in an int.
            throw new AssertionError("FlatBuffers: cannot grow buffer beyond 2 gigabytes.");
        int new_buf_size = old_buf_size << 1;
        bb.position(0);
        ByteBuffer nbb = newByteBuffer(new_buf_size);
        nbb.position(new_buf_size - old_buf_size); // Old data goes at the END of the new buffer.
        nbb.put(bb);
        return nbb;
    }

    /**
     * Offset relative to the end of the buffer.
     *
     * @return Offset relative to the end of the buffer.
     */
    public int offset() {
        return bb.capacity() - space;
    }

    /**
     * Add zero valued bytes to prepare a new entry to be added.
     *
     * @param byte_size Number of bytes to add.
     */
    public void pad(int byte_size) {
        for (int i = 0; i < byte_size; i++) bb.put(--space, (byte)0);
    }

    /**
     * Prepare to write an element of `size` after `additional_bytes`
     * have been written, e.g. if you write a string, you need to align such that
     * the int length field is aligned to {@link com.google.flatbuffers.Constants#SIZEOF_INT}, and
     * the string data follows it directly.  If all you need to do is alignment, `additional_bytes`
     * will be 0.
     *
     * @param size This is the size of the new element to write.
     * @param additional_bytes The padding size.
     */
    public void prep(int size, int additional_bytes) {
        // Track the biggest thing we've ever aligned to.
        if (size > minalign) minalign = size;
        // Find the amount of alignment needed such that `size` is properly
        // aligned after `additional_bytes`
        int align_size = ((~(bb.capacity() - space + additional_bytes)) + 1) & (size - 1);
        // Reallocate the buffer if needed.
        while (space < align_size + size + additional_bytes) {
            int old_buf_size = bb.capacity();
            bb = growByteBuffer(bb);
            space += bb.capacity() - old_buf_size;
        }
        pad(align_size);
    }

    /**
     * Add a `boolean` to the buffer, backwards from the current location. Doesn't align nor
     * check for space.
     *
     * @param x A `boolean` to put into the buffer.
     */
    public void putBoolean(boolean x) { bb.put      (space -= Constants.SIZEOF_BYTE, (byte)(x ? 1 : 0)); }

    /**
     * Add a `byte` to the buffer, backwards from the current location. Doesn't align nor
     * check for space.
     *
     * @param x A `byte` to put into the buffer.
     */
    public void putByte   (byte    x) { bb.put      (space -= Constants.SIZEOF_BYTE, x); }

    /**
     * Add a `short` to the buffer, backwards from the current location. Doesn't align nor
     * check for space.
     *
     * @param x A `short` to put into the buffer.
     */
    public void putShort  (short   x) { bb.putShort (space -= Constants.SIZEOF_SHORT, x); }

    /**
     * Add an `int` to the buffer, backwards from the current location. Doesn't align nor
     * check for space.
     *
     * @param x An `int` to put into the buffer.
     */
    public void putInt    (int     x) { bb.putInt   (space -= Constants.SIZEOF_INT, x); }

    /**
     * Add a `long` to the buffer, backwards from the current location. Doesn't align nor
     * check for space.
     *
     * @param x A `long` to put into the buffer.
     */
    public void putLong   (long    x) { bb.putLong  (space -= Constants.SIZEOF_LONG, x); }

    /**
     * Add a `float` to the buffer, backwards from the current location. Doesn't align nor
     * check for space.
     *
     * @param x A `float` to put into the buffer.
     */
    public void putFloat  (float   x) { bb.putFloat (space -= Constants.SIZEOF_FLOAT, x); }

    /**
     * Add a `double` to the buffer, backwards from the current location. Doesn't align nor
     * check for space.
     *
     * @param x A `double` to put into the buffer.
     */
    public void putDouble (double  x) { bb.putDouble(space -= Constants.SIZEOF_DOUBLE, x); }
    /// @endcond

    /**
     * Add a `boolean` to the buffer, properly aligned, and grows the buffer (if necessary).
     *
     * @param x A `boolean` to put into the buffer.
     */
    public void addBoolean(boolean x) { prep(Constants.SIZEOF_BYTE, 0); putBoolean(x); }

    /**
     * Add a `byte` to the buffer, properly aligned, and grows the buffer (if necessary).
     *
     * @param x A `byte` to put into the buffer.
     */
    public void addByte   (byte    x) { prep(Constants.SIZEOF_BYTE, 0); putByte   (x); }

    /**
     * Add a `short` to the buffer, properly aligned, and grows the buffer (if necessary).
     *
     * @param x A `short` to put into the buffer.
     */
    public void addShort  (short   x) { prep(Constants.SIZEOF_SHORT, 0); putShort  (x); }

    /**
     * Add an `int` to the buffer, properly aligned, and grows the buffer (if necessary).
     *
     * @param x An `int` to put into the buffer.
     */
    public void addInt    (int     x) { prep(Constants.SIZEOF_INT, 0); putInt    (x); }

    /**
     * Add a `long` to the buffer, properly aligned, and grows the buffer (if necessary).
     *
     * @param x A `long` to put into the buffer.
     */
    public void addLong   (long    x) { prep(Constants.SIZEOF_LONG, 0); putLong   (x); }

    /**
     * Add a `float` to the buffer, properly aligned, and grows the buffer (if necessary).
     *
     * @param x A `float` to put into the buffer.
     */
    public void addFloat  (float   x) { prep(Constants.SIZEOF_FLOAT, 0); putFloat  (x); }

    /**
     * Add a `double` to the buffer, properly aligned, and grows the buffer (if necessary).
     *
     * @param x A `double` to put into the buffer.
     */
    public void addDouble (double  x) { prep(Constants.SIZEOF_DOUBLE, 0); putDouble (x); }

    /**
     * Adds an offset, relative to where it will be written.
     *
     * @param off The offset to add.
     */
    public void addOffset(int off) {
        prep(SIZEOF_INT, 0);  // Ensure alignment is already done.
        assert off <= offset();
        off = offset() - off + SIZEOF_INT; // Stored offsets are relative to where they are written.
        putInt(off);
    }

    /// @cond FLATBUFFERS_INTERNAL
    /**
     * Start a new array/vector of objects.  Users usually will not call
     * this directly.  The `FlatBuffers` compiler will create a start/end
     * method for vector types in generated code.
     * <p>
     * The expected sequence of calls is:
     * <ol>
     * <li>Start the array using this method.</li>
     * <li>Call {@link #addOffset(int)} `num_elems` number of times to set
     * the offset of each element in the array.</li>
     * <li>Call {@link #endVector()} to retrieve the offset of the array.</li>
     * </ol>
     * <p>
     * For example, to create an array of strings, do:
     * <pre>{@code
     * // Need 10 strings
     * FlatBufferBuilder builder = new FlatBufferBuilder(existingBuffer);
     * int[] offsets = new int[10];
     *
     * for (int i = 0; i < 10; i++) {
     *   offsets[i] = fbb.createString(" " + i);
     * }
     *
     * // Have the strings in the buffer, but don't have a vector.
     * // Add a vector that references the newly created strings:
     * builder.startVector(4, offsets.length, 4);
     *
     * // Add each string to the newly created vector
     * // The strings are added in reverse order since the buffer
     * // is filled in back to front
     * for (int i = offsets.length - 1; i >= 0; i--) {
     *   builder.addOffset(offsets[i]);
     * }
     *
     * // Finish off the vector
     * int offsetOfTheVector = fbb.endVector();
     * }</pre>
     *
     * @param elem_size The size of each element in the array.
     * @param num_elems The number of elements in the array.
     * @param alignment The alignment of the array.
     */
    public void startVector(int elem_size, int num_elems, int alignment) {
        notNested();
        vector_num_elems = num_elems;
        prep(SIZEOF_INT, elem_size * num_elems);
        prep(alignment, elem_size * num_elems); // Just in case alignment > int.
        nested = true;
    }

    /**
     * Finish off the creation of an array and all its elements.  The array
     * must be created with {@link #startVector(int, int, int)}.
     *
     * @return The offset at which the newly created array starts.
     * @see #startVector(int, int, int)
     */
    public int endVector() {
        if (!nested)
            throw new AssertionError("FlatBuffers: endVector called without startVector");
        nested = false;
        putInt(vector_num_elems); // The element count is written just before the elements.
        return offset();
    }
    /// @endcond

    /**
     * Create a new array/vector and return a ByteBuffer to be filled later.
     * Call {@link #endVector} after this method to get an offset to the beginning
     * of vector.
     *
     * NOTE: the method name spelling ("Unintialized") is kept as-is because it is
     * part of the public API.
     *
     * @param elem_size the size of each element in bytes.
     * @param num_elems number of elements in the vector.
     * @param alignment byte alignment.
     * @return ByteBuffer with position and limit set to the space allocated for the array.
     */
    public ByteBuffer createUnintializedVector(int elem_size, int num_elems, int alignment) {
        int length = elem_size * num_elems;
        startVector(elem_size, num_elems, alignment);

        bb.position(space -= length);

        // Slice and limit the copy vector to point to the 'array'
        ByteBuffer copy = bb.slice().order(ByteOrder.LITTLE_ENDIAN);
        copy.limit(length);
        return copy;
    }

    /**
     * Create a vector of tables.
     *
     * @param offsets Offsets of the tables.
     * @return Returns offset of the vector.
     */
    public int createVectorOfTables(int[] offsets) {
        notNested();
        startVector(Constants.SIZEOF_INT, offsets.length, Constants.SIZEOF_INT);
        // Added in reverse order because the buffer is filled back-to-front.
        for(int i = offsets.length - 1; i >= 0; i--) addOffset(offsets[i]);
        return endVector();
    }

    /**
     * Create a vector of sorted by the key tables.
     *
     * @param obj Instance of the table subclass.
     * @param offsets Offsets of the tables.
     * @return Returns offset of the sorted vector.
     */
    public <T extends Table> int createSortedVectorOfTables(T obj, int[] offsets) {
        obj.sortTables(offsets, bb);
        return createVectorOfTables(offsets);
    }

    /**
     * Encode the string `s` in the buffer using UTF-8.  If {@code s} is
     * already a {@link CharBuffer}, this method is allocation free.
     *
     * @param s The string to encode.
     * @return The offset in the buffer where the encoded string starts.
     */
    public int createString(CharSequence s) {
        int length = s.length();
        int estimatedDstCapacity = (int) (length * encoder.maxBytesPerChar());
        // Reuse (and grow as needed) the scratch buffer `dst` to avoid per-call allocation.
        if (dst == null || dst.capacity() < estimatedDstCapacity) {
            dst = ByteBuffer.allocate(Math.max(128, estimatedDstCapacity));
        }

        dst.clear();

        CharBuffer src = s instanceof CharBuffer ? (CharBuffer) s : CharBuffer.wrap(s);
        CoderResult result = encoder.encode(src, dst, true);
        if (result.isError()) {
            try {
                result.throwException();
            } catch (CharacterCodingException x) {
                throw new Error(x);
            }
        }

        dst.flip();
        return createString(dst);
    }

    /**
     * Create a string in the buffer from an already encoded UTF-8 string in a ByteBuffer.
     *
     * @param s An already encoded UTF-8 string as a `ByteBuffer`.
     * @return The offset in the buffer where the encoded string starts.
     */
    public int createString(ByteBuffer s) {
        int length = s.remaining();
        addByte((byte)0); // Strings are null-terminated in the buffer.
        startVector(1, length, 1);
        bb.position(space -= length);
        bb.put(s);
        return endVector();
    }

    /**
     * Create a byte array in the buffer.
     *
     * @param arr A source array with data
     * @return The offset in the buffer where the encoded array starts.
     */
    public int createByteVector(byte[] arr) {
        int length = arr.length;
        startVector(1, length, 1);
        bb.position(space -= length);
        bb.put(arr);
        return endVector();
    }

    /// @cond FLATBUFFERS_INTERNAL
    /**
     * Should not be accessing the final buffer before it is finished.
     */
    public void finished() {
        if (!finished)
            throw new AssertionError(
                "FlatBuffers: you can only access the serialized buffer after it has been" +
                " finished by FlatBufferBuilder.finish().");
    }

    /**
     * Should not be creating any other object, string or vector
     * while an object is being constructed.
     */
    public void notNested() {
        if (nested)
            throw new AssertionError("FlatBuffers: object serialization must not be nested.");
    }

    /**
     * Structures are always stored inline, they need to be created right
     * where they're used.  You'll get this assertion failure if you
     * created it elsewhere.
     *
     * @param obj The offset of the created object.
     */
    public void Nested(int obj) {
        if (obj != offset())
            throw new AssertionError("FlatBuffers: struct must be serialized inline.");
    }

    /**
     * Start encoding a new object in the buffer.  Users will not usually need to
     * call this directly.
The `FlatBuffers` compiler will generate helper methods * that call this method internally. * <p> * For example, using the "Monster" code found on the "landing page". An * object of type `Monster` can be created using the following code: * * <pre>{@code * int testArrayOfString = Monster.createTestarrayofstringVector(fbb, new int[] { * fbb.createString("test1"), * fbb.createString("test2") * }); * * Monster.startMonster(fbb); * Monster.addPos(fbb, Vec3.createVec3(fbb, 1.0f, 2.0f, 3.0f, 3.0, * Color.Green, (short)5, (byte)6)); * Monster.addHp(fbb, (short)80); * Monster.addName(fbb, str); * Monster.addInventory(fbb, inv); * Monster.addTestType(fbb, (byte)Any.Monster); * Monster.addTest(fbb, mon2); * Monster.addTest4(fbb, test4); * Monster.addTestarrayofstring(fbb, testArrayOfString); * int mon = Monster.endMonster(fbb); * }</pre> * <p> * Here: * <ul> * <li>The call to `Monster#startMonster(FlatBufferBuilder)` will call this * method with the right number of fields set.</li> * <li>`Monster#endMonster(FlatBufferBuilder)` will ensure {@link #endObject()} is called.</li> * </ul> * <p> * It's not recommended to call this method directly. If it's called manually, you must ensure * to audit all calls to it whenever fields are added or removed from your schema. This is * automatically done by the code generated by the `FlatBuffers` compiler. * * @param numfields The number of fields found in this object. */ public void startObject(int numfields) { notNested(); if (vtable == null || vtable.length < numfields) vtable = new int[numfields]; vtable_in_use = numfields; Arrays.fill(vtable, 0, vtable_in_use, 0); nested = true; object_start = offset(); } /** * Add a `boolean` to a table at `o` into its vtable, with value `x` and default `d`. * * @param o The index into the vtable. * @param x A `boolean` to put into the buffer, depending on how defaults are handled. If * `force_defaults` is `false`, compare `x` against the default value `d`. 
If `x` contains the * default value, it can be skipped. * @param d A `boolean` default value to compare against when `force_defaults` is `false`. */ public void addBoolean(int o, boolean x, boolean d) { if(force_defaults || x != d) { addBoolean(x); slot(o); } } /** * Add a `byte` to a table at `o` into its vtable, with value `x` and default `d`. * * @param o The index into the vtable. * @param x A `byte` to put into the buffer, depending on how defaults are handled. If * `force_defaults` is `false`, compare `x` against the default value `d`. If `x` contains the * default value, it can be skipped. * @param d A `byte` default value to compare against when `force_defaults` is `false`. */ public void addByte (int o, byte x, int d) { if(force_defaults || x != d) { addByte (x); slot(o); } } /** * Add a `short` to a table at `o` into its vtable, with value `x` and default `d`. * * @param o The index into the vtable. * @param x A `short` to put into the buffer, depending on how defaults are handled. If * `force_defaults` is `false`, compare `x` against the default value `d`. If `x` contains the * default value, it can be skipped. * @param d A `short` default value to compare against when `force_defaults` is `false`. */ public void addShort (int o, short x, int d) { if(force_defaults || x != d) { addShort (x); slot(o); } } /** * Add an `int` to a table at `o` into its vtable, with value `x` and default `d`. * * @param o The index into the vtable. * @param x An `int` to put into the buffer, depending on how defaults are handled. If * `force_defaults` is `false`, compare `x` against the default value `d`. If `x` contains the * default value, it can be skipped. * @param d An `int` default value to compare against when `force_defaults` is `false`. */ public void addInt (int o, int x, int d) { if(force_defaults || x != d) { addInt (x); slot(o); } } /** * Add a `long` to a table at `o` into its vtable, with value `x` and default `d`. * * @param o The index into the vtable. 
* @param x A `long` to put into the buffer, depending on how defaults are handled. If * `force_defaults` is `false`, compare `x` against the default value `d`. If `x` contains the * default value, it can be skipped. * @param d A `long` default value to compare against when `force_defaults` is `false`. */ public void addLong (int o, long x, long d) { if(force_defaults || x != d) { addLong (x); slot(o); } } /** * Add a `float` to a table at `o` into its vtable, with value `x` and default `d`. * * @param o The index into the vtable. * @param x A `float` to put into the buffer, depending on how defaults are handled. If * `force_defaults` is `false`, compare `x` against the default value `d`. If `x` contains the * default value, it can be skipped. * @param d A `float` default value to compare against when `force_defaults` is `false`. */ public void addFloat (int o, float x, double d) { if(force_defaults || x != d) { addFloat (x); slot(o); } } /** * Add a `double` to a table at `o` into its vtable, with value `x` and default `d`. * * @param o The index into the vtable. * @param x A `double` to put into the buffer, depending on how defaults are handled. If * `force_defaults` is `false`, compare `x` against the default value `d`. If `x` contains the * default value, it can be skipped. * @param d A `double` default value to compare against when `force_defaults` is `false`. */ public void addDouble (int o, double x, double d) { if(force_defaults || x != d) { addDouble (x); slot(o); } } /** * Add an `offset` to a table at `o` into its vtable, with value `x` and default `d`. * * @param o The index into the vtable. * @param x An `offset` to put into the buffer, depending on how defaults are handled. If * `force_defaults` is `false`, compare `x` against the default value `d`. If `x` contains the * default value, it can be skipped. * @param d An `offset` default value to compare against when `force_defaults` is `false`. 
*/ public void addOffset (int o, int x, int d) { if(force_defaults || x != d) { addOffset (x); slot(o); } } /** * Add a struct to the table. Structs are stored inline, so nothing additional is being added. * * @param voffset The index into the vtable. * @param x The offset of the created struct. * @param d The default value is always `0`. */ public void addStruct(int voffset, int x, int d) { if(x != d) { Nested(x); slot(voffset); } } /** * Set the current vtable at `voffset` to the current location in the buffer. * * @param voffset The index into the vtable to store the offset relative to the end of the * buffer. */ public void slot(int voffset) { vtable[voffset] = offset(); } /** * Finish off writing the object that is under construction. * * @return The offset to the object inside {@link #dataBuffer()}. * @see #startObject(int) */ public int endObject() { if (vtable == null || !nested) throw new AssertionError("FlatBuffers: endObject called without startObject"); addInt(0); int vtableloc = offset(); // Write out the current vtable. for (int i = vtable_in_use - 1; i >= 0 ; i--) { // Offset relative to the start of the table. short off = (short)(vtable[i] != 0 ? vtableloc - vtable[i] : 0); addShort(off); } final int standard_fields = 2; // The fields below: addShort((short)(vtableloc - object_start)); addShort((short)((vtable_in_use + standard_fields) * SIZEOF_SHORT)); // Search for an existing vtable that matches the current one. int existing_vtable = 0; outer_loop: for (int i = 0; i < num_vtables; i++) { int vt1 = bb.capacity() - vtables[i]; int vt2 = space; short len = bb.getShort(vt1); if (len == bb.getShort(vt2)) { for (int j = SIZEOF_SHORT; j < len; j += SIZEOF_SHORT) { if (bb.getShort(vt1 + j) != bb.getShort(vt2 + j)) { continue outer_loop; } } existing_vtable = vtables[i]; break outer_loop; } } if (existing_vtable != 0) { // Found a match: // Remove the current vtable. space = bb.capacity() - vtableloc; // Point table to existing vtable. 
bb.putInt(space, existing_vtable - vtableloc); } else { // No match: // Add the location of the current vtable to the list of vtables. if (num_vtables == vtables.length) vtables = Arrays.copyOf(vtables, num_vtables * 2); vtables[num_vtables++] = offset(); // Point table to current vtable. bb.putInt(bb.capacity() - vtableloc, offset() - vtableloc); } nested = false; return vtableloc; } /** * Checks that a required field has been set in a given table that has * just been constructed. * * @param table The offset to the start of the table from the `ByteBuffer` capacity. * @param field The offset to the field in the vtable. */ public void required(int table, int field) { int table_start = bb.capacity() - table; int vtable_start = table_start - bb.getInt(table_start); boolean ok = bb.getShort(vtable_start + field) != 0; // If this fails, the caller will show what field needs to be set. if (!ok) throw new AssertionError("FlatBuffers: field " + field + " must be set"); } /// @endcond /** * Finalize a buffer, pointing to the given `root_table`. * * @param root_table An offset to be added to the buffer. */ public void finish(int root_table) { prep(minalign, SIZEOF_INT); addOffset(root_table); bb.position(space); finished = true; } /** * Finalize a buffer, pointing to the given `root_table`. * * @param root_table An offset to be added to the buffer. * @param file_identifier A FlatBuffer file identifier to be added to the buffer before * `root_table`. */ public void finish(int root_table, String file_identifier) { prep(minalign, SIZEOF_INT + FILE_IDENTIFIER_LENGTH); if (file_identifier.length() != FILE_IDENTIFIER_LENGTH) throw new AssertionError("FlatBuffers: file identifier must be length " + FILE_IDENTIFIER_LENGTH); for (int i = FILE_IDENTIFIER_LENGTH - 1; i >= 0; i--) { addByte((byte)file_identifier.charAt(i)); } finish(root_table); } /** * In order to save space, fields that are set to their default value * don't get serialized into the buffer. 
Forcing defaults provides a * way to manually disable this optimization. * * @param forceDefaults When set to `true`, always serializes default values. * @return Returns `this`. */ public FlatBufferBuilder forceDefaults(boolean forceDefaults){ this.force_defaults = forceDefaults; return this; } /** * Get the ByteBuffer representing the FlatBuffer. Only call this after you've * called `finish()`. The actual data starts at the ByteBuffer's current position, * not necessarily at `0`. * * @return The {@link ByteBuffer} representing the FlatBuffer */ public ByteBuffer dataBuffer() { finished(); return bb; } /** * The FlatBuffer data doesn't start at offset 0 in the {@link ByteBuffer}, but * now the {@code ByteBuffer}'s position is set to that location upon {@link #finish(int)}. * * @return The {@link ByteBuffer#position() position} the data starts in {@link #dataBuffer()} * @deprecated This method should not be needed anymore, but is left * here for the moment to document this API change. It will be removed in the future. */ @Deprecated private int dataStart() { finished(); return space; } /** * A utility function to copy and return the ByteBuffer data from `start` to * `start` + `length` as a `byte[]`. * * @param start Start copying at this offset. * @param length How many bytes to copy. * @return A range copy of the {@link #dataBuffer() data buffer}. * @throws IndexOutOfBoundsException If the range of bytes is ouf of bound. */ public byte[] sizedByteArray(int start, int length){ finished(); byte[] array = new byte[length]; bb.position(start); bb.get(array); return array; } /** * A utility function to copy and return the ByteBuffer data as a `byte[]`. * * @return A full copy of the {@link #dataBuffer() data buffer}. */ public byte[] sizedByteArray() { return sizedByteArray(space, bb.capacity() - space); } } /// @}
/* * */ package org.exparity.data; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.UnsupportedEncodingException; import java.net.URL; import java.util.ArrayList; import java.util.Collections; import java.util.List; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang.ArrayUtils; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.Validate; import org.exparity.data.html.Anchor; import org.exparity.data.html.DuplicateTagException; import org.exparity.data.html.HtmlParserTag; import org.exparity.data.html.HtmlSelector; import org.exparity.data.html.Tag; import org.exparity.data.types.Array; import org.exparity.data.types.Table; import org.exparity.io.TextDataSource; import org.exparity.io.classpath.JcpFile; import org.exparity.io.filesystem.FileSystemFile; import org.exparity.io.internet.InternetFile; import org.htmlparser.NodeFilter; import org.htmlparser.Parser; import org.htmlparser.lexer.Lexer; import org.htmlparser.lexer.Page; import org.htmlparser.nodes.TagNode; import org.htmlparser.util.NodeList; import org.htmlparser.util.ParserException; import org.htmlparser.util.SimpleNodeIterator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * {@link HTML} models a HTML document. Instantiate using the factory methods. 
* * @author Stewart Bissett */ public final class HTML extends Text { private static final Logger LOG = LoggerFactory.getLogger(HTML.class); // private static final XmlFactory<HTML> DEFAULT_XML_CONVERTER = new HtmlCleanerXmlFactory(); // Tags public static final String ANCHOR_TAG = "a"; public static final String BREAK_TAG = "br"; public static final String DEFINITION_LIST_TAG = "dl"; public static final String HEAD_TAG = "head"; public static final String META_TAG = "meta"; public static final String ORDERED_LIST_TAG = "ol"; public static final String P_TAG = "p"; public static final String SELECT_TAG = "select"; public static final String SPAN_TAG = "span"; public static final String TABLE_DATA_TAG = "td"; public static final String TABLE_HEADER = "thead"; public static final String TABLE_HEADER_DATA_TAG = "th"; public static final String TABLE_ROW_TAG = "tr"; public static final String TABLE_TAG = "table"; public static final String TD_TAG = "td"; public static final String TITLE_TAG = "title"; public static final String UNORDERED_LIST_TAG = "ul"; // Attributes public static final String CONTENT_ATTRIBUTE = "content"; public static final String HTTP_EQUIV_ATTRIBUTE = "http-equiv"; public static final String HREF_ATTRIBUTE = "href"; // Headers public static final String CONTENT_TYPE_HEADER = "Content-Type"; public static final String LOCATION_HEADER = "Location"; /** * Return an empty {@link HTML} */ public static HTML empty() { return new HTML(new Page(), new NodeList()); } /** * Factory method for creating a {@link HTML} from a file * * @return A {@link HTML} document */ public static HTML openResource(final String resource) throws IOException { return HTML.read(JcpFile.open(resource)); } /** * Factory method for creating a {@link HTML} from a file * * @return A {@link HTML} document */ public static HTML openResource(final String resource, final Class<?> klass) throws IOException { return HTML.read(JcpFile.open(resource, klass)); } /** * Factory method for 
creating a {@link HTML} from a file * * @return A {@link HTML} document */ public static HTML openResource(final String resource, final ClassLoader loader) throws IOException { return HTML.read(JcpFile.open(resource, loader)); } /** * Factory method for creating a {@link HTML} from a file * * @return A {@link HTML} document */ public static HTML openFile(final String file) throws IOException { return HTML.read(FileSystemFile.open(file)); } /** * Factory method for creating a {@link HTML} from a URL. * * @return A {@link HTML} document */ public static HTML openFile(final File file) throws IOException { return HTML.read(FileSystemFile.open(file)); } /** * Factory method for creating a {@link HTML} from a URL. * * @return A {@link HTML} document */ public static HTML openURL(final String url) throws IOException { return HTML.read(InternetFile.open(url)); } /** * Factory method for creating a {@link HTML} from a URL. * * @return A {@link HTML} document */ public static HTML openURL(final URL url) throws FileNotFoundException { return HTML.read(InternetFile.open(url)); } /** * Parse a {@link HTML} file from a {@link TextDataSource} instance such as * {@link org.exparity.io.classpath.JcpFile} */ public static HTML read(final TextDataSource source) { return HTML.read(source.getStream()); } /** * Read a {@link HTML} from an input stream */ public static HTML read(final InputStream is) { return HTML.read(is, "UTF-8"); } /** * Read a {@link HTML} from an input stream */ public static HTML read(final InputStream is, final String charset) { Page page; try { page = new Page(is, charset); } catch (UnsupportedEncodingException e) { throw new BadFormatException(e); } Parser parser = new Parser(new Lexer(page)); try { NodeList list = parser.parse(null); if (list == null || hasNoTagNodes(list) || startsWithoutAngleBracket(page)) { throw new BadFormatException("Data does not appear to be html", page.getText()); } return new HTML(page, list); } catch (ParserException e) { throw new 
BadFormatException(e); } finally { parser = null; } } private static boolean startsWithoutAngleBracket(final Page page) { return !page.getText().trim().startsWith("<"); } private static boolean hasNoTagNodes(final NodeList list) { for (SimpleNodeIterator i = list.elements(); i.hasMoreNodes();) { if (TagNode.class.isInstance(i.nextNode())) { return false; } } return true; } private final NodeList nodelist; private final String text; private HTML(final Page page, final NodeList list) { Validate.notNull(page, "Text cannot be null"); Validate.notNull(list, "Node cannot be null"); this.nodelist = list; this.text = page.getText(); } /** * Extract a collection of {@link Tag} instances from the HTML document which match the {@link HtmlSelector} * predicates. */ public List<Tag> findTags(final HtmlSelector... selectors) { NodeList found = findNodes(selectors); if (found == null) { return Collections.emptyList(); } return HtmlParserTag.of(found); } /** * Extract a single {@link Tag} instance from the HTML document which match the {@link HtmlSelector} predicates. If * no match is found then <code>null</code> is returned. */ public Tag findUnique(final HtmlSelector... selectors) { List<Tag> found = findTags(selectors); if (CollectionUtils.isEmpty(found)) { return null; } else if (found.size() == 1) { return found.get(0); } else { throw new DuplicateTagException("Found multiple tags when one or zero was expected"); } } /** * Return the length of the HTML document */ public int getLength() { return text.length(); } /** * Return the text of the HTML document */ @Override public String getText() { return text; } /** * Return if the document is empty */ public boolean isEmpty() { return StringUtils.isBlank(text); } /** * Return the contents of the first TITLE tag from this HTML document, or null if one does not exist */ public String getTitle() { List<Tag> titles = findTags(HtmlSelector.TITLE); return titles.isEmpty() ? 
null : titles.get(0).getText(); } /** * Extract the TABLE elements from the HTML document which match the {@link HtmlSelector} predicates and return them * as zero or more {@link Table} instances */ public List<Table> findTables(final HtmlSelector... selectors) { List<Table> tables = new ArrayList<>(); for (Tag tag : findTags((HtmlSelector[]) ArrayUtils.add(selectors, HtmlSelector.TABLE))) { try { tables.add(tag.toTable()); } catch (BadConversionException e) { LOG.warn("Unexpected non-table element " + tag + " when selecting tables"); } } return tables; } public List<Array> findArrays(final HtmlSelector... selectors) { List<Array> arrays = new ArrayList<>(); for (Tag tag : findTags((HtmlSelector[]) ArrayUtils.add(selectors, HtmlSelector.ARRAY))) { try { arrays.add(tag.toArray()); } catch (BadConversionException e) { LOG.warn("Unexpected non-array element " + tag + " when selecting arrays"); } } return arrays; } public List<Anchor> findAnchors(final HtmlSelector... selectors) { List<Anchor> anchors = new ArrayList<>(); for (Tag tag : findTags((HtmlSelector[]) ArrayUtils.add(selectors, HtmlSelector.byTagName(HTML.ANCHOR_TAG)))) { final String href = tag.getAttribute(HTML.HREF_ATTRIBUTE); if (StringUtils.isNotEmpty(href)) { anchors.add(new Anchor(href)); } } return anchors; } private NodeList findNodes(final HtmlSelector... 
selectors) { NodeList found = this.nodelist.extractAllNodesThatMatch(new HtmlParserSelector(selectors), true); if (found == null || found.size() == 0) { return null; } return found; } private class HtmlParserSelector implements NodeFilter { private static final long serialVersionUID = 1L; private final HtmlSelector[] selectors; public HtmlParserSelector(final HtmlSelector[] selectors) { this.selectors = selectors; } @Override public boolean accept(final org.htmlparser.Node node) { if (!(node instanceof org.htmlparser.Tag)) { return false; } org.htmlparser.Tag tag = (org.htmlparser.Tag) node; if (tag.isEndTag()) { return false; } Tag wrapped = HtmlParserTag.of(tag); for (HtmlSelector selector : selectors) { if (!selector.matches(wrapped)) { return false; } } return true; } } }
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/protobuf/wrappers.proto package org.apache.hadoop.hbase.shaded.com.google.protobuf; /** * <pre> * Wrapper message for `bool`. * The JSON representation for `BoolValue` is JSON `true` and `false`. * </pre> * * Protobuf type {@code google.protobuf.BoolValue} */ public final class BoolValue extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.protobuf.BoolValue) BoolValueOrBuilder { // Use BoolValue.newBuilder() to construct. private BoolValue(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private BoolValue() { value_ = false; } @java.lang.Override public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private BoolValue( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!input.skipField(tag)) { done = true; } break; } case 8: { value_ = input.readBool(); break; } } } } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { makeExtensionsImmutable(); } } public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_BoolValue_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_BoolValue_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue.Builder.class); } public static final int VALUE_FIELD_NUMBER = 1; private boolean value_; /** * <pre> * The bool value. * </pre> * * <code>bool value = 1;</code> */ public boolean getValue() { return value_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (value_ != false) { output.writeBool(1, value_); } } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (value_ != false) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeBoolSize(1, value_); } memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue)) { return super.equals(obj); } org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue) obj; boolean result = true; result = result && (getValue() == other.getValue()); return result; } @java.lang.Override public int hashCode() { if 
(memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + VALUE_FIELD_NUMBER; hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( getValue()); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue parseFrom(byte[] data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue parseFrom( byte[] data, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue parseFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue parseFrom( java.io.InputStream input, 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue parseDelimitedFrom( java.io.InputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == 
DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Wrapper message for `bool`. * The JSON representation for `BoolValue` is JSON `true` and `false`. * </pre> * * Protobuf type {@code google.protobuf.BoolValue} */ public static final class Builder extends org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.protobuf.BoolValue) org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValueOrBuilder { public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_BoolValue_descriptor; } protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_BoolValue_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue.Builder.class); } // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); value_ = false; return this; } public 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_BoolValue_descriptor; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue getDefaultInstanceForType() { return org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue.getDefaultInstance(); } public org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue build() { org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue buildPartial() { org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue(this); result.value_ = value_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message 
other) { if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue) { return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue other) { if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue.getDefaultInstance()) return this; if (other.getValue() != false) { setValue(other.getValue()); } onChanged(); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private boolean value_ ; /** * <pre> * The bool value. * </pre> * * <code>bool value = 1;</code> */ public boolean getValue() { return value_; } /** * <pre> * The bool value. * </pre> * * <code>bool value = 1;</code> */ public Builder setValue(boolean value) { value_ = value; onChanged(); return this; } /** * <pre> * The bool value. 
* </pre> * * <code>bool value = 1;</code> */ public Builder clearValue() { value_ = false; onChanged(); return this; } public final Builder setUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return this; } public final Builder mergeUnknownFields( final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { return this; } // @@protoc_insertion_point(builder_scope:google.protobuf.BoolValue) } // @@protoc_insertion_point(class_scope:google.protobuf.BoolValue) private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue(); } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue getDefaultInstance() { return DEFAULT_INSTANCE; } private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<BoolValue> PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<BoolValue>() { public BoolValue parsePartialFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new BoolValue(input, extensionRegistry); } }; public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<BoolValue> parser() { return PARSER; } @java.lang.Override public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<BoolValue> getParserForType() { return PARSER; } public org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
/*! * This program is free software; you can redistribute it and/or modify it under the * terms of the GNU Lesser General Public License, version 2.1 as published by the Free Software * Foundation. * * You should have received a copy of the GNU Lesser General Public License along with this * program; if not, you can obtain a copy at http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html * or from the Free Software Foundation, Inc., * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. * * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. * See the GNU Lesser General Public License for more details. * * Copyright (c) 2002-2017 Hitachi Vantara.. All rights reserved. */ package org.pentaho.platform.dataaccess.metadata.service; import static javax.ws.rs.core.MediaType.APPLICATION_JSON; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.Produces; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.json.JSONException; import org.pentaho.commons.connection.IPentahoResultSet; import org.pentaho.commons.connection.marshal.MarshallableResultSet; import org.pentaho.metadata.model.Domain; import org.pentaho.metadata.model.LogicalModel; import org.pentaho.metadata.query.model.util.QueryXmlHelper; import org.pentaho.metadata.repository.IMetadataDomainRepository; import org.pentaho.platform.api.engine.ILogger; import org.pentaho.platform.dataaccess.datasource.utils.DataAccessPermissionUtil; import org.pentaho.platform.dataaccess.metadata.messages.Messages; import org.pentaho.platform.dataaccess.metadata.model.impl.Model; import org.pentaho.platform.dataaccess.metadata.model.impl.ModelInfo; import org.pentaho.platform.dataaccess.metadata.model.impl.ModelInfoComparator; import 
org.pentaho.platform.dataaccess.metadata.model.impl.Query;
import org.pentaho.platform.engine.core.system.PentahoBase;
import org.pentaho.platform.engine.core.system.PentahoSessionHolder;
import org.pentaho.platform.engine.core.system.PentahoSystem;
import org.pentaho.platform.plugin.action.pentahometadata.MetadataQueryComponent;
import org.pentaho.platform.util.messages.LocaleHelper;
import org.pentaho.pms.core.exception.PentahoMetadataException;

import flexjson.JSONSerializer;

/**
 * Makes lightweight, serializable metadata models available to callers and allows metadata queries to be
 * executed. All objects are simple POJOs. This object can be used as an Axis web service.
 *
 * @author jamesdixon
 */
@Path( "/data-access/api/metadataDA" )
public class MetadataService extends PentahoBase {

  private static final long serialVersionUID = 8481450224870463494L;

  private Log logger = LogFactory.getLog( MetadataService.class );

  public MetadataService() {
    // Only log errors by default; callers can raise the level via setLoggingLevel.
    setLoggingLevel( ILogger.ERROR );
  }

  /**
   * Returns a string that indicates whether the current user has access to edit or view metadata models.
   *
   * @return {@code "EDIT"} if the user may manage datasources, else {@code "VIEW"} if the user may view
   *         them, else {@code "NONE"}
   */
  @GET
  @Path( "/getDatasourcePermissions" )
  @Produces( { APPLICATION_JSON } )
  public String getDatasourcePermissions() {
    boolean canEdit = hasManageAccess();
    boolean canView = hasViewAccess();
    if ( canEdit ) {
      return "EDIT"; //$NON-NLS-1$
    } else if ( canView ) {
      return "VIEW"; //$NON-NLS-1$
    }
    return "NONE"; //$NON-NLS-1$
  }

  /**
   * Returns a list of the available business models.
   *
   * @param domainName optional domain to limit the results; {@code null} means all domains
   * @return sorted array of ModelInfo objects representing the available models, or {@code null} if no
   *         metadata repository is available
   * @throws IOException declared for API compatibility
   */
  public ModelInfo[] listBusinessModels( String domainName ) throws IOException {
    List<ModelInfo> models = new ArrayList<ModelInfo>();

    // get hold of the metadata repository
    IMetadataDomainRepository repo = getMetadataRepository();
    if ( repo == null ) {
      error( Messages.getErrorString( "MetadataService.ERROR_0001_BAD_REPO" ) ); //$NON-NLS-1$
      return null;
    }

    try {
      if ( domainName == null ) {
        // if no domain has been specified, loop over all of them.
        // Reuse the repository reference we already null-checked (the original re-fetched it here).
        for ( String domain : repo.getDomainIds() ) {
          getModelInfos( domain, models );
        }
      } else {
        // get the models for the specified domain
        getModelInfos( domainName, models );
      }
    } catch ( Throwable t ) {
      // deliberate best-effort: a bad domain must not prevent listing the others
      error( Messages.getErrorString( "MetadataService.ERROR_0002_BAD_MODEL_LIST" ), t ); //$NON-NLS-1$
    }

    Collections.sort( models, new ModelInfoComparator() );
    return models.toArray( new ModelInfo[ models.size() ] );
  }

  /**
   * Returns a JSON list of the available business models.
   *
   * @param domainName optional domain to limit the results; {@code null} means all domains
   * @return JSON string of the list of ModelInfo objects representing the available models
   * @throws IOException declared for API compatibility
   */
  public String listBusinessModelsJson( String domainName ) throws IOException {
    ModelInfo[] models = listBusinessModels( domainName );
    JSONSerializer serializer = new JSONSerializer();
    String json = serializer.deepSerialize( models );
    return json;
  }

  /**
   * Adds a ModelInfo object to {@code models} for every logical model of the specified domain. These
   * objects are small and this list is intended to allow a client to provide a list of models to a user
   * so the user can pick which one they want to work with.
   *
   * @param domain id of the domain to inspect
   * @param models list that receives one ModelInfo per logical model
   */
  private void getModelInfos( final String domain, List<ModelInfo> models ) {
    IMetadataDomainRepository repo = getMetadataRepository();
    Domain domainObject = repo.getDomain( domain );

    // find the best locale for the user's current locale
    String locale = LocaleHelper.getClosestLocale( LocaleHelper.getLocale().toString(), domainObject.getLocaleCodes() );

    // iterate over all of the models in this domain
    for ( LogicalModel model : domainObject.getLogicalModels() ) {
      // create a new ModelInfo object and give it the envelope information about the model
      ModelInfo modelInfo = new ModelInfo();
      modelInfo.setDomainId( domain );
      modelInfo.setModelId( model.getId() );
      modelInfo.setModelName( model.getName( locale ) );
      if ( model.getDescription() != null ) {
        String modelDescription = model.getDescription( locale );
        modelInfo.setModelDescription( modelDescription );
      }
      models.add( modelInfo );
    }
  }

  /**
   * Returns a Model object for the requested model. The model will include the basic metadata -
   * categories and columns.
   *
   * @param domainId id of the domain containing the model
   * @param modelId  id of the logical model to load
   * @return the thin Model, or {@code null} if either id is missing or the domain/model cannot be found
   */
  public Model loadModel( String domainId, String modelId ) {
    if ( domainId == null ) {
      // we can't do this without a model
      error( Messages.getErrorString( "MetadataService.ERROR_0003_NULL_DOMAIN" ) ); //$NON-NLS-1$
      return null;
    }

    if ( modelId == null ) {
      // we can't do this without a model
      error( Messages.getErrorString( "MetadataService.ERROR_0004_NULL_Model" ) ); //$NON-NLS-1$
      return null;
    }

    // because it's lighter weight, check the thin model
    Domain domain = getMetadataRepository().getDomain( domainId );
    if ( domain == null ) {
      error( Messages.getErrorString( "MetadataService.ERROR_0005_DOMAIN_NOT_FOUND", domainId ) ); //$NON-NLS-1$
      return null;
    }

    LogicalModel model = domain.findLogicalModel( modelId );
    if ( model == null ) {
      // the model cannot be found or cannot be loaded
      error( Messages.getErrorString( "MetadataService.ERROR_0006_MODEL_NOT_FOUND", modelId ) ); //$NON-NLS-1$
      return null;
    }

    // create the thin metadata model and return it
    MetadataServiceUtil util = getMetadataServiceUtil();
    util.setDomain( domain );
    Model thinModel = util.createThinModel( model, domainId );
    return thinModel;
  }

  /**
   * Returns a JSON Model object for the requested model. The model will include the basic metadata -
   * categories and columns.
   *
   * @param domainId id of the domain containing the model
   * @param modelId  id of the logical model to load
   * @return JSON string of the model
   */
  public String loadModelJson( String domainId, String modelId ) {
    Model model = loadModel( domainId, modelId );
    JSONSerializer serializer = new JSONSerializer();
    String json = serializer.deepSerialize( model );
    return json;
  }

  /**
   * Executes a query model and returns a serializable result set.
   *
   * @param query    the thin query model to execute
   * @param rowLimit an optional row limit, -1 or null means all rows
   * @return the marshallable result set, or {@code null} if the query produced no result set
   */
  public MarshallableResultSet doQuery( Query query, Integer rowLimit ) {
    // convert the thin query to its XML form, then delegate
    MetadataServiceUtil util = getMetadataServiceUtil();
    org.pentaho.metadata.query.model.Query fullQuery = util.convertQuery( query );
    QueryXmlHelper helper = new QueryXmlHelper();
    String xml = helper.toXML( fullQuery );
    return doXmlQuery( xml, rowLimit );
  }

  /**
   * Executes a XML query and returns a serializable result set.
   *
   * @param xml      the query as XML
   * @param rowLimit an optional row limit, -1 or null means all rows
   * @return the marshallable result set, or {@code null} if the query produced no result set
   */
  public MarshallableResultSet doXmlQuery( String xml, Integer rowLimit ) {
    IPentahoResultSet resultSet = executeQuery( xml, rowLimit );
    if ( resultSet == null ) {
      return null;
    }
    MarshallableResultSet result = getMarshallableResultSet();
    result.setResultSet( resultSet );
    return result;
  }

  /**
   * Executes a XML query and returns a JSON serialization of the result set.
   *
   * @param xml      the query as XML
   * @param rowLimit row limit, -1 means all rows
   * @return JSON string of the result set, or {@code null} if the query produced no result set
   */
  public String doXmlQueryToJson( String xml, int rowLimit ) {
    MarshallableResultSet resultSet = doXmlQuery( xml, rowLimit );
    if ( resultSet == null ) {
      return null;
    }
    JSONSerializer serializer = new JSONSerializer();
    String json = serializer.deepSerialize( resultSet );
    return json;
  }

  /**
   * Executes a XML query and returns a CDA compatible JSON serialization of the result set.
   *
   * @param xml      the query as XML
   * @param rowLimit row limit, -1 means all rows
   * @return CDA JSON string, or {@code null} if the query produced no result set or serialization failed
   */
  public String doXmlQueryToCdaJson( String xml, int rowLimit ) {
    IPentahoResultSet resultSet = executeQuery( xml, rowLimit );
    if ( resultSet == null ) {
      return null;
    }
    String json = null;
    try {
      MetadataServiceUtil util = getMetadataServiceUtil();
      Domain domain = util.getDomainObject( xml );
      util.setDomain( domain );
      String locale = LocaleHelper.getClosestLocale( LocaleHelper.getLocale().toString(), domain.getLocaleCodes() );
      json = util.createCdaJson( resultSet, locale );
    } catch ( JSONException e ) {
      error( Messages.getErrorString( "MetadataService.ERROR_0007_JSON_ERROR" ), e ); //$NON-NLS-1$
    } catch ( PentahoMetadataException e ) {
      error( Messages.getErrorString( "MetadataService.ERROR_0007_BAD_QUERY_DOMAIN" ), e ); //$NON-NLS-1$
    }
    return json;
  }

  /**
   * Executes a JSON query and returns a serializable result set.
   *
   * @param json     the query as JSON
   * @param rowLimit an optional row limit, -1 or null means all rows
   * @return the marshallable result set, or {@code null} if the query produced no result set
   */
  public MarshallableResultSet doJsonQuery( String json, Integer rowLimit ) {
    // return the results
    return doXmlQuery( getQueryXmlFromJson( json ), rowLimit );
  }

  /**
   * Executes a JSON query and returns a JSON serialization of the result set.
   *
   * @param json     the query as JSON
   * @param rowLimit row limit, -1 means all rows
   * @return JSON string of the result set, or {@code null} if the query produced no result set
   */
  public String doJsonQueryToJson( String json, int rowLimit ) {
    // return the results
    return doXmlQueryToJson( getQueryXmlFromJson( json ), rowLimit );
  }

  /**
   * Executes a JSON query and returns a CDA compatible JSON serialization of the result set.
   *
   * @param json     the query as JSON
   * @param rowLimit row limit, -1 means all rows
   * @return CDA JSON string, or {@code null} if the query produced no result set or serialization failed
   */
  public String doJsonQueryToCdaJson( String json, int rowLimit ) {
    // return the results
    return doXmlQueryToCdaJson( getQueryXmlFromJson( json ), rowLimit );
  }

  /**
   * Executes a XML query and returns a native result set.
   *
   * @param query    the query as XML
   * @param rowLimit an optional row limit, -1 or null means all rows
   * @return the native result set, or {@code null} if execution failed
   */
  protected IPentahoResultSet executeQuery( String query, Integer rowLimit ) {
    // create a component to execute the query
    MetadataQueryComponent dataComponent = new MetadataQueryComponent();
    dataComponent.setQuery( query );
    dataComponent.setLive( false );
    dataComponent.setUseForwardOnlyResultSet( true );
    if ( rowLimit != null && rowLimit > -1 ) {
      // set the row limit
      dataComponent.setMaxRows( rowLimit );
    }
    if ( dataComponent.execute() ) {
      return dataComponent.getResultSet();
    }
    return null;
  }

  /**
   * Converts a JSON query into a full Query XML document by going via a thin Query object.
   *
   * @param json the query as JSON
   * @return the query as XML, or {@code null} if conversion failed
   */
  protected String getQueryXmlFromJson( String json ) {
    MetadataServiceUtil util = getMetadataServiceUtil();
    Query query = util.deserializeJsonQuery( json );
    try {
      // convert the thin query model into a full one
      org.pentaho.metadata.query.model.Query fullQuery = util.convertQuery( query );
      // get the XML for the query
      QueryXmlHelper helper = new QueryXmlHelper();
      String xml = helper.toXML( fullQuery );
      return xml;
    } catch ( Exception e ) {
      error( Messages.getErrorString( "MetadataService.ERROR_0008_BAD_QUERY" ), e ); //$NON-NLS-1$
    }
    return null;
  }

  /**
   * Returns a instance of the IMetadataDomainRepository for the current session.
   *
   * @return the session's metadata domain repository, or {@code null} if none is registered
   */
  protected IMetadataDomainRepository getMetadataRepository() {
    IMetadataDomainRepository mdr =
        PentahoSystem.get( IMetadataDomainRepository.class, PentahoSessionHolder.getSession() );
    if ( mdr instanceof ILogger ) {
      // propagate this service's logging level to the repository
      ( (ILogger) mdr ).setLoggingLevel( getLoggingLevel() );
    }
    return mdr;
  }

  @Override
  public Log getLogger() {
    return logger;
  }

  // Thin seams below exist so tests can override permission checks and collaborators.

  protected boolean hasManageAccess() {
    return DataAccessPermissionUtil.hasManageAccess();
  }

  protected boolean hasViewAccess() {
    return DataAccessPermissionUtil.hasViewAccess();
  }

  protected MetadataServiceUtil getMetadataServiceUtil() {
    return new MetadataServiceUtil();
  }

  protected MarshallableResultSet getMarshallableResultSet() {
    return new MarshallableResultSet();
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package groovy.json; import groovy.json.internal.CharBuf; import groovy.json.internal.Chr; import groovy.lang.Closure; import groovy.util.Expando; import org.codehaus.groovy.runtime.DefaultGroovyMethods; import java.io.File; import java.math.BigDecimal; import java.math.BigInteger; import java.net.URL; import java.text.SimpleDateFormat; import java.util.Arrays; import java.util.Calendar; import java.util.Collections; import java.util.Date; import java.util.Enumeration; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.TimeZone; import java.util.UUID; import static groovy.json.JsonOutput.CLOSE_BRACE; import static groovy.json.JsonOutput.CLOSE_BRACKET; import static groovy.json.JsonOutput.COMMA; import static groovy.json.JsonOutput.EMPTY_LIST_CHARS; import static groovy.json.JsonOutput.EMPTY_MAP_CHARS; import static groovy.json.JsonOutput.EMPTY_STRING_CHARS; import static groovy.json.JsonOutput.OPEN_BRACE; import static groovy.json.JsonOutput.OPEN_BRACKET; /** * A JsonGenerator that can be configured with various {@link JsonGenerator.Options}. 
 * If the default options are sufficient consider using the static {@code JsonOutput.toJson}
 * methods.
 *
 * @see JsonGenerator.Options#build()
 * @since 2.5.0
 */
public class DefaultJsonGenerator implements JsonGenerator {

    protected final boolean excludeNulls;
    protected final boolean disableUnicodeEscaping;
    protected final String dateFormat;
    protected final Locale dateLocale;
    protected final TimeZone timezone;
    // LinkedHashSet: converter registration order is significant, findConverter returns the first match
    protected final Set<Converter> converters = new LinkedHashSet<Converter>();
    protected final Set<String> excludedFieldNames = new HashSet<String>();
    protected final Set<Class<?>> excludedFieldTypes = new HashSet<Class<?>>();

    // Snapshots the given Options into final fields so the generator is immutable after construction.
    protected DefaultJsonGenerator(Options options) {
        excludeNulls = options.excludeNulls;
        disableUnicodeEscaping = options.disableUnicodeEscaping;
        dateFormat = options.dateFormat;
        dateLocale = options.dateLocale;
        timezone = options.timezone;
        if (!options.converters.isEmpty()) {
            converters.addAll(options.converters);
        }
        if (!options.excludedFieldNames.isEmpty()) {
            excludedFieldNames.addAll(options.excludedFieldNames);
        }
        if (!options.excludedFieldTypes.isEmpty()) {
            excludedFieldTypes.addAll(options.excludedFieldTypes);
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String toJson(Object object) {
        CharBuf buffer = CharBuf.create(255);
        writeObject(object, buffer);
        return buffer.toString();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean isExcludingFieldsNamed(String name) {
        return excludedFieldNames.contains(name);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean isExcludingValues(Object value) {
        // null is governed by the excludeNulls option; everything else by excluded types
        if (value == null) {
            return excludeNulls;
        } else {
            return shouldExcludeType(value.getClass());
        }
    }

    /**
     * Serializes Number value and writes it into specified buffer.
     *
     * @param numberClass concrete runtime class of {@code value}, used to pick the typed buffer method
     * @param value       the number to serialize
     * @param buffer      output buffer the JSON text is appended to
     * @throws JsonException if the value is an infinite or NaN float/double (not representable in JSON)
     */
    protected void writeNumber(Class<?> numberClass, Number value, CharBuf buffer) {
        if (numberClass == Integer.class) {
            buffer.addInt((Integer) value);
        } else if (numberClass == Long.class) {
            buffer.addLong((Long) value);
        } else if (numberClass == BigInteger.class) {
            buffer.addBigInteger((BigInteger) value);
        } else if (numberClass == BigDecimal.class) {
            buffer.addBigDecimal((BigDecimal) value);
        } else if (numberClass == Double.class) {
            Double doubleValue = (Double) value;
            if (doubleValue.isInfinite()) {
                throw new JsonException("Number " + value + " can't be serialized as JSON: infinite are not allowed in JSON.");
            }
            if (doubleValue.isNaN()) {
                throw new JsonException("Number " + value + " can't be serialized as JSON: NaN are not allowed in JSON.");
            }
            buffer.addDouble(doubleValue);
        } else if (numberClass == Float.class) {
            Float floatValue = (Float) value;
            if (floatValue.isInfinite()) {
                throw new JsonException("Number " + value + " can't be serialized as JSON: infinite are not allowed in JSON.");
            }
            if (floatValue.isNaN()) {
                throw new JsonException("Number " + value + " can't be serialized as JSON: NaN are not allowed in JSON.");
            }
            buffer.addFloat(floatValue);
        } else if (numberClass == Byte.class) {
            buffer.addByte((Byte) value);
        } else if (numberClass == Short.class) {
            buffer.addShort((Short) value);
        } else { // Handle other Number implementations
            buffer.addString(value.toString());
        }
    }

    // Convenience overload for values that have no associated map key.
    protected void writeObject(Object object, CharBuf buffer) {
        writeObject(null, object, buffer);
    }

    /**
     * Serializes object and writes it into specified buffer.
     *
     * <p>Dispatches on the runtime type of {@code object}. NOTE: the order of the instanceof
     * checks below is significant — e.g. CharSequence must be tested before Map/Iterable, and the
     * special Groovy types (Closure, Expando) before the generic bean fallback.</p>
     *
     * @param key    map key this value belongs to, or {@code null}; passed to converters
     * @param object value to serialize (may be {@code null})
     * @param buffer output buffer the JSON text is appended to
     */
    protected void writeObject(String key, Object object, CharBuf buffer) {
        if (isExcludingValues(object)) {
            return;
        }

        if (object == null) {
            buffer.addNull();
            return;
        }

        Class<?> objectClass = object.getClass();

        // A registered converter may replace the value; re-read its class afterwards.
        Converter converter = findConverter(objectClass);
        if (converter != null) {
            object = converter.convert(object, key);
            objectClass = object.getClass();
        }

        if (CharSequence.class.isAssignableFrom(objectClass)) { // Handle String, StringBuilder, GString and other CharSequence implementations
            writeCharSequence((CharSequence) object, buffer);
        } else if (objectClass == Boolean.class) {
            buffer.addBoolean((Boolean) object);
        } else if (Number.class.isAssignableFrom(objectClass)) {
            writeNumber(objectClass, (Number) object, buffer);
        } else if (Date.class.isAssignableFrom(objectClass)) {
            writeDate((Date) object, buffer);
        } else if (Calendar.class.isAssignableFrom(objectClass)) {
            writeDate(((Calendar) object).getTime(), buffer);
        } else if (Map.class.isAssignableFrom(objectClass)) {
            writeMap((Map) object, buffer);
        } else if (Iterable.class.isAssignableFrom(objectClass)) {
            writeIterator(((Iterable<?>) object).iterator(), buffer);
        } else if (Iterator.class.isAssignableFrom(objectClass)) {
            writeIterator((Iterator) object, buffer);
        } else if (objectClass == Character.class) {
            buffer.addJsonEscapedString(Chr.array((Character) object), disableUnicodeEscaping);
        } else if (objectClass == URL.class) {
            buffer.addJsonEscapedString(object.toString(), disableUnicodeEscaping);
        } else if (objectClass == UUID.class) {
            buffer.addQuoted(object.toString());
        } else if (objectClass == JsonOutput.JsonUnescaped.class) {
            // pre-rendered JSON fragment: emit verbatim, no quoting or escaping
            buffer.add(object.toString());
        } else if (Closure.class.isAssignableFrom(objectClass)) {
            writeMap(JsonDelegate.cloneDelegateAndGetContent((Closure<?>) object), buffer);
        } else if (Expando.class.isAssignableFrom(objectClass)) {
            writeMap(((Expando) object).getProperties(), buffer);
        } else if (Enumeration.class.isAssignableFrom(objectClass)) {
            List<?> list = Collections.list((Enumeration<?>) object);
            writeIterator(list.iterator(), buffer);
        } else if (objectClass.isArray()) {
            writeArray(objectClass, object, buffer);
        } else if (Enum.class.isAssignableFrom(objectClass)) {
            buffer.addQuoted(((Enum<?>) object).name());
        } else if (File.class.isAssignableFrom(objectClass)) {
            Map<?, ?> properties = getObjectProperties(object);
            //Clean up all recursive references to File objects
            Iterator<? extends Map.Entry<?, ?>> iterator = properties.entrySet().iterator();
            while (iterator.hasNext()) {
                Map.Entry<?, ?> entry = iterator.next();
                if (entry.getValue() instanceof File) {
                    iterator.remove();
                }
            }
            writeMap(properties, buffer);
        } else {
            // generic bean fallback: serialize its readable properties as a map
            Map<?, ?> properties = getObjectProperties(object);
            writeMap(properties, buffer);
        }
    }

    // Collects an object's readable properties, dropping the Groovy/Java metadata entries
    // that should never appear in JSON output.
    protected Map<?, ?> getObjectProperties(Object object) {
        Map<?, ?> properties = DefaultGroovyMethods.getProperties(object);
        properties.remove("class");
        properties.remove("declaringClass");
        properties.remove("metaClass");
        return properties;
    }

    /**
     * Serializes any char sequence and writes it into specified buffer.
     */
    protected void writeCharSequence(CharSequence seq, CharBuf buffer) {
        if (seq.length() > 0) {
            buffer.addJsonEscapedString(seq.toString(), disableUnicodeEscaping);
        } else {
            buffer.addChars(EMPTY_STRING_CHARS);
        }
    }

    /**
     * Serializes any char sequence and writes it into specified buffer
     * without performing any manipulation of the given text.
     */
    protected void writeRaw(CharSequence seq, CharBuf buffer) {
        if (seq != null) {
            buffer.add(seq.toString());
        }
    }

    /**
     * Serializes date and writes it into specified buffer.
     */
    protected void writeDate(Date date, CharBuf buffer) {
        // SimpleDateFormat is not thread-safe, so a fresh instance is created per call
        SimpleDateFormat formatter = new SimpleDateFormat(dateFormat, dateLocale);
        formatter.setTimeZone(timezone);
        buffer.addQuoted(formatter.format(date));
    }

    /**
     * Serializes array and writes it into specified buffer.
     *
     * @param arrayClass runtime class of {@code array}, used to select the primitive branch
     * @param array      the array object (Object[] or one of the eight primitive array types)
     * @param buffer     output buffer the JSON text is appended to
     */
    protected void writeArray(Class<?> arrayClass, Object array, CharBuf buffer) {
        // Object arrays go through the generic iterator path; primitive arrays are
        // handled per-type below to avoid boxing.
        if (Object[].class.isAssignableFrom(arrayClass)) {
            Object[] objArray = (Object[]) array;
            writeIterator(Arrays.asList(objArray).iterator(), buffer);
            return;
        }
        buffer.addChar(OPEN_BRACKET);
        // Each branch writes the first element, then ",element" for the rest.
        if (int[].class.isAssignableFrom(arrayClass)) {
            int[] intArray = (int[]) array;
            if (intArray.length > 0) {
                buffer.addInt(intArray[0]);
                for (int i = 1; i < intArray.length; i++) {
                    buffer.addChar(COMMA).addInt(intArray[i]);
                }
            }
        } else if (long[].class.isAssignableFrom(arrayClass)) {
            long[] longArray = (long[]) array;
            if (longArray.length > 0) {
                buffer.addLong(longArray[0]);
                for (int i = 1; i < longArray.length; i++) {
                    buffer.addChar(COMMA).addLong(longArray[i]);
                }
            }
        } else if (boolean[].class.isAssignableFrom(arrayClass)) {
            boolean[] booleanArray = (boolean[]) array;
            if (booleanArray.length > 0) {
                buffer.addBoolean(booleanArray[0]);
                for (int i = 1; i < booleanArray.length; i++) {
                    buffer.addChar(COMMA).addBoolean(booleanArray[i]);
                }
            }
        } else if (char[].class.isAssignableFrom(arrayClass)) {
            char[] charArray = (char[]) array;
            if (charArray.length > 0) {
                buffer.addJsonEscapedString(Chr.array(charArray[0]), disableUnicodeEscaping);
                for (int i = 1; i < charArray.length; i++) {
                    buffer.addChar(COMMA).addJsonEscapedString(Chr.array(charArray[i]), disableUnicodeEscaping);
                }
            }
        } else if (double[].class.isAssignableFrom(arrayClass)) {
            double[] doubleArray = (double[]) array;
            if (doubleArray.length > 0) {
                buffer.addDouble(doubleArray[0]);
                for (int i = 1; i < doubleArray.length; i++) {
                    buffer.addChar(COMMA).addDouble(doubleArray[i]);
                }
            }
        } else if (float[].class.isAssignableFrom(arrayClass)) {
            float[] floatArray = (float[]) array;
            if (floatArray.length > 0) {
                buffer.addFloat(floatArray[0]);
                for (int i = 1; i < floatArray.length; i++) {
                    buffer.addChar(COMMA).addFloat(floatArray[i]);
                }
            }
        } else if (byte[].class.isAssignableFrom(arrayClass)) {
            byte[] byteArray = (byte[]) array;
            if (byteArray.length > 0) {
                buffer.addByte(byteArray[0]);
                for (int i = 1; i < byteArray.length; i++) {
                    buffer.addChar(COMMA).addByte(byteArray[i]);
                }
            }
        } else if (short[].class.isAssignableFrom(arrayClass)) {
            short[] shortArray = (short[]) array;
            if (shortArray.length > 0) {
                buffer.addShort(shortArray[0]);
                for (int i = 1; i < shortArray.length; i++) {
                    buffer.addChar(COMMA).addShort(shortArray[i]);
                }
            }
        }
        buffer.addChar(CLOSE_BRACKET);
    }

    /**
     * Serializes map and writes it into specified buffer.
     *
     * @param map    entries to serialize; keys are stringified via toString, null keys are rejected
     * @param buffer output buffer the JSON text is appended to
     * @throws IllegalArgumentException if the map contains a {@code null} key
     */
    protected void writeMap(Map<?, ?> map, CharBuf buffer) {
        if (map.isEmpty()) {
            buffer.addChars(EMPTY_MAP_CHARS);
            return;
        }
        buffer.addChar(OPEN_BRACE);
        for (Map.Entry<?, ?> entry : map.entrySet()) {
            if (entry.getKey() == null) {
                throw new IllegalArgumentException("Maps with null keys can\'t be converted to JSON");
            }
            String key = entry.getKey().toString();
            Object value = entry.getValue();
            if (isExcludingValues(value) || isExcludingFieldsNamed(key)) {
                continue;
            }
            writeMapEntry(key, value, buffer);
            buffer.addChar(COMMA);
        }
        buffer.removeLastChar(COMMA); // dangling comma
        buffer.addChar(CLOSE_BRACE);
    }

    /**
     * Serializes a map entry and writes it into specified buffer.
     */
    protected void writeMapEntry(String key, Object value, CharBuf buffer) {
        buffer.addJsonFieldName(key, disableUnicodeEscaping);
        // pass the key along so key-aware converters can see it
        writeObject(key, value, buffer);
    }

    /**
     * Serializes iterator and writes it into specified buffer.
     */
    protected void writeIterator(Iterator<?> iterator, CharBuf buffer) {
        if (!iterator.hasNext()) {
            buffer.addChars(EMPTY_LIST_CHARS);
            return;
        }
        buffer.addChar(OPEN_BRACKET);
        while (iterator.hasNext()) {
            Object it = iterator.next();
            if (!isExcludingValues(it)) {
                writeObject(it, buffer);
                buffer.addChar(COMMA);
            }
        }
        buffer.removeLastChar(COMMA); // dangling comma
        buffer.addChar(CLOSE_BRACKET);
    }

    /**
     * Finds a converter that can handle the given type. The first converter
     * that reports it can handle the type is returned, based on the order in
     * which the converters were specified. A {@code null} value will be returned
     * if no suitable converter can be found for the given type.
     *
     * @param type that this converter can handle
     * @return first converter that can handle the given type; else {@code null}
     * if no compatible converters are found for the given type.
     */
    protected Converter findConverter(Class<?> type) {
        for (Converter c : converters) {
            if (c.handles(type)) {
                return c;
            }
        }
        return null;
    }

    /**
     * Indicates whether the given type should be excluded from the generated output.
     *
     * @param type the type to check
     * @return {@code true} if the given type should not be output, else {@code false}
     */
    protected boolean shouldExcludeType(Class<?> type) {
        // isAssignableFrom: excluding a supertype also excludes all of its subtypes
        for (Class<?> t : excludedFieldTypes) {
            if (t.isAssignableFrom(type)) {
                return true;
            }
        }
        return false;
    }

    /**
     * A converter that handles converting a given type using a closure.
     *
     * @since 2.5.0
     */
    protected static class ClosureConverter implements Converter {

        protected final Class<?> type;
        protected final Closure<?> closure;
        // 1 = closure takes the value only; >1 = closure also receives the map key
        protected final int paramCount;

        // Validates that the closure's parameter types are compatible with the declared
        // handled type (first param) and, if present, a String key (second param).
        protected ClosureConverter(Class<?> type, Closure<?> closure) {
            if (type == null) {
                throw new NullPointerException("Type parameter must not be null");
            }
            if (closure == null) {
                throw new NullPointerException("Closure parameter must not be null");
            }

            int paramCount = closure.getMaximumNumberOfParameters();
            if (paramCount < 1) {
                throw new IllegalArgumentException("Closure must accept at least one parameter");
            }
            Class<?> param1 = closure.getParameterTypes()[0];
            if (!param1.isAssignableFrom(type)) {
                throw new IllegalArgumentException("Expected first parameter to be of type: " + type.toString());
            }
            if (paramCount > 1) {
                Class<?> param2 = closure.getParameterTypes()[1];
                if (!param2.isAssignableFrom(String.class)) {
                    throw new IllegalArgumentException("Expected second parameter to be of type: " + String.class.toString());
                }
            }
            this.type = type;
            this.closure = closure;
            this.paramCount = paramCount;
        }

        /**
         * Returns {@code true} if this converter can handle conversions
         * of the given type.
         *
         * @param type the type of the object to convert
         * @return true if this converter can successfully convert values of
         * the given type
         */
        @Override
        public boolean handles(Class<?> type) {
            return this.type.isAssignableFrom(type);
        }

        /**
         * Converts a given value.
         *
         * @param value the object to convert
         * @param key the key name for the value, may be {@code null}
         * @return the converted object
         */
        @Override
        public Object convert(Object value, String key) {
            return (paramCount == 1) ? closure.call(value) : closure.call(value, key);
        }

        /**
         * Any two Converter instances registered for the same type are considered
         * to be equal. This comparison makes managing instances in a Set easier;
         * since there is no chaining of Converters it makes sense to only allow
         * one per type.
         *
         * @param o the object with which to compare.
         * @return {@code true} if this object contains the same class; {@code false} otherwise.
         */
        @Override
        public boolean equals(Object o) {
            if (o == this) {
                return true;
            }
            if (!(o instanceof ClosureConverter)) {
                return false;
            }
            return this.type == ((ClosureConverter) o).type;
        }

        @Override
        public int hashCode() {
            return this.type.hashCode();
        }

        @Override
        public String toString() {
            return super.toString() + "<" + this.type.toString() + ">";
        }
    }

}
/* * Copyright 2018-2020 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.dataflow.server.service.impl; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.springframework.boot.autoconfigure.jdbc.DataSourceProperties; import org.springframework.cloud.dataflow.core.TaskDefinition; import org.springframework.cloud.dataflow.core.dsl.TaskNode; import org.springframework.cloud.dataflow.core.dsl.TaskParser; import org.springframework.cloud.dataflow.server.controller.VisibleProperties; import org.springframework.cloud.deployer.spi.core.AppDefinition; import org.springframework.core.io.Resource; import org.springframework.util.StringUtils; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; /** * Verifies the behavior of the methods in the utility. 
* * @author Glenn Renfro */ public class TaskServiceUtilsTests { public static final String BASE_GRAPH = "AAA && BBB"; @Rule public ExpectedException expectedException; @Test public void testCreateComposedTaskDefinition() { assertThat(TaskServiceUtils.createComposedTaskDefinition(BASE_GRAPH)).isEqualTo("composed-task-runner --graph=\"AAA && BBB\""); } @Test public void testCreateComposeTaskDefinitionNullNameCheck() { assertThrows(IllegalArgumentException.class, () -> { TaskServiceUtils.createComposedTaskDefinition(BASE_GRAPH); TaskServiceUtils.createComposedTaskDefinition(null); }); } @Test public void testCreateComposeTaskDefinitionNullProperties() { assertThrows(IllegalArgumentException.class, () -> { TaskServiceUtils.createComposedTaskDefinition(BASE_GRAPH, null); }); } @Test public void testCTRPropertyReplacement() { TaskNode node = parse("AAA && BBB"); Map<String, String> taskDeploymentProperties = new HashMap<>(); taskDeploymentProperties.put("app.test.BBB.timestamp.format", "aformat"); taskDeploymentProperties.put("deployer.test.BBB.foo", "bar"); taskDeploymentProperties = TaskServiceUtils.establishComposedTaskProperties( taskDeploymentProperties, node); assertThat(taskDeploymentProperties.size()).isEqualTo(1); assertThat(taskDeploymentProperties.get( "app.composed-task-runner.composed-task-properties")) .isEqualTo("app.test-BBB.app.BBB.timestamp.format=aformat, deployer.test-BBB.deployer.BBB.foo=bar"); } @Test public void testDatabasePropUpdate() { TaskDefinition taskDefinition = new TaskDefinition("testTask", "testApp"); DataSourceProperties dataSourceProperties = getDataSourceProperties(); TaskDefinition definition = TaskServiceUtils.updateTaskProperties( taskDefinition, dataSourceProperties, true); assertThat(definition.getProperties().size()).isEqualTo(5); assertThat(definition.getProperties().get("spring.datasource.url")).isEqualTo("myUrl"); assertThat(definition.getProperties().get("spring.datasource.driverClassName")).isEqualTo("myDriver"); 
assertThat(definition.getProperties().get("spring.datasource.username")).isEqualTo("myUser"); assertThat(definition.getProperties().get("spring.datasource.password")).isEqualTo("myPassword"); definition = TaskServiceUtils.updateTaskProperties( taskDefinition, dataSourceProperties, false); assertThat(definition.getProperties().size()).isEqualTo(3); assertThat(definition.getProperties().get("spring.datasource.url")).isEqualTo("myUrl"); assertThat(definition.getProperties().get("spring.datasource.driverClassName")).isEqualTo("myDriver"); } @Test public void testDatabasePropUpdateWithPlatform() { TaskDefinition taskDefinition = new TaskDefinition("testTask", "testApp"); DataSourceProperties dataSourceProperties = getDataSourceProperties(); TaskDefinition definition = TaskServiceUtils.updateTaskProperties( taskDefinition, dataSourceProperties, false); validateProperties(definition, 3); assertThat(definition.getProperties().get("spring.datasource.driverClassName")).isEqualTo("myDriver"); } @Test public void testDatabasePropUpdateWithPlatformForUserDriverClassName() { TaskDefinition definition = createUpdatedDefinitionForProperty("spring.datasource.driverClassName", "foobar"); validateProperties(definition, 2); assertThat(definition.getProperties().get("spring.datasource.driverClassName")).isEqualTo("foobar"); definition = createUpdatedDefinitionForProperty("spring.datasource.driver-class-name", "feebar"); validateProperties(definition, 2); assertThat(definition.getProperties().get("spring.datasource.driver-class-name")).isEqualTo("feebar"); definition = createUpdatedDefinitionForProperty(null, null); validateProperties(definition, 2); assertThat(definition.getProperties().get("spring.datasource.driverClassName")).isEqualTo("myDriver"); } @Test public void testDatabasePropUpdateWithPlatformForUrl() { TaskDefinition definition = createUpdatedDefinitionForProperty("spring.datasource.url", "newurl"); 
assertThat(definition.getProperties().get("spring.datasource.url")).isEqualTo("newurl"); definition = createUpdatedDefinitionForProperty(null, null); assertThat(definition.getProperties().get("spring.datasource.url")).isEqualTo("myUrl"); } private TaskDefinition createUpdatedDefinitionForProperty(String key, String value) { Map<String, String> props = new HashMap<>(); if(StringUtils.hasText(key) && StringUtils.hasText(value)) { props.put(key, value); } TaskDefinition taskDefinition = (new TaskDefinition.TaskDefinitionBuilder()). addProperties(props). setTaskName("testTask"). setRegisteredAppName("testApp"). build(); DataSourceProperties dataSourceProperties = getDataSourceProperties(); return TaskServiceUtils.updateTaskProperties( taskDefinition, dataSourceProperties, false); } private void validateProperties(TaskDefinition definition, int size) { assertThat(definition.getProperties().size()).isEqualTo(size); assertThat(definition.getProperties().get("spring.datasource.url")).isEqualTo("myUrl"); assertThat(definition.getProperties().get("spring.datasource.username")).isNull(); assertThat(definition.getProperties().get("spring.datasource.password")).isNull(); } @Test public void testExtractAppProperties() { Map<String, String> taskDeploymentProperties = new HashMap<>(); taskDeploymentProperties.put("app.test.foo", "bar"); taskDeploymentProperties.put("test.none", "boo"); taskDeploymentProperties.put("app.test.test", "baz"); taskDeploymentProperties.put("app.none.test", "boo"); Map<String, String> result = TaskServiceUtils.extractAppProperties("test", taskDeploymentProperties); assertThat(result.size()).isEqualTo(2); assertThat(result.get("foo")).isEqualTo("bar"); assertThat(result.get("test")).isEqualTo("baz"); } @Test public void testMergeAndExpandAppProperties() { TaskDefinition taskDefinition = new TaskDefinition("testTask", "testApp"); Map<String, String> appDeploymentProperties = new HashMap<>(); appDeploymentProperties.put("propA", "valA"); 
appDeploymentProperties.put("propB", "valB"); VisibleProperties visibleProperties = mock(VisibleProperties.class); org.mockito.BDDMockito.given(visibleProperties .qualifyProperties(any(), any())) .willReturn(appDeploymentProperties); AppDefinition appDefinition = TaskServiceUtils.mergeAndExpandAppProperties( taskDefinition, mock(Resource.class), appDeploymentProperties, visibleProperties); assertThat(appDefinition.getProperties().size()).isEqualTo(2); assertThat(appDefinition.getProperties().get("propA")).isEqualTo("valA"); assertThat(appDefinition.getProperties().get("propB")).isEqualTo("valB"); } @Test public void testDataFlowUriProperty() throws Exception { final String DATA_FLOW_SERVICE_URI = "https://myserver:9191"; List<String> cmdLineArgs = new ArrayList<>(); Map<String, String> appDeploymentProperties = new HashMap<>(); TaskServiceUtils.updateDataFlowUriIfNeeded(DATA_FLOW_SERVICE_URI, appDeploymentProperties, cmdLineArgs); assertTrue(appDeploymentProperties.containsKey("dataflowServerUri")); assertTrue("dataflowServerUri is expected to be in the app deployment properties", appDeploymentProperties.get("dataflowServerUri").equals("https://myserver:9191")); appDeploymentProperties.clear(); appDeploymentProperties.put("dataflow-server-uri", "http://localhost:8080"); TaskServiceUtils.updateDataFlowUriIfNeeded(DATA_FLOW_SERVICE_URI, appDeploymentProperties, cmdLineArgs); assertTrue(!appDeploymentProperties.containsKey("dataflowServerUri")); assertTrue("dataflowServerUri is incorrect", appDeploymentProperties.get("dataflow-server-uri").equals("http://localhost:8080")); appDeploymentProperties.clear(); appDeploymentProperties.put("dataflowServerUri", "http://localhost:8191"); TaskServiceUtils.updateDataFlowUriIfNeeded(DATA_FLOW_SERVICE_URI, appDeploymentProperties, cmdLineArgs); assertTrue(appDeploymentProperties.containsKey("dataflowServerUri")); assertTrue("dataflowServerUri is incorrect", 
appDeploymentProperties.get("dataflowServerUri").equals("http://localhost:8191")); appDeploymentProperties.clear(); appDeploymentProperties.put("DATAFLOW_SERVER_URI", "http://localhost:9000"); TaskServiceUtils.updateDataFlowUriIfNeeded(DATA_FLOW_SERVICE_URI, appDeploymentProperties, cmdLineArgs); assertTrue(!appDeploymentProperties.containsKey("dataflowServerUri")); assertTrue("dataflowServerUri is incorrect", appDeploymentProperties.get("DATAFLOW_SERVER_URI").equals("http://localhost:9000")); appDeploymentProperties.clear(); cmdLineArgs.add("--dataflowServerUri=http://localhost:8383"); TaskServiceUtils.updateDataFlowUriIfNeeded(DATA_FLOW_SERVICE_URI, appDeploymentProperties, cmdLineArgs); assertTrue(!appDeploymentProperties.containsKey("dataflowServerUri")); cmdLineArgs.clear(); cmdLineArgs.add("DATAFLOW_SERVER_URI=http://localhost:8383"); TaskServiceUtils.updateDataFlowUriIfNeeded(DATA_FLOW_SERVICE_URI, appDeploymentProperties, cmdLineArgs); assertTrue(!appDeploymentProperties.containsKey("dataflowServerUri")); assertTrue(!appDeploymentProperties.containsKey("DATAFLOW-SERVER-URI")); } @Test public void testAddProvidedImagePullSecret() { ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties = new ComposedTaskRunnerConfigurationProperties(); composedTaskRunnerConfigurationProperties.setImagePullSecret("regcred"); Map<String, String> taskDeploymentProperties = new HashMap<>(); TaskServiceUtils.addImagePullSecretProperty(taskDeploymentProperties, composedTaskRunnerConfigurationProperties); String imagePullSecretPropertyKey = "deployer.composed-task-runner.kubernetes.imagePullSecret"; assertTrue("Task deployment properties are missing composed task runner imagePullSecret", taskDeploymentProperties.containsKey(imagePullSecretPropertyKey)); assertEquals("Invalid imagePullSecret", "regcred", taskDeploymentProperties.get(imagePullSecretPropertyKey)); } @Test public void testComposedTaskRunnerUriFromTaskProps() { 
ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties = new ComposedTaskRunnerConfigurationProperties(); TaskConfigurationProperties taskConfigurationProperties = new TaskConfigurationProperties(); taskConfigurationProperties.setComposedTaskRunnerConfigurationProperties(composedTaskRunnerConfigurationProperties); taskConfigurationProperties.setComposedTaskRunnerUri("docker://something"); String uri = TaskServiceUtils.getComposedTaskLauncherUri(taskConfigurationProperties, composedTaskRunnerConfigurationProperties); assertEquals("Invalid task runner URI string", "docker://something", uri); } @Test public void testComposedTaskRunnerUriFromCTRProps() { ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties = new ComposedTaskRunnerConfigurationProperties(); composedTaskRunnerConfigurationProperties.setUri("docker://something"); String uri = TaskServiceUtils.getComposedTaskLauncherUri(new TaskConfigurationProperties(), composedTaskRunnerConfigurationProperties); assertEquals("Invalid task runner URI string", "docker://something", uri); } @Test public void testComposedTaskRunnerUriFromCTRPropsOverridesTaskProps() { ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties = new ComposedTaskRunnerConfigurationProperties(); composedTaskRunnerConfigurationProperties.setUri("gcr.io://something"); TaskConfigurationProperties taskConfigurationProperties = new TaskConfigurationProperties(); taskConfigurationProperties.setComposedTaskRunnerConfigurationProperties(composedTaskRunnerConfigurationProperties); taskConfigurationProperties.setComposedTaskRunnerUri("docker://something"); String uri = TaskServiceUtils.getComposedTaskLauncherUri(taskConfigurationProperties, composedTaskRunnerConfigurationProperties); assertEquals("Invalid task runner URI string", "gcr.io://something", uri); } @Test public void testImagePullSecretNullCTRProperties() { Map<String, String> taskDeploymentProperties = new 
HashMap<>(); TaskServiceUtils.addImagePullSecretProperty(taskDeploymentProperties, null); assertFalse("Task deployment properties should not contain imagePullSecret", taskDeploymentProperties.containsKey("deployer.composed-task-runner.kubernetes.imagePullSecret")); } @Test public void testUseUserAccessTokenFromCTRPropsEnabled() { ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties = new ComposedTaskRunnerConfigurationProperties(); composedTaskRunnerConfigurationProperties.setUseUserAccessToken(true); boolean result = TaskServiceUtils.isUseUserAccessToken(null, composedTaskRunnerConfigurationProperties); assertTrue("Use user access token should be true", result); } @Test public void testUseUserAccessTokenFromCTRPropsDisabled() { ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties = new ComposedTaskRunnerConfigurationProperties(); composedTaskRunnerConfigurationProperties.setUseUserAccessToken(false); boolean result = TaskServiceUtils.isUseUserAccessToken(null, composedTaskRunnerConfigurationProperties); assertFalse("Use user access token should be false", result); } @Test public void testUseUserAccessTokenFromNullCTRProps() { TaskConfigurationProperties taskConfigurationProperties = new TaskConfigurationProperties(); taskConfigurationProperties.setComposedTaskRunnerConfigurationProperties(new ComposedTaskRunnerConfigurationProperties()); boolean result = TaskServiceUtils.isUseUserAccessToken(taskConfigurationProperties, null); assertFalse("Use user access token should be false", result); } @Test public void testUseUserAccessTokenFromTaskProps() { TaskConfigurationProperties taskConfigurationProperties = new TaskConfigurationProperties(); taskConfigurationProperties.setComposedTaskRunnerConfigurationProperties(new ComposedTaskRunnerConfigurationProperties()); taskConfigurationProperties.setUseUserAccessToken(true); boolean result = TaskServiceUtils.isUseUserAccessToken(taskConfigurationProperties, null); 
assertTrue("Use user access token should be true", result); } @Test public void testUseUserAccessTokenFromTaskPropsDefault() { TaskConfigurationProperties taskConfigurationProperties = new TaskConfigurationProperties(); taskConfigurationProperties.setComposedTaskRunnerConfigurationProperties(new ComposedTaskRunnerConfigurationProperties()); boolean result = TaskServiceUtils.isUseUserAccessToken(taskConfigurationProperties, null); assertFalse("Use user access token should be false", result); } private TaskNode parse(String dsltext) { TaskNode ctn = new TaskParser("test", dsltext, true, true).parse(); return ctn; } private DataSourceProperties getDataSourceProperties() { DataSourceProperties dataSourceProperties = new DataSourceProperties(); dataSourceProperties.setUsername("myUser"); dataSourceProperties.setDriverClassName("myDriver"); dataSourceProperties.setPassword("myPassword"); dataSourceProperties.setUrl("myUrl"); return dataSourceProperties; } }
package org.apache.velocity.runtime.resource; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Vector; import org.apache.commons.collections.ExtendedProperties; import org.apache.velocity.exception.ParseErrorException; import org.apache.velocity.exception.ResourceNotFoundException; import org.apache.velocity.exception.VelocityException; import org.apache.velocity.runtime.RuntimeConstants; import org.apache.velocity.runtime.RuntimeServices; import org.apache.velocity.runtime.log.Log; import org.apache.velocity.runtime.resource.loader.ResourceLoader; import org.apache.velocity.runtime.resource.loader.ResourceLoaderFactory; import org.apache.velocity.util.ClassUtils; import org.apache.velocity.util.StringUtils; /** * Class to manage the text resource for the Velocity Runtime. * * @author <a href="mailto:wglass@forio.com">Will Glass-Husain</a> * @author <a href="mailto:jvanzyl@apache.org">Jason van Zyl</a> * @author <a href="mailto:paulo.gaspar@krankikom.de">Paulo Gaspar</a> * @author <a href="mailto:geirm@optonline.net">Geir Magnusson Jr.</a> * @author <a href="mailto:henning@apache.org">Henning P. 
 * Schmiedehausen</a>
 * @version $Id: ResourceManagerImpl.java 745757 2009-02-19 06:48:10Z nbubna $
 */
public class ResourceManagerImpl
    implements ResourceManager
{
    /** A template resources. */
    public static final int RESOURCE_TEMPLATE = 1;

    /** A static content resource. */
    public static final int RESOURCE_CONTENT = 2;

    /** token used to identify the loader internally. */
    private static final String RESOURCE_LOADER_IDENTIFIER = "_RESOURCE_LOADER_IDENTIFIER_";

    /** Object implementing ResourceCache to be our resource manager's Resource cache. */
    protected ResourceCache globalCache = null;

    /** The List of templateLoaders that the Runtime will use to locate the InputStream source of a template. */
    protected final List resourceLoaders = new ArrayList();

    /**
     * This is a list of the template input stream source initializers, basically properties for a particular template stream
     * source. The order in this list reflects numbering of the properties i.e.
     *
     * <p>&lt;loader-id&gt;.resource.loader.&lt;property&gt; = &lt;value&gt;</p>
     */
    private final List sourceInitializerList = new ArrayList();

    /**
     * Has this Manager been initialized?
     * NOTE(review): nothing in the visible code ever sets this to {@code true},
     * so the re-initialization guard in initialize() appears ineffective here —
     * confirm against the rest of the file.
     */
    private boolean isInit = false;

    /** switch to turn off log notice when a resource is found for the first time. */
    private boolean logWhenFound = true;

    /** The internal RuntimeServices object. */
    protected RuntimeServices rsvc = null;

    /** Logging. */
    protected Log log = null;

    /**
     * Initialize the ResourceManager: builds the configured resource loaders
     * (by class name or supplied instance) and the global resource cache.
     *
     * @param rsvc The Runtime Services object which is associated with this Resource Manager.
     * @throws VelocityException if a loader or cache class cannot be resolved/instantiated
     */
    public synchronized void initialize(final RuntimeServices rsvc)
    {
        if (isInit)
        {
            log.debug("Re-initialization of ResourceLoader attempted and ignored.");
            return;
        }
        ResourceLoader resourceLoader = null;
        this.rsvc = rsvc;
        log = rsvc.getLog();
        log.trace("Default ResourceManager initializing. (" + this.getClass() + ")");

        assembleResourceLoaderInitializers();

        for (Iterator it = sourceInitializerList.iterator(); it.hasNext();)
        {
            /**
             * Resource loader can be loaded either via class name or be passed
             * in as an instance.
             */
            ExtendedProperties configuration = (ExtendedProperties) it.next();

            String loaderClass = StringUtils.nullTrim(configuration.getString("class"));
            ResourceLoader loaderInstance = (ResourceLoader) configuration.get("instance");

            if (loaderInstance != null)
            {
                resourceLoader = loaderInstance;
            }
            else if (loaderClass != null)
            {
                resourceLoader = ResourceLoaderFactory.getLoader(rsvc, loaderClass);
            }
            else
            {
                String msg = "Unable to find '" +
                             configuration.getString(RESOURCE_LOADER_IDENTIFIER) +
                             ".resource.loader.class' specification in configuration." +
                             " This is a critical value. Please adjust configuration.";
                log.error(msg);
                throw new VelocityException(msg);
            }

            resourceLoader.commonInit(rsvc, configuration);
            resourceLoader.init(configuration);
            resourceLoaders.add(resourceLoader);
        }

        /*
         * now see if this is overridden by configuration
         */
        logWhenFound = rsvc.getBoolean(RuntimeConstants.RESOURCE_MANAGER_LOGWHENFOUND, true);

        /*
         * now, is a global cache specified?
         */
        String cacheClassName = rsvc.getString(RuntimeConstants.RESOURCE_MANAGER_CACHE_CLASS);

        Object cacheObject = null;

        if (org.apache.commons.lang.StringUtils.isNotEmpty(cacheClassName))
        {
            try
            {
                cacheObject = ClassUtils.getNewInstance(cacheClassName);
            }
            catch (ClassNotFoundException cnfe)
            {
                String msg = "The specified class for ResourceCache (" + cacheClassName +
                             ") does not exist or is not accessible to the current classloader.";
                log.error(msg, cnfe);
                throw new VelocityException(msg, cnfe);
            }
            catch (IllegalAccessException ae)
            {
                throw new VelocityException("Could not access class '" + cacheClassName + "'", ae);
            }
            catch (InstantiationException ie)
            {
                throw new VelocityException("Could not instantiate class '" + cacheClassName + "'", ie);
            }

            if (!(cacheObject instanceof ResourceCache))
            {
                String msg = "The specified resource cache class (" + cacheClassName +
                             ") must implement " + ResourceCache.class.getName();
                log.error(msg);
                throw new RuntimeException(msg);
            }
        }

        /*
         * if we didn't get through that, just use the default.
         */
        if (cacheObject == null)
        {
            cacheObject = new ResourceCacheImpl();
        }

        globalCache = (ResourceCache) cacheObject;
        globalCache.initialize(rsvc);

        log.trace("Default ResourceManager initialization complete.");
    }

    /**
     * This will produce a List of Hashtables, each hashtable contains the intialization info for a particular resource loader. This
     * Hashtable will be passed in when initializing the the template loader.
     */
    private void assembleResourceLoaderInitializers()
    {
        Vector resourceLoaderNames = rsvc.getConfiguration().getVector(RuntimeConstants.RESOURCE_LOADER);
        StringUtils.trimStrings(resourceLoaderNames);

        for (Iterator it = resourceLoaderNames.iterator(); it.hasNext(); )
        {
            /*
             * The loader id might look something like the following:
             *
             * file.resource.loader
             *
             * The loader id is the prefix used for all properties
             * pertaining to a particular loader.
             */
            String loaderName = (String) it.next();
            StringBuffer loaderID = new StringBuffer(loaderName);
            loaderID.append(".").append(RuntimeConstants.RESOURCE_LOADER);

            ExtendedProperties loaderConfiguration =
                rsvc.getConfiguration().subset(loaderID.toString());

            /*
             * we can't really count on ExtendedProperties to give us an empty set
             */
            if (loaderConfiguration == null)
            {
                log.debug("ResourceManager : No configuration information found "+
                          "for resource loader named '" + loaderName +
                          "' (id is "+loaderID+"). Skipping it...");
                continue;
            }

            /*
             * add the loader name token to the initializer if we need it
             * for reference later. We can't count on the user to fill
             * in the 'name' field
             */
            loaderConfiguration.setProperty(RESOURCE_LOADER_IDENTIFIER, loaderName);

            /*
             * Add resources to the list of resource loader
             * initializers.
             */
            sourceInitializerList.add(loaderConfiguration);
        }
    }

    /**
     * Gets the named resource. Returned class type corresponds to specified type (i.e. <code>Template</code> to <code>
     * RESOURCE_TEMPLATE</code>).
     *
     * This method is now unsynchronized which requires that ResourceCache
     * implementations be thread safe (as the default is).
     *
     * @param resourceName The name of the resource to retrieve.
     * @param resourceType The type of resource (<code>RESOURCE_TEMPLATE</code>, <code>RESOURCE_CONTENT</code>, etc.).
     * @param encoding The character encoding to use.
     *
     * @return Resource with the template parsed and ready.
     *
     * @throws ResourceNotFoundException if template not found from any available source.
     * @throws ParseErrorException if template cannot be parsed due to syntax (or other) error.
     */
    public Resource getResource(final String resourceName, final int resourceType, final String encoding)
        throws ResourceNotFoundException, ParseErrorException
    {
        /*
         * Check to see if the resource was placed in the cache.
         * If it was placed in the cache then we will use
         * the cached version of the resource. If not we
         * will load it.
         *
         * Note: the type is included in the key to differentiate ContentResource
         * (static content from #include) with a Template.
         */
        String resourceKey = resourceType + resourceName;
        Resource resource = globalCache.get(resourceKey);

        if (resource != null)
        {
            try
            {
                // avoids additional method call to refreshResource
                if (resource.requiresChecking())
                {
                    /*
                     * both loadResource() and refreshResource() now return
                     * a new Resource instance when they are called
                     * (put in the cache when appropriate) in order to allow
                     * several threads to parse the same template simultaneously.
                     * It is redundant work and will cause more garbage collection but the
                     * benefit is that it allows concurrent parsing and processing
                     * without race conditions when multiple requests try to
                     * refresh/load the same template at the same time.
                     *
                     * Another alternative is to limit template parsing/retrieval
                     * so that only one thread can parse each template at a time
                     * but that creates a scalability bottleneck.
                     *
                     * See VELOCITY-606, VELOCITY-595 and VELOCITY-24
                     */
                    resource = refreshResource(resource, encoding);
                }
            }
            catch (ResourceNotFoundException rnfe)
            {
                /*
                 * something exceptional happened to that resource
                 * this could be on purpose,
                 * so clear the cache and try again
                 */
                globalCache.remove(resourceKey);

                return getResource(resourceName, resourceType, encoding);
            }
            catch (ParseErrorException pee)
            {
                log.error("ResourceManager.getResource() exception", pee);
                throw pee;
            }
            catch (RuntimeException re)
            {
                log.error("ResourceManager.getResource() exception", re);
                throw re;
            }
        }
        else
        {
            try
            {
                /*
                 * it's not in the cache, so load it.
                 */
                resource = loadResource(resourceName, resourceType, encoding);

                if (resource.getResourceLoader().isCachingOn())
                {
                    globalCache.put(resourceKey, resource);
                }
            }
            catch (ResourceNotFoundException rnfe)
            {
                log.error("ResourceManager : unable to find resource '" + resourceName + "' in any resource loader.");
                throw rnfe;
            }
            catch (ParseErrorException pee)
            {
                log.error("ResourceManager.getResource() parse exception", pee);
                throw pee;
            }
            catch (RuntimeException re)
            {
                log.error("ResourceManager.getResource() load exception", re);
                throw re;
            }
        }

        return resource;
    }

    /**
     * Create a new Resource of the specified type.
     *
     * @param resourceName The name of the resource to retrieve.
     * @param resourceType The type of resource (<code>RESOURCE_TEMPLATE</code>, <code>RESOURCE_CONTENT</code>, etc.).
     * @return new instance of appropriate resource type
     * @since 1.6
     */
    protected Resource createResource(String resourceName, int resourceType)
    {
        return ResourceFactory.getResource(resourceName, resourceType);
    }

    /**
     * Loads a resource from the current set of resource loaders.
     *
     * @param resourceName The name of the resource to retrieve.
     * @param resourceType The type of resource (<code>RESOURCE_TEMPLATE</code>, <code>RESOURCE_CONTENT</code>, etc.).
     * @param encoding The character encoding to use.
     *
     * @return Resource with the template parsed and ready.
     *
     * @throws ResourceNotFoundException if template not found from any available source.
     * @throws ParseErrorException if template cannot be parsed due to syntax (or other) error.
     */
    protected Resource loadResource(String resourceName, int resourceType, String encoding)
        throws ResourceNotFoundException, ParseErrorException
    {
        Resource resource = createResource(resourceName, resourceType);
        resource.setRuntimeServices(rsvc);
        resource.setName(resourceName);
        resource.setEncoding(encoding);

        /*
         * Now we have to try to find the appropriate
         * loader for this resource. We have to cycle through
         * the list of available resource loaders and see
         * which one gives us a stream that we can use to
         * make a resource with.
         */
        long howOldItWas = 0;

        for (Iterator it = resourceLoaders.iterator(); it.hasNext();)
        {
            ResourceLoader resourceLoader = (ResourceLoader) it.next();
            resource.setResourceLoader(resourceLoader);

            /*
             * catch the ResourceNotFound exception
             * as that is ok in our new multi-loader environment
             */
            try
            {
                if (resource.process())
                {
                    /*
                     * FIXME (gmj)
                     * moved in here - technically still
                     * a problem - but the resource needs to be
                     * processed before the loader can figure
                     * it out due to to the new
                     * multi-path support - will revisit and fix
                     */
                    if (logWhenFound && log.isDebugEnabled())
                    {
                        log.debug("ResourceManager : found " + resourceName +
                                  " with loader " + resourceLoader.getClassName());
                    }

                    howOldItWas = resourceLoader.getLastModified(resource);

                    break;
                }
            }
            catch (ResourceNotFoundException rnfe)
            {
                /*
                 * that's ok - it's possible to fail in
                 * multi-loader environment
                 */
            }
        }

        /*
         * Return null if we can't find a resource.
         */
        if (resource.getData() == null)
        {
            throw new ResourceNotFoundException("Unable to find resource '" + resourceName + "'");
        }

        /*
         * some final cleanup
         */
        resource.setLastModified(howOldItWas);
        resource.setModificationCheckInterval(resource.getResourceLoader().getModificationCheckInterval());

        resource.touch();

        return resource;
    }

    /**
     * Takes an existing resource, and 'refreshes' it. This generally means that the source of the resource is checked for changes
     * according to some cache/check algorithm and if the resource changed, then the resource data is reloaded and re-parsed.
     *
     * @param resource resource to refresh
     * @param encoding character encoding of the resource to refresh.
     *
     * @throws ResourceNotFoundException if template not found from current source for this Resource
     * @throws ParseErrorException if template cannot be parsed due to syntax (or other) error.
*/
protected Resource refreshResource(Resource resource, final String encoding)
    throws ResourceNotFoundException, ParseErrorException
{
    /*
     * The resource knows whether it needs to be checked or not, and the
     * resource's loader can check to see if the source has been modified.
     * If both these conditions are true then we must reload the input
     * stream and parse it to make a new AST for the resource.
     */

    /*
     * touch() the resource to reset the counters
     */
    resource.touch();

    /*
     * Check whether this can now be found in a higher-priority resource
     * loader. If so, pass the request off to loadResource so the resource
     * is re-obtained from the winning loader.
     */
    ResourceLoader loader = resource.getResourceLoader();
    if (resourceLoaders.size() > 0 && resourceLoaders.indexOf(loader) > 0)
    {
        String name = resource.getName();
        if (loader != getLoaderForResource(name))
        {
            return loadResource(name, resource.getType(), encoding);
        }
    }

    if (resource.isSourceModified())
    {
        /*
         * now check encoding info. It's possible that the newly declared
         * encoding is different than the encoding already in the resource;
         * warn about it, then adopt the new declaration.
         */
        if (!org.apache.commons.lang.StringUtils.equals(resource.getEncoding(), encoding))
        {
            // FIX: the original message was missing the closing quote after
            // the new encoding value, producing an unbalanced log line.
            log.warn("Declared encoding for template '" + resource.getName() +
                     "' is different on reload. Old = '" + resource.getEncoding() +
                     "' New = '" + encoding + "'");
            resource.setEncoding(encoding);
        }

        /*
         * read how old the resource is _before_ processing (=> reading) it,
         * so a concurrent change to the source is not silently missed.
         */
        long howOldItWas = loader.getLastModified(resource);

        // Cache key convention: type prepended to the name (matches getResource()).
        String resourceKey = resource.getType() + resource.getName();

        /*
         * we create a copy to avoid partially overwriting a template which
         * may be in use in another thread.
         */
        Resource newResource = ResourceFactory.getResource(resource.getName(), resource.getType());
        newResource.setRuntimeServices(rsvc);
        newResource.setName(resource.getName());
        newResource.setEncoding(resource.getEncoding());
        newResource.setResourceLoader(loader);
        newResource.setModificationCheckInterval(loader.getModificationCheckInterval());
        newResource.process();
        newResource.setLastModified(howOldItWas);
        resource = newResource;
        globalCache.put(resourceKey, newResource);
    }
    return resource;
}

/**
 * Gets the named resource. Returned class type corresponds to specified type
 * (i.e. <code>Template</code> to <code>RESOURCE_TEMPLATE</code>).
 *
 * @param resourceName The name of the resource to retrieve.
 * @param resourceType The type of resource (<code>RESOURCE_TEMPLATE</code>, <code>RESOURCE_CONTENT</code>, etc.).
 *
 * @return Resource with the template parsed and ready.
 *
 * @throws ResourceNotFoundException if template not found from any available source.
 * @throws ParseErrorException if template cannot be parsed due to syntax (or other) error.
 * @throws Exception if a problem in parse
 *
 * @deprecated Use {@link #getResource(String resourceName, int resourceType, String encoding )}
 */
public Resource getResource(String resourceName, int resourceType)
    throws ResourceNotFoundException, ParseErrorException, Exception
{
    return getResource(resourceName, resourceType, RuntimeConstants.ENCODING_DEFAULT);
}

/**
 * Determines if a template exists, and returns name of the loader that
 * provides it. This is a slightly less hokey way to support the
 * Velocity.templateExists() utility method, which was broken when
 * per-template encoding was introduced. We can revisit this.
 *
 * @param resourceName Name of template or content resource
 *
 * @return class name of loader than can provide it
 */
public String getLoaderNameForResource(String resourceName)
{
    ResourceLoader loader = getLoaderForResource(resourceName);
    if (loader == null)
    {
        return null;
    }
    // NOTE(review): getClass().toString() yields "class x.y.Z", not the bare
    // class name the javadoc suggests; kept as-is since callers may depend
    // on the exact format — confirm before changing to getName().
    return loader.getClass().toString();
}

/**
 * Returns the first {@link ResourceLoader} in which the specified
 * resource exists, honoring the configured loader priority order.
 */
private ResourceLoader getLoaderForResource(String resourceName)
{
    for (Iterator i = resourceLoaders.iterator(); i.hasNext(); )
    {
        ResourceLoader loader = (ResourceLoader) i.next();
        if (loader.resourceExists(resourceName))
        {
            return loader;
        }
    }
    return null;
}

/**
 * @return the global cache holding parsed resources, keyed by type + name.
 */
public ResourceCache getGlobalCache()
{
    return globalCache;
}
}
package WebService.http; import java.util.Properties; import AGVS.Util.Log; import java.io.FileInputStream; public class Config extends Properties { public static final String CONFIG_FILE = "config.properties"; public static final String PROP_HTTP_PORTA = "http.porta"; public static final String PROP_HTTP_DEFAULT_RESOURCE = "http.default.resource"; public static final String PROP_HTTP_DEFAULT_REPOS = "http.default.repos"; public static final int DEFAULT_HTTP_PORTA = 80; public static final String PROP_REP_CONVERT_PAG_HTTP = "pages"; public static final String PROP_LENGHT_BUFFER_SCADA = "bufferScada"; public static final String PROP_LOGIN = "login"; public static final String PROP_PASSWORD = "password"; public static final String PROP_SERIAL_PORT = "serial.port"; public static final String PROP_SERIAL_BAUDRATE = "serial.baudrate"; public static final String PROP_SOROCABA_MS1_MAC64 = "sorocaba_ms1_mac64"; public static final String PROP_SOROCABA_MS1_MAC16 = "sorocaba_ms1_mac16"; public static final String PROP_SOROCABA_MS2_MAC64 = "sorocaba_ms2_mac64"; public static final String PROP_SOROCABA_MS2_MAC16 = "sorocaba_ms2_mac16"; public static final String PROP_SOROCABA_MS3_MAC64 = "sorocaba_ms3_mac64"; public static final String PROP_SOROCABA_MS3_MAC16 = "sorocaba_ms3_mac16"; public static final String PROP_SOROCABA_MS4_MAC64 = "sorocaba_ms4_mac64"; public static final String PROP_SOROCABA_MS4_MAC16 = "sorocaba_ms4_mac16"; public static final String PROP_SOROCABA_MS5_MAC64 = "sorocaba_ms5_mac64"; public static final String PROP_SOROCABA_MS5_MAC16 = "sorocaba_ms5_mac16"; public static final String PROP_SEMAFARO_1_VERDE_1 = "Semafaro1Verde1"; public static final String PROP_SEMAFARO_1_VERDE_2 = "Semafaro1Verde2"; public static final String PROP_SEMAFARO_1_VERDE_3 = "Semafaro1Verde3"; public static final String PROP_SEMAFARO_1_VERDE_4 = "Semafaro1Verde4"; public static final String PROP_SEMAFARO_1_VERMELHO_1 = "Semafaro1Vermelho1"; public static final String 
PROP_SEMAFARO_1_VERMELHO_2 = "Semafaro1Vermelho2"; public static final String PROP_SEMAFARO_1_VERMELHO_3 = "Semafaro1Vermelho3"; public static final String PROP_SEMAFARO_1_VERMELHO_4 = "Semafaro1Vermelho4"; public static final String PROP_SEMAFARO_2_VERDE_1 = "Semafaro2Verde1"; public static final String PROP_SEMAFARO_2_VERDE_2 = "Semafaro2Verde2"; public static final String PROP_SEMAFARO_2_VERDE_3 = "Semafaro2Verde3"; public static final String PROP_SEMAFARO_2_VERDE_4 = "Semafaro2Verde4"; public static final String PROP_SEMAFARO_2_VERMELHO_1 = "Semafaro2Vermelho1"; public static final String PROP_SEMAFARO_2_VERMELHO_2 = "Semafaro2Vermelho2"; public static final String PROP_SEMAFARO_2_VERMELHO_3 = "Semafaro2Vermelho3"; public static final String PROP_SEMAFARO_2_VERMELHO_4 = "Semafaro2Vermelho4"; public static final String PROP_SEMAFARO_3_VERDE_1 = "Semafaro3Verde1"; public static final String PROP_SEMAFARO_3_VERDE_2 = "Semafaro3Verde2"; public static final String PROP_SEMAFARO_3_VERDE_3 = "Semafaro3Verde3"; public static final String PROP_SEMAFARO_3_VERDE_4 = "Semafaro3Verde4"; public static final String PROP_SEMAFARO_3_VERMELHO_1 = "Semafaro3Vermelho1"; public static final String PROP_SEMAFARO_3_VERMELHO_2 = "Semafaro3Vermelho2"; public static final String PROP_SEMAFARO_3_VERMELHO_3 = "Semafaro3Vermelho3"; public static final String PROP_SEMAFARO_3_VERMELHO_4 = "Semafaro3Vermelho4"; public static final String PROP_SEMAFARO_4_VERDE_1 = "Semafaro4Verde1"; public static final String PROP_SEMAFARO_4_VERDE_2 = "Semafaro4Verde2"; public static final String PROP_SEMAFARO_4_VERDE_3 = "Semafaro4Verde3"; public static final String PROP_SEMAFARO_4_VERDE_4 = "Semafaro4Verde4"; public static final String PROP_SEMAFARO_4_VERMELHO_1 = "Semafaro4Vermelho1"; public static final String PROP_SEMAFARO_4_VERMELHO_2 = "Semafaro4Vermelho2"; public static final String PROP_SEMAFARO_4_VERMELHO_3 = "Semafaro4Vermelho3"; public static final String PROP_SEMAFARO_4_VERMELHO_4 = 
"Semafaro4Vermelho4"; public static final String PROP_SEMAFARO_5_VERDE_1 = "Semafaro5Verde1"; public static final String PROP_SEMAFARO_5_VERDE_2 = "Semafaro5Verde2"; public static final String PROP_SEMAFARO_5_VERDE_3 = "Semafaro5Verde3"; public static final String PROP_SEMAFARO_5_VERDE_4 = "Semafaro5Verde4"; public static final String PROP_SEMAFARO_5_VERMELHO_1 = "Semafaro5Vermelho1"; public static final String PROP_SEMAFARO_5_VERMELHO_2 = "Semafaro5Vermelho2"; public static final String PROP_SEMAFARO_5_VERMELHO_3 = "Semafaro5Vermelho3"; public static final String PROP_SEMAFARO_5_VERMELHO_4 = "Semafaro5Vermelho4"; public static final String PROP_SEMAFARO_6_VERDE_1 = "Semafaro6Verde1"; public static final String PROP_SEMAFARO_6_VERDE_2 = "Semafaro6Verde2"; public static final String PROP_SEMAFARO_6_VERDE_3 = "Semafaro6Verde3"; public static final String PROP_SEMAFARO_6_VERDE_4 = "Semafaro6Verde4"; public static final String PROP_SEMAFARO_6_VERMELHO_1 = "Semafaro6Vermelho1"; public static final String PROP_SEMAFARO_6_VERMELHO_2 = "Semafaro6Vermelho2"; public static final String PROP_SEMAFARO_6_VERMELHO_3 = "Semafaro6Vermelho3"; public static final String PROP_SEMAFARO_6_VERMELHO_4 = "Semafaro6Vermelho4"; public static final String PROP_SEMAFARO_7_VERDE_1 = "Semafaro7Verde1"; public static final String PROP_SEMAFARO_7_VERDE_2 = "Semafaro7Verde2"; public static final String PROP_SEMAFARO_7_VERDE_3 = "Semafaro7Verde3"; public static final String PROP_SEMAFARO_7_VERDE_4 = "Semafaro7Verde4"; public static final String PROP_SEMAFARO_7_VERMELHO_1 = "Semafaro7Vermelho1"; public static final String PROP_SEMAFARO_7_VERMELHO_2 = "Semafaro7Vermelho2"; public static final String PROP_SEMAFARO_7_VERMELHO_3 = "Semafaro7Vermelho3"; public static final String PROP_SEMAFARO_7_VERMELHO_4 = "Semafaro7Vermelho4"; public static final String PROP_PARADA_1_TKL_VAZIO = "Parada1TKLVazio"; public static final String PROP_PARADA_1_TKL_VAZIO2 = "Parada1TKLVazio2"; public static final String 
PROP_PARADA_1_TKL_VAZIO3 = "Parada1TKLVazio3"; public static final String PROP_PARADA_2_TKL_VAZIO = "Parada2TKLVazio"; public static final String PROP_PARADA_2_TKL_VAZIO2 = "Parada2TKLVazio2"; public static final String PROP_PARADA_1_TKL_CHEIO = "Parada1TKLCheio"; public static final String PROP_PARADA_1_TKL_CHEIO2 = "Parada1TKLCheio2"; public static final String PROP_ENTRADA_1_CANCELA = "Entrada1Cancela"; public static final String PROP_ENTRADA_2_CANCELA = "Entrada2Cancela"; public static final String PROP_ENTRADA_3_CANCELA = "Entrada3Cancela"; public static final String PROP_ENTRADA_4_CANCELA = "Entrada4Cancela"; public static final String PROP_ENTRADA_5_CANCELA = "Entrada5Cancela"; public static final String PROP_ENTRADA_6_CANCELA = "Entrada6Cancela"; public static final String PROP_ENTRADA_7_CANCELA = "Entrada7Cancela"; public static final String PROP_ENTRADA_8_CANCELA = "Entrada8Cancela"; public static final String PROP_ENTRADA_9_CANCELA = "Entrada9Cancela"; public static final String PROP_ENTRADA_10_CANCELA = "Entrada10Cancela"; public static final String PROP_SAIDA_1_CANCELA = "Saida1Cancela"; public static final String PROP_SAIDA_2_CANCELA = "Saida2Cancela"; public static final String PROP_SAIDA_3_CANCELA = "Saida3Cancela"; public static final String PROP_SAIDA_4_CANCELA = "Saida4Cancela"; public static final String PROP_SAIDA_5_CANCELA = "Saida5Cancela"; public static final String PROP_SAIDA_6_CANCELA = "Saida6Cancela"; public static final String PROP_SAIDA_7_CANCELA = "Saida7Cancela"; public static final String PROP_SAIDA_8_CANCELA = "Saida8Cancela"; public static final String PROP_SAIDA_9_CANCELA = "Saida9Cancela"; public static final String PROP_SAIDA_10_CANCELA = "Saida10Cancela"; public static final String PROP_SAIDA_11_CANCELA = "Saida11Cancela"; public static final String PROP_ENTRADA_1_CAMINHAO = "Entrada1Caminhao"; public static final String PROP_ENTRADA_2_CAMINHAO = "Entrada2Caminhao"; public static final String PROP_ENTRADA_3_CAMINHAO = 
"Entrada3Caminhao"; public static final String PROP_ENTRADA_4_CAMINHAO = "Entrada4Caminhao"; public static final String PROP_SAIDA_1_CAMINHAO = "Saida1Caminhao"; public static final String PROP_SAIDA_2_CAMINHAO = "Saida2Caminhao"; public static final String PROP_SAIDA_3_CAMINHAO = "Saida3Caminhao"; public static final String PROP_SAIDA_4_CAMINHAO = "Saida4Caminhao"; public static final String PROP_P1 = "p1"; public static final String PROP_P2 = "p2"; public static final String PROP_P3 = "p3"; public static final String PROP_P4 = "p4"; public static final String PROP_P5 = "p5"; public static final String PROP_P6 = "p6"; public static final String PROP_GY_MS201 = "gy_ms201"; public static final String PROP_GY_MS202 = "gy_ms202"; public static final String PROP_GY_MS203 = "gy_ms203"; public static final String PROP_GY_MS204 = "gy_ms204"; public static final String PROP_GY_MS205 = "gy_ms205"; public static final String PROP_GY_MS206 = "gy_ms206"; public static final String PROP_GY_MS207 = "gy_ms207"; public static final String PROP_GY_MS208 = "gy_ms208"; public static final String PROP_GY_MS209 = "gy_ms209"; public static final String PROP_GY_MS210 = "gy_ms210"; public static final String PROP_GY_MS211 = "gy_ms211"; public static final String PROP_GY_MS212 = "gy_ms212"; public static final String PROP_GY_MS213 = "gy_ms213"; public static final String PROP_GY_MS214 = "gy_ms214"; public static final String PROP_GY_MS215 = "gy_ms215"; public static final String PROP_GY_MS216 = "gy_ms216"; public static final String PROP_GY_MS217 = "gy_ms217"; public static final String PROP_GY_MS218 = "gy_ms218"; public static final String PROP_GY_MS219 = "gy_ms219"; public static final String PROP_GY_MS220 = "gy_ms220"; public static final String PROP_SEM_H1V1_VD1 = "SEM.H1V1.VD1"; public static final String PROP_SEM_H1V1_VD2 = "SEM.H1V1.VD2"; public static final String PROP_SEM_H1V1_VD3 = "SEM.H1V1.VD3"; public static final String PROP_SEM_H1V1_VD4 = "SEM.H1V1.VD4"; public static final 
String PROP_SEM_H1V1_VD5 = "SEM.H1V1.VD5"; public static final String PROP_SEM_H1V1_VM1 = "SEM.H1V1.VM1"; public static final String PROP_SEM_H1V1_VM2 = "SEM.H1V1.VM2"; public static final String PROP_SEM_H1V1_VM3 = "SEM.H1V1.VM3"; public static final String PROP_SEM_H1V1_VM4 = "SEM.H1V1.VM4"; public static final String PROP_SEM_H1V1_VM5 = "SEM.H1V1.VM5"; public static final String PROP_SEM_H2V1_VD1 = "SEM.H2V1.VD1"; public static final String PROP_SEM_H2V1_VD2 = "SEM.H2V1.VD2"; public static final String PROP_SEM_H2V1_VD3 = "SEM.H2V1.VD3"; public static final String PROP_SEM_H2V1_VD4 = "SEM.H2V1.VD4"; public static final String PROP_SEM_H2V1_VD5 = "SEM.H2V1.VD5"; public static final String PROP_SEM_H2V1_VM1 = "SEM.H2V1.VM1"; public static final String PROP_SEM_H2V1_VM2 = "SEM.H2V1.VM2"; public static final String PROP_SEM_H2V1_VM3 = "SEM.H2V1.VM3"; public static final String PROP_SEM_H2V1_VM4 = "SEM.H2V1.VM4"; public static final String PROP_SEM_H2V1_VM5 = "SEM.H2V1.VM5"; public static final String PROP_SEM_H3V1_VD1 = "SEM.H3V1.VD1"; public static final String PROP_SEM_H3V1_VD2 = "SEM.H3V1.VD2"; public static final String PROP_SEM_H3V1_VD3 = "SEM.H3V1.VD3"; public static final String PROP_SEM_H3V1_VD4 = "SEM.H3V1.VD4"; public static final String PROP_SEM_H3V1_VD5 = "SEM.H3V1.VD5"; public static final String PROP_SEM_H3V1_VM1 = "SEM.H3V1.VM1"; public static final String PROP_SEM_H3V1_VM2 = "SEM.H3V1.VM2"; public static final String PROP_SEM_H3V1_VM3 = "SEM.H3V1.VM3"; public static final String PROP_SEM_H3V1_VM4 = "SEM.H3V1.VM4"; public static final String PROP_SEM_H3V1_VM5 = "SEM.H3V1.VM5"; public static final String PROP_SEM_H4V1_VD1 = "SEM.H4V1.VD1"; public static final String PROP_SEM_H4V1_VD2 = "SEM.H4V1.VD2"; public static final String PROP_SEM_H4V1_VD3 = "SEM.H4V1.VD3"; public static final String PROP_SEM_H4V1_VD4 = "SEM.H4V1.VD4"; public static final String PROP_SEM_H4V1_VD5 = "SEM.H4V1.VD5"; public static final String PROP_SEM_H4V1_VM1 = "SEM.H4V1.VM1"; 
public static final String PROP_SEM_H4V1_VM2 = "SEM.H4V1.VM2"; public static final String PROP_SEM_H4V1_VM3 = "SEM.H4V1.VM3"; public static final String PROP_SEM_H4V1_VM4 = "SEM.H4V1.VM4"; public static final String PROP_SEM_H4V1_VM5 = "SEM.H4V1.VM5"; public static final String PROP_SEM_H1V2_VD1 = "SEM.H1V2.VD1"; public static final String PROP_SEM_H1V2_VD2 = "SEM.H1V2.VD2"; public static final String PROP_SEM_H1V2_VD3 = "SEM.H1V2.VD3"; public static final String PROP_SEM_H1V2_VD4 = "SEM.H1V2.VD4"; public static final String PROP_SEM_H1V2_VD5 = "SEM.H1V2.VD5"; public static final String PROP_SEM_H1V2_VM1 = "SEM.H1V2.VM1"; public static final String PROP_SEM_H1V2_VM2 = "SEM.H1V2.VM2"; public static final String PROP_SEM_H1V2_VM3 = "SEM.H1V2.VM3"; public static final String PROP_SEM_H1V2_VM4 = "SEM.H1V2.VM4"; public static final String PROP_SEM_H1V2_VM5 = "SEM.H1V2.VM5"; public static final String PROP_SEM_H2V2_VD1 = "SEM.H2V2.VD1"; public static final String PROP_SEM_H2V2_VD2 = "SEM.H2V2.VD2"; public static final String PROP_SEM_H2V2_VD3 = "SEM.H2V2.VD3"; public static final String PROP_SEM_H2V2_VD4 = "SEM.H2V2.VD4"; public static final String PROP_SEM_H2V2_VD5 = "SEM.H2V2.VD5"; public static final String PROP_SEM_H2V2_VM1 = "SEM.H2V2.VM1"; public static final String PROP_SEM_H2V2_VM2 = "SEM.H2V2.VM2"; public static final String PROP_SEM_H2V2_VM3 = "SEM.H2V2.VM3"; public static final String PROP_SEM_H2V2_VM4 = "SEM.H2V2.VM4"; public static final String PROP_SEM_H2V2_VM5 = "SEM.H2V2.VM5"; public static final String PROP_SEM_H3V2_VD1 = "SEM.H3V2.VD1"; public static final String PROP_SEM_H3V2_VD2 = "SEM.H3V2.VD2"; public static final String PROP_SEM_H3V2_VD3 = "SEM.H3V2.VD3"; public static final String PROP_SEM_H3V2_VD4 = "SEM.H3V2.VD4"; public static final String PROP_SEM_H3V2_VD5 = "SEM.H3V2.VD5"; public static final String PROP_SEM_H3V2_VM1 = "SEM.H3V2.VM1"; public static final String PROP_SEM_H3V2_VM2 = "SEM.H3V2.VM2"; public static final String PROP_SEM_H3V2_VM3 = 
"SEM.H3V2.VM3"; public static final String PROP_SEM_H3V2_VM4 = "SEM.H3V2.VM4"; public static final String PROP_SEM_H3V2_VM5 = "SEM.H3V2.VM5"; public static final String PROP_SEM_H4V2_VD1 = "SEM.H4V2.VD1"; public static final String PROP_SEM_H4V2_VD2 = "SEM.H4V2.VD2"; public static final String PROP_SEM_H4V2_VD3 = "SEM.H4V2.VD3"; public static final String PROP_SEM_H4V2_VD4 = "SEM.H4V2.VD4"; public static final String PROP_SEM_H4V2_VD5 = "SEM.H4V2.VD5"; public static final String PROP_SEM_H4V2_VM1 = "SEM.H4V2.VM1"; public static final String PROP_SEM_H4V2_VM2 = "SEM.H4V2.VM2"; public static final String PROP_SEM_H4V2_VM3 = "SEM.H4V2.VM3"; public static final String PROP_SEM_H4V2_VM4 = "SEM.H4V2.VM4"; public static final String PROP_SEM_H4V2_VM5 = "SEM.H4V2.VM5"; public static final String PROP_SEM_H1V3_VD1 = "SEM.H1V3.VD1"; public static final String PROP_SEM_H1V3_VD2 = "SEM.H1V3.VD2"; public static final String PROP_SEM_H1V3_VD3 = "SEM.H1V3.VD3"; public static final String PROP_SEM_H1V3_VD4 = "SEM.H1V3.VD4"; public static final String PROP_SEM_H1V3_VD5 = "SEM.H1V3.VD5"; public static final String PROP_SEM_H1V3_VM1 = "SEM.H1V3.VM1"; public static final String PROP_SEM_H1V3_VM2 = "SEM.H1V3.VM2"; public static final String PROP_SEM_H1V3_VM3 = "SEM.H1V3.VM3"; public static final String PROP_SEM_H1V3_VM4 = "SEM.H1V3.VM4"; public static final String PROP_SEM_H1V3_VM5 = "SEM.H1V3.VM5"; public static final String PROP_SEM_H2V3_VD1 = "SEM.H2V3.VD1"; public static final String PROP_SEM_H2V3_VD2 = "SEM.H2V3.VD2"; public static final String PROP_SEM_H2V3_VD3 = "SEM.H2V3.VD3"; public static final String PROP_SEM_H2V3_VD4 = "SEM.H2V3.VD4"; public static final String PROP_SEM_H2V3_VD5 = "SEM.H2V3.VD5"; public static final String PROP_SEM_H2V3_VM1 = "SEM.H2V3.VM1"; public static final String PROP_SEM_H2V3_VM2 = "SEM.H2V3.VM2"; public static final String PROP_SEM_H2V3_VM3 = "SEM.H2V3.VM3"; public static final String PROP_SEM_H2V3_VM4 = "SEM.H2V3.VM4"; public static final String 
PROP_SEM_H2V3_VM5 = "SEM.H2V3.VM5"; public static final String PROP_SEM_H3V3_VD1 = "SEM.H3V3.VD1"; public static final String PROP_SEM_H3V3_VD2 = "SEM.H3V3.VD2"; public static final String PROP_SEM_H3V3_VD3 = "SEM.H3V3.VD3"; public static final String PROP_SEM_H3V3_VD4 = "SEM.H3V3.VD4"; public static final String PROP_SEM_H3V3_VD5 = "SEM.H3V3.VD5"; public static final String PROP_SEM_H3V3_VM1 = "SEM.H3V3.VM1"; public static final String PROP_SEM_H3V3_VM2 = "SEM.H3V3.VM2"; public static final String PROP_SEM_H3V3_VM3 = "SEM.H3V3.VM3"; public static final String PROP_SEM_H3V3_VM4 = "SEM.H3V3.VM4"; public static final String PROP_SEM_H3V3_VM5 = "SEM.H3V3.VM5"; public static final String PROP_SEM_H4V3_VD1 = "SEM.H4V3.VD1"; public static final String PROP_SEM_H4V3_VD2 = "SEM.H4V3.VD2"; public static final String PROP_SEM_H4V3_VD3 = "SEM.H4V3.VD3"; public static final String PROP_SEM_H4V3_VD4 = "SEM.H4V3.VD4"; public static final String PROP_SEM_H4V3_VD5 = "SEM.H4V3.VD5"; public static final String PROP_SEM_H4V3_VM1 = "SEM.H4V3.VM1"; public static final String PROP_SEM_H4V3_VM2 = "SEM.H4V3.VM2"; public static final String PROP_SEM_H4V3_VM3 = "SEM.H4V3.VM3"; public static final String PROP_SEM_H4V3_VM4 = "SEM.H4V3.VM4"; public static final String PROP_SEM_H4V3_VM5 = "SEM.H4V3.VM5"; public static final String PROP_SEM_H1V4_VD1 = "SEM.H1V4.VD1"; public static final String PROP_SEM_H1V4_VD2 = "SEM.H1V4.VD2"; public static final String PROP_SEM_H1V4_VD3 = "SEM.H1V4.VD3"; public static final String PROP_SEM_H1V4_VD4 = "SEM.H1V4.VD4"; public static final String PROP_SEM_H1V4_VD5 = "SEM.H1V4.VD5"; public static final String PROP_SEM_H1V4_VM1 = "SEM.H1V4.VM1"; public static final String PROP_SEM_H1V4_VM2 = "SEM.H1V4.VM2"; public static final String PROP_SEM_H1V4_VM3 = "SEM.H1V4.VM3"; public static final String PROP_SEM_H1V4_VM4 = "SEM.H1V4.VM4"; public static final String PROP_SEM_H1V4_VM5 = "SEM.H1V4.VM5"; public static final String PROP_SEM_H2V4_VD1 = "SEM.H2V4.VD1"; public 
static final String PROP_SEM_H2V4_VD2 = "SEM.H2V4.VD2"; public static final String PROP_SEM_H2V4_VD3 = "SEM.H2V4.VD3"; public static final String PROP_SEM_H2V4_VD4 = "SEM.H2V4.VD4"; public static final String PROP_SEM_H2V4_VD5 = "SEM.H2V4.VD5"; public static final String PROP_SEM_H2V4_VM1 = "SEM.H2V4.VM1"; public static final String PROP_SEM_H2V4_VM2 = "SEM.H2V4.VM2"; public static final String PROP_SEM_H2V4_VM3 = "SEM.H2V4.VM3"; public static final String PROP_SEM_H2V4_VM4 = "SEM.H2V4.VM4"; public static final String PROP_SEM_H2V4_VM5 = "SEM.H2V4.VM5"; public static final String PROP_SEM_H3V4_VD1 = "SEM.H3V4.VD1"; public static final String PROP_SEM_H3V4_VD2 = "SEM.H3V4.VD2"; public static final String PROP_SEM_H3V4_VD3 = "SEM.H3V4.VD3"; public static final String PROP_SEM_H3V4_VD4 = "SEM.H3V4.VD4"; public static final String PROP_SEM_H3V4_VD5 = "SEM.H3V4.VD5"; public static final String PROP_SEM_H3V4_VM1 = "SEM.H3V4.VM1"; public static final String PROP_SEM_H3V4_VM2 = "SEM.H3V4.VM2"; public static final String PROP_SEM_H3V4_VM3 = "SEM.H3V4.VM3"; public static final String PROP_SEM_H3V4_VM4 = "SEM.H3V4.VM4"; public static final String PROP_SEM_H3V4_VM5 = "SEM.H3V4.VM5"; public static final String PROP_SEM_H4V4_VD1 = "SEM.H4V4.VD1"; public static final String PROP_SEM_H4V4_VD2 = "SEM.H4V4.VD2"; public static final String PROP_SEM_H4V4_VD3 = "SEM.H4V4.VD3"; public static final String PROP_SEM_H4V4_VD4 = "SEM.H4V4.VD4"; public static final String PROP_SEM_H4V4_VD5 = "SEM.H4V4.VD5"; public static final String PROP_SEM_H4V4_VM1 = "SEM.H4V4.VM1"; public static final String PROP_SEM_H4V4_VM2 = "SEM.H4V4.VM2"; public static final String PROP_SEM_H4V4_VM3 = "SEM.H4V4.VM3"; public static final String PROP_SEM_H4V4_VM4 = "SEM.H4V4.VM4"; public static final String PROP_SEM_H4V4_VM5 = "SEM.H4V4.VM5"; public static final String PROP_SEM_H1V5_VD1 = "SEM.H1V5.VD1"; public static final String PROP_SEM_H1V5_VD2 = "SEM.H1V5.VD2"; public static final String PROP_SEM_H1V5_VD3 = 
"SEM.H1V5.VD3"; public static final String PROP_SEM_H1V5_VD4 = "SEM.H1V5.VD4"; public static final String PROP_SEM_H1V5_VD5 = "SEM.H1V5.VD5"; public static final String PROP_SEM_H1V5_VM1 = "SEM.H1V5.VM1"; public static final String PROP_SEM_H1V5_VM2 = "SEM.H1V5.VM2"; public static final String PROP_SEM_H1V5_VM3 = "SEM.H1V5.VM3"; public static final String PROP_SEM_H1V5_VM4 = "SEM.H1V5.VM4"; public static final String PROP_SEM_H1V5_VM5 = "SEM.H1V5.VM5"; public static final String PROP_SEM_H2V5_VD1 = "SEM.H2V5.VD1"; public static final String PROP_SEM_H2V5_VD2 = "SEM.H2V5.VD2"; public static final String PROP_SEM_H2V5_VD3 = "SEM.H2V5.VD3"; public static final String PROP_SEM_H2V5_VD4 = "SEM.H2V5.VD4"; public static final String PROP_SEM_H2V5_VD5 = "SEM.H2V5.VD5"; public static final String PROP_SEM_H2V5_VM1 = "SEM.H2V5.VM1"; public static final String PROP_SEM_H2V5_VM2 = "SEM.H2V5.VM2"; public static final String PROP_SEM_H2V5_VM3 = "SEM.H2V5.VM3"; public static final String PROP_SEM_H2V5_VM4 = "SEM.H2V5.VM4"; public static final String PROP_SEM_H2V5_VM5 = "SEM.H2V5.VM5"; public static final String PROP_SEM_H3V5_VD1 = "SEM.H3V5.VD1"; public static final String PROP_SEM_H3V5_VD2 = "SEM.H3V5.VD2"; public static final String PROP_SEM_H3V5_VD3 = "SEM.H3V5.VD3"; public static final String PROP_SEM_H3V5_VD4 = "SEM.H3V5.VD4"; public static final String PROP_SEM_H3V5_VD5 = "SEM.H3V5.VD5"; public static final String PROP_SEM_H3V5_VM1 = "SEM.H3V5.VM1"; public static final String PROP_SEM_H3V5_VM2 = "SEM.H3V5.VM2"; public static final String PROP_SEM_H3V5_VM3 = "SEM.H3V5.VM3"; public static final String PROP_SEM_H3V5_VM4 = "SEM.H3V5.VM4"; public static final String PROP_SEM_H3V5_VM5 = "SEM.H3V5.VM5"; public static final String PROP_SEM_H4V5_VD1 = "SEM.H4V5.VD1"; public static final String PROP_SEM_H4V5_VD2 = "SEM.H4V5.VD2"; public static final String PROP_SEM_H4V5_VD3 = "SEM.H4V5.VD3"; public static final String PROP_SEM_H4V5_VD4 = "SEM.H4V5.VD4"; public static final String 
PROP_SEM_H4V5_VD5 = "SEM.H4V5.VD5";
public static final String PROP_SEM_H4V5_VM1 = "SEM.H4V5.VM1";
public static final String PROP_SEM_H4V5_VM2 = "SEM.H4V5.VM2";
public static final String PROP_SEM_H4V5_VM3 = "SEM.H4V5.VM3";
public static final String PROP_SEM_H4V5_VM4 = "SEM.H4V5.VM4";
public static final String PROP_SEM_H4V5_VM5 = "SEM.H4V5.VM5";
public static final String PROP_SEMAFARO_TYPE = "semafaro.tipo";
public static final String PROP_MODE_MASH_TKL = "mode.mash.tkl";
public static final String PROP_PROJ = "proj";
public static final String PROP_BANCO_DATA_CONVERT = "banco.data.convert";
public static final String PROP_PC_DATA_CONVERT = "pc.data.convert";

/** Lazy-initialization holder for the singleton (initialization-on-demand idiom). */
private static class Holder {
    private static final Config INSTANCE = new Config();
}

/**
 * Returns the process-wide configuration instance, loading
 * {@link #CONFIG_FILE} on first access.
 */
public static Config getInstance() {
    return Holder.INSTANCE;
}

/**
 * Loads {@link #CONFIG_FILE} from the working directory. On failure the
 * error is logged and the instance is left with no properties set.
 */
private Config() {
    try {
        System.out.println("Carregando arquivo de configuracao.");
        // FIX: the original passed a FileInputStream to load() without ever
        // closing it; try-with-resources guarantees the stream is released.
        try (FileInputStream in = new FileInputStream(CONFIG_FILE)) {
            load(in);
        }
    } catch (Exception e) {
        System.out.println("Falha ao tentar carregar arquivo de configuracao.");
        new Log(e);
    }
}

/**
 * Returns the raw string value of the named property, or {@code null}
 * if the property is absent.
 */
public String getString(String name) {
    return getProperty(name);
}

/**
 * Returns the named property parsed as an int.
 *
 * @throws NumberFormatException if the value is missing or not a valid
 *         integer (a missing property yields {@code null}, which
 *         {@code Integer.parseInt} rejects).
 */
public int getInt(String name) {
    return Integer.parseInt(getProperty(name));
}
}
package wulfric.predictor.evaluation;

import java.io.IOException;
import java.io.PrintWriter;
import java.util.Arrays;

import wulfric.cmdline.Argument;
import wulfric.cmdline.CmdLineParser;
import wulfric.core.Instance;
import wulfric.core.Instances;
import wulfric.core.io.InstancesReader;
import wulfric.predictor.Classifier;
import wulfric.predictor.Learner.Task;
import wulfric.predictor.ProbabilisticClassifier;
import wulfric.predictor.Regressor;
import wulfric.predictor.io.PredictorReader;
import wulfric.util.OptimUtils;

/**
 * Class for making predictions.
 */
public class Predictor {

    /**
     * Makes predictions for a dataset with a regressor, one value per line.
     *
     * @param regressor the model.
     * @param instances the dataset.
     * @param path the output path.
     * @param residual <code>true</code> if residuals (target - prediction) are the output.
     * @throws IOException if the output file cannot be created.
     */
    public static void predict(Regressor regressor, Instances instances, String path, boolean residual)
            throws IOException {
        // FIX: try-with-resources — the original leaked the PrintWriter
        // whenever regress() threw before close() was reached.
        try (PrintWriter out = new PrintWriter(path)) {
            for (Instance instance : instances) {
                double pred = regressor.regress(instance);
                out.println(residual ? instance.getTarget() - pred : pred);
            }
            out.flush();
        }
    }

    /**
     * Makes predictions for a dataset with a classifier, one class index per line.
     *
     * @param classifier the model.
     * @param instances the dataset.
     * @param path the output path.
     * @throws IOException if the output file cannot be created.
     */
    public static void predict(Classifier classifier, Instances instances, String path)
            throws IOException {
        try (PrintWriter out = new PrintWriter(path)) {
            for (Instance instance : instances) {
                int pred = classifier.classify(instance);
                out.println(pred);
            }
            out.flush();
        }
    }

    /** Command-line options for {@link #main(String[])}. */
    static class Options {

        @Argument(name = "-r", description = "attribute file path")
        String attPath = null;

        @Argument(name = "-d", description = "data set path", required = true)
        String dataPath = null;

        @Argument(name = "-m", description = "model path", required = true)
        String modelPath = null;

        @Argument(name = "-p", description = "prediction path")
        String predictionPath = null;

        @Argument(name = "-R", description = "residual path")
        String residualPath = null;

        @Argument(name = "-g", description = "task between classification (c) and regression (r) (default: r)")
        String task = "r";

        @Argument(name = "-P", description = "output probablity (default: false)")
        boolean prob = false;
    }

    /**
     * Makes predictions on a dataset.
     *
     * <pre>
     * usage: wulfric.predictor.evaluation.Predictor
     * -d data set path
     * -m model path
     * [-r] attribute file path
     * [-p] prediction path
     * [-R] residual path
     * [-g] task between classification (c) and regression (r) (default: r)
     * [-P] output probablity (default: false)
     * </pre>
     *
     * @param args the command line arguments.
     * @throws Exception if the model or dataset cannot be read.
     */
    public static void main(String[] args) throws Exception {
        Options opts = new Options();
        CmdLineParser parser = new CmdLineParser(Predictor.class, opts);
        Task task = null;
        try {
            parser.parse(args);
            task = Task.get(opts.task);
        } catch (IllegalArgumentException e) {
            parser.printUsage();
            System.exit(1);
        }

        Instances instances = InstancesReader.read(opts.attPath, opts.dataPath);
        wulfric.predictor.Predictor predictor = PredictorReader.read(opts.modelPath);

        switch (task) {
            case REGRESSION:
                Regressor regressor = (Regressor) predictor;
                double rmse = Evaluator.evalRMSE(regressor, instances);
                System.out.println("RMSE on Test: " + rmse);
                // Delegate to the predict() helpers instead of duplicating
                // their loops inline (same output, single code path).
                if (opts.predictionPath != null) {
                    predict(regressor, instances, opts.predictionPath, false);
                }
                if (opts.residualPath != null) {
                    predict(regressor, instances, opts.residualPath, true);
                }
                break;
            case CLASSIFICATION:
                Classifier classifier = (Classifier) predictor;
                double error = Evaluator.evalError(classifier, instances);
                System.out.println("Error rate on Test: " + (error * 100) + " %");
                if (opts.predictionPath != null) {
                    if (opts.prob) {
                        // Probability mode: one bracketed probability vector per line.
                        ProbabilisticClassifier probClassifier = (ProbabilisticClassifier) predictor;
                        try (PrintWriter out = new PrintWriter(opts.predictionPath)) {
                            for (Instance instance : instances) {
                                double[] pred = probClassifier.predictProbabilities(instance);
                                out.println(Arrays.toString(pred));
                            }
                            out.flush();
                        }
                    } else {
                        predict(classifier, instances, opts.predictionPath);
                    }
                }
                if (opts.residualPath != null) {
                    if (predictor instanceof Regressor) {
                        Regressor regressingClassifier = (Regressor) predictor;
                        try (PrintWriter out = new PrintWriter(opts.residualPath)) {
                            for (Instance instance : instances) {
                                double pred = regressingClassifier.regress(instance);
                                int cls = (int) instance.getTarget();
                                out.println(OptimUtils.getPseudoResidual(pred, cls));
                            }
                            out.flush();
                        }
                    } else {
                        System.out.println("Warning: Classifier does not support outputing pseudo residual.");
                    }
                }
                break;
            default:
                break;
        }
    }
}
package org.hl7.fhir.instance.model; /* Copyright (c) 2011+, HL7, Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of HL7 nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

// Generated on Wed, Feb 18, 2015 12:09-0500 for FHIR v0.4.0

import java.util.*;

import org.hl7.fhir.instance.model.annotations.ResourceDef;
import org.hl7.fhir.instance.model.annotations.SearchParamDefinition;
import org.hl7.fhir.instance.model.annotations.Block;
import org.hl7.fhir.instance.model.annotations.Child;
import org.hl7.fhir.instance.model.annotations.Description;

/**
 * This resource provides the insurance eligibility details from the insurer
 * regarding a specified coverage and optionally some class of service.
 */
@ResourceDef(name="EligibilityRequest", profile="http://hl7.org/fhir/Profile/EligibilityRequest")
public class EligibilityRequest extends DomainResource {

    /** Business identifiers assigned to this request. */
    @Child(name = "identifier", type = {Identifier.class}, order = 0, min = 0, max = Child.MAX_UNLIMITED)
    @Description(shortDefinition="Business Identifier", formalDefinition="The Response Business Identifier." )
    protected List<Identifier> identifier;

    /** Version of the resource-content style; maps to the allowable profiles. */
    @Child(name = "ruleset", type = {Coding.class}, order = 1, min = 0, max = 1)
    @Description(shortDefinition="Resource version", formalDefinition="The version of the style of resource contents. This should be mapped to the allowable profiles for this and supporting resources." )
    protected Coding ruleset;

    /** Style and version of the original material converted into this resource. */
    @Child(name = "originalRuleset", type = {Coding.class}, order = 2, min = 0, max = 1)
    @Description(shortDefinition="Original version", formalDefinition="The style (standard) and version of the original material which was converted into this resource." )
    protected Coding originalRuleset;

    /** Creation date of this resource. */
    @Child(name = "created", type = {DateTimeType.class}, order = 3, min = 0, max = 1)
    @Description(shortDefinition="Creation date", formalDefinition="The date when this resource was created." )
    protected DateTimeType created;

    /** The insurer this request is addressed to. */
    @Child(name = "target", type = {Organization.class}, order = 4, min = 0, max = 1)
    @Description(shortDefinition="Insurer", formalDefinition="The Insurer who is target of the request." )
    protected Reference target;

    /** Resolved resource for {@link #target}; not populated automatically. */
    protected Organization targetTarget;

    /** Practitioner responsible for the services rendered to the patient. */
    @Child(name = "provider", type = {Practitioner.class}, order = 5, min = 0, max = 1)
    @Description(shortDefinition="Responsible practitioner", formalDefinition="The practitioner who is responsible for the services rendered to the patient." )
    protected Reference provider;

    /** Resolved resource for {@link #provider}; not populated automatically. */
    protected Practitioner providerTarget;

    /** Organization responsible for the services rendered to the patient. */
    @Child(name = "organization", type = {Organization.class}, order = 6, min = 0, max = 1)
    @Description(shortDefinition="Responsible organization", formalDefinition="The organization which is responsible for the services rendered to the patient." )
    protected Reference organization;

    /** Resolved resource for {@link #organization}; not populated automatically. */
    protected Organization organizationTarget;

    private static final long serialVersionUID = 1836339504L;

    /** Creates an empty EligibilityRequest. */
    public EligibilityRequest() {
        super();
    }

    /**
     * @return {@link #identifier} (The Response Business Identifier.); never null,
     *         an empty list is created on demand.
     */
    public List<Identifier> getIdentifier() {
        if (this.identifier == null) {
            this.identifier = new ArrayList<Identifier>();
        }
        return this.identifier;
    }

    /**
     * @return true when at least one non-empty identifier is present.
     */
    public boolean hasIdentifier() {
        if (this.identifier == null) {
            return false;
        }
        for (Identifier item : this.identifier) {
            if (!item.isEmpty()) {
                return true;
            }
        }
        return false;
    }

    /**
     * Appends a freshly created {@link Identifier} to {@link #identifier} and returns it.
     */
    // syntactic sugar
    public Identifier addIdentifier() { //3
        Identifier newItem = new Identifier();
        if (this.identifier == null) {
            this.identifier = new ArrayList<Identifier>();
        }
        this.identifier.add(newItem);
        return newItem;
    }

    /**
     * @return {@link #ruleset} (auto-created when permitted by {@link Configuration}).
     */
    public Coding getRuleset() {
        if (this.ruleset == null) {
            if (Configuration.errorOnAutoCreate()) {
                throw new Error("Attempt to auto-create EligibilityRequest.ruleset");
            }
            if (Configuration.doAutoCreate()) {
                this.ruleset = new Coding(); // cc
            }
        }
        return this.ruleset;
    }

    /** @return true when ruleset is present and non-empty. */
    public boolean hasRuleset() {
        return this.ruleset != null && !this.ruleset.isEmpty();
    }

    /**
     * @param value {@link #ruleset} to set.
     * @return this, for chaining.
     */
    public EligibilityRequest setRuleset(Coding value) {
        this.ruleset = value;
        return this;
    }

    /**
     * @return {@link #originalRuleset} (auto-created when permitted by {@link Configuration}).
     */
    public Coding getOriginalRuleset() {
        if (this.originalRuleset == null) {
            if (Configuration.errorOnAutoCreate()) {
                throw new Error("Attempt to auto-create EligibilityRequest.originalRuleset");
            }
            if (Configuration.doAutoCreate()) {
                this.originalRuleset = new Coding(); // cc
            }
        }
        return this.originalRuleset;
    }

    /** @return true when originalRuleset is present and non-empty. */
    public boolean hasOriginalRuleset() {
        return this.originalRuleset != null && !this.originalRuleset.isEmpty();
    }

    /**
     * @param value {@link #originalRuleset} to set.
     * @return this, for chaining.
     */
    public EligibilityRequest setOriginalRuleset(Coding value) {
        this.originalRuleset = value;
        return this;
    }

    /**
     * @return {@link #created} as the underlying primitive element (id, value, extensions).
     *         Use "getCreated" for direct access to the value.
     */
    public DateTimeType getCreatedElement() {
        if (this.created == null) {
            if (Configuration.errorOnAutoCreate()) {
                throw new Error("Attempt to auto-create EligibilityRequest.created");
            }
            if (Configuration.doAutoCreate()) {
                this.created = new DateTimeType(); // bb
            }
        }
        return this.created;
    }

    /** @return true when the created element is present and non-empty. */
    public boolean hasCreatedElement() {
        return this.created != null && !this.created.isEmpty();
    }

    /** @return true when the created element is present and non-empty. */
    public boolean hasCreated() {
        return this.created != null && !this.created.isEmpty();
    }

    /**
     * @param value {@link #created} as the underlying primitive element.
     * @return this, for chaining.
     */
    public EligibilityRequest setCreatedElement(DateTimeType value) {
        this.created = value;
        return this;
    }

    /**
     * @return the creation date value, or null when absent.
     */
    public Date getCreated() {
        return (this.created == null) ? null : this.created.getValue();
    }

    /**
     * @param value the creation date; null clears the element.
     * @return this, for chaining.
     */
    public EligibilityRequest setCreated(Date value) {
        if (value == null) {
            this.created = null;
        } else {
            if (this.created == null) {
                this.created = new DateTimeType();
            }
            this.created.setValue(value);
        }
        return this;
    }

    /**
     * @return {@link #target} (auto-created when permitted by {@link Configuration}).
     */
    public Reference getTarget() {
        if (this.target == null) {
            if (Configuration.errorOnAutoCreate()) {
                throw new Error("Attempt to auto-create EligibilityRequest.target");
            }
            if (Configuration.doAutoCreate()) {
                this.target = new Reference(); // cc
            }
        }
        return this.target;
    }

    /** @return true when target is present and non-empty. */
    public boolean hasTarget() {
        return this.target != null && !this.target.isEmpty();
    }

    /**
     * @param value {@link #target} to set.
     * @return this, for chaining.
     */
    public EligibilityRequest setTarget(Reference value) {
        this.target = value;
        return this;
    }

    /**
     * @return the resolved resource behind {@link #target}. The reference library does not
     *         populate this; it is a holder for a caller-resolved resource.
     */
    public Organization getTargetTarget() {
        if (this.targetTarget == null) {
            if (Configuration.errorOnAutoCreate()) {
                throw new Error("Attempt to auto-create EligibilityRequest.target");
            }
            if (Configuration.doAutoCreate()) {
                this.targetTarget = new Organization(); // aa
            }
        }
        return this.targetTarget;
    }

    /**
     * @param value the resolved resource behind {@link #target}.
     * @return this, for chaining.
     */
    public EligibilityRequest setTargetTarget(Organization value) {
        this.targetTarget = value;
        return this;
    }

    /**
     * @return {@link #provider} (auto-created when permitted by {@link Configuration}).
     */
    public Reference getProvider() {
        if (this.provider == null) {
            if (Configuration.errorOnAutoCreate()) {
                throw new Error("Attempt to auto-create EligibilityRequest.provider");
            }
            if (Configuration.doAutoCreate()) {
                this.provider = new Reference(); // cc
            }
        }
        return this.provider;
    }

    /** @return true when provider is present and non-empty. */
    public boolean hasProvider() {
        return this.provider != null && !this.provider.isEmpty();
    }

    /**
     * @param value {@link #provider} to set.
     * @return this, for chaining.
     */
    public EligibilityRequest setProvider(Reference value) {
        this.provider = value;
        return this;
    }

    /**
     * @return the resolved resource behind {@link #provider}. The reference library does not
     *         populate this; it is a holder for a caller-resolved resource.
     */
    public Practitioner getProviderTarget() {
        if (this.providerTarget == null) {
            if (Configuration.errorOnAutoCreate()) {
                throw new Error("Attempt to auto-create EligibilityRequest.provider");
            }
            if (Configuration.doAutoCreate()) {
                this.providerTarget = new Practitioner(); // aa
            }
        }
        return this.providerTarget;
    }

    /**
     * @param value the resolved resource behind {@link #provider}.
     * @return this, for chaining.
     */
    public EligibilityRequest setProviderTarget(Practitioner value) {
        this.providerTarget = value;
        return this;
    }

    /**
     * @return {@link #organization} (auto-created when permitted by {@link Configuration}).
     */
    public Reference getOrganization() {
        if (this.organization == null) {
            if (Configuration.errorOnAutoCreate()) {
                throw new Error("Attempt to auto-create EligibilityRequest.organization");
            }
            if (Configuration.doAutoCreate()) {
                this.organization = new Reference(); // cc
            }
        }
        return this.organization;
    }

    /** @return true when organization is present and non-empty. */
    public boolean hasOrganization() {
        return this.organization != null && !this.organization.isEmpty();
    }

    /**
     * @param value {@link #organization} to set.
     * @return this, for chaining.
     */
    public EligibilityRequest setOrganization(Reference value) {
        this.organization = value;
        return this;
    }

    /**
     * @return the resolved resource behind {@link #organization}. The reference library does
     *         not populate this; it is a holder for a caller-resolved resource.
     */
    public Organization getOrganizationTarget() {
        if (this.organizationTarget == null) {
            if (Configuration.errorOnAutoCreate()) {
                throw new Error("Attempt to auto-create EligibilityRequest.organization");
            }
            if (Configuration.doAutoCreate()) {
                this.organizationTarget = new Organization(); // aa
            }
        }
        return this.organizationTarget;
    }

    /**
     * @param value the resolved resource behind {@link #organization}.
     * @return this, for chaining.
     */
    public EligibilityRequest setOrganizationTarget(Organization value) {
        this.organizationTarget = value;
        return this;
    }

    /** Registers every child element of this resource with the given property list. */
    protected void listChildren(List<Property> childrenList) {
        super.listChildren(childrenList);
        childrenList.add(new Property("identifier", "Identifier", "The Response Business Identifier.", 0, java.lang.Integer.MAX_VALUE, identifier));
        childrenList.add(new Property("ruleset", "Coding", "The version of the style of resource contents. This should be mapped to the allowable profiles for this and supporting resources.", 0, java.lang.Integer.MAX_VALUE, ruleset));
        childrenList.add(new Property("originalRuleset", "Coding", "The style (standard) and version of the original material which was converted into this resource.", 0, java.lang.Integer.MAX_VALUE, originalRuleset));
        childrenList.add(new Property("created", "dateTime", "The date when this resource was created.", 0, java.lang.Integer.MAX_VALUE, created));
        childrenList.add(new Property("target", "Reference(Organization)", "The Insurer who is target of the request.", 0, java.lang.Integer.MAX_VALUE, target));
        childrenList.add(new Property("provider", "Reference(Practitioner)", "The practitioner who is responsible for the services rendered to the patient.", 0, java.lang.Integer.MAX_VALUE, provider));
        childrenList.add(new Property("organization", "Reference(Organization)", "The organization which is responsible for the services rendered to the patient.", 0, java.lang.Integer.MAX_VALUE, organization));
    }

    /** Returns a deep copy of this resource. */
    public EligibilityRequest copy() {
        EligibilityRequest dst = new EligibilityRequest();
        copyValues(dst);
        if (identifier != null) {
            dst.identifier = new ArrayList<Identifier>();
            for (Identifier item : identifier) {
                dst.identifier.add(item.copy());
            }
        }
        dst.ruleset = (ruleset == null) ? null : ruleset.copy();
        dst.originalRuleset = (originalRuleset == null) ? null : originalRuleset.copy();
        dst.created = (created == null) ? null : created.copy();
        dst.target = (target == null) ? null : target.copy();
        dst.provider = (provider == null) ? null : provider.copy();
        dst.organization = (organization == null) ? null : organization.copy();
        return dst;
    }

    /** Covariant alias of {@link #copy()}. */
    protected EligibilityRequest typedCopy() {
        return copy();
    }

    @Override
    public boolean equalsDeep(Base other) {
        if (!super.equalsDeep(other)) {
            return false;
        }
        if (!(other instanceof EligibilityRequest)) {
            return false;
        }
        EligibilityRequest that = (EligibilityRequest) other;
        return compareDeep(identifier, that.identifier, true)
            && compareDeep(ruleset, that.ruleset, true)
            && compareDeep(originalRuleset, that.originalRuleset, true)
            && compareDeep(created, that.created, true)
            && compareDeep(target, that.target, true)
            && compareDeep(provider, that.provider, true)
            && compareDeep(organization, that.organization, true);
    }

    @Override
    public boolean equalsShallow(Base other) {
        if (!super.equalsShallow(other)) {
            return false;
        }
        if (!(other instanceof EligibilityRequest)) {
            return false;
        }
        EligibilityRequest that = (EligibilityRequest) other;
        return compareValues(created, that.created, true);
    }

    /** A resource is empty when the base is empty and every element is absent or empty. */
    public boolean isEmpty() {
        return super.isEmpty()
            && (identifier == null || identifier.isEmpty())
            && (ruleset == null || ruleset.isEmpty())
            && (originalRuleset == null || originalRuleset.isEmpty())
            && (created == null || created.isEmpty())
            && (target == null || target.isEmpty())
            && (provider == null || provider.isEmpty())
            && (organization == null || organization.isEmpty());
    }

    @Override
    public ResourceType getResourceType() {
        return ResourceType.EligibilityRequest;
    }

    @SearchParamDefinition(name="identifier", path="EligibilityRequest.identifier", description="The business identifier of the Eligibility", type="token" )
    public static final String SP_IDENTIFIER = "identifier";

}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* * This is not the original file distributed by the Apache Software Foundation * It has been modified by the Hipparchus project */ package org.hipparchus.analysis.interpolation; import org.hipparchus.analysis.TrivariateFunction; import org.hipparchus.exception.LocalizedCoreFormats; import org.hipparchus.exception.MathIllegalArgumentException; import org.hipparchus.util.MathArrays; import org.hipparchus.util.MathUtils; /** * Function that implements the * <a href="http://en.wikipedia.org/wiki/Tricubic_interpolation"> * tricubic spline interpolation</a>, as proposed in * <blockquote> * Tricubic interpolation in three dimensions<br> * F. Lekien and J. Marsden<br> * <em>Int. J. Numer. Meth. 
Eng</em> 2005; <b>63</b>:455-471<br> * </blockquote> * */ public class TricubicInterpolatingFunction implements TrivariateFunction { /** * Matrix to compute the spline coefficients from the function values * and function derivatives values */ private static final double[][] AINV = { { 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 }, { 0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 }, { -3,3,0,0,0,0,0,0,-2,-1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 }, { 2,-2,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 }, { 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 }, { 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 }, { 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-3,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-2,-1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 }, { 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,-2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 }, { -3,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,-2,0,-1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 }, { 0,0,0,0,0,0,0,0,-3,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-2,0,-1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 }, { 9,-9,-9,9,0,0,0,0,6,3,-6,-3,0,0,0,0,6,-6,3,-3,0,0,0,0,0,0,0,0,0,0,0,0,4,2,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 }, { -6,6,6,-6,0,0,0,0,-3,-3,3,3,0,0,0,0,-4,4,-2,2,0,0,0,0,0,0,0,0,0,0,0,0,-2,-2,-1,-1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 }, { 
2,0,-2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 }, { 0,0,0,0,0,0,0,0,2,0,-2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 }, { -6,6,6,-6,0,0,0,0,-4,-2,4,2,0,0,0,0,-3,3,-3,3,0,0,0,0,0,0,0,0,0,0,0,0,-2,-1,-2,-1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 }, { 4,-4,-4,4,0,0,0,0,2,2,-2,-2,0,0,0,0,2,-2,2,-2,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 }, { 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 }, { 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 }, { 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-3,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-2,-1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 }, { 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,-2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 }, { 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 }, { 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0 }, { 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-3,3,0,0,0,0,0,0,-2,-1,0,0,0,0,0,0 }, { 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,-2,0,0,0,0,0,0,1,1,0,0,0,0,0,0 }, { 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-3,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-2,0,-1,0,0,0,0,0,0,0,0,0,0,0,0,0 }, { 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-3,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,-2,0,-1,0,0,0,0,0 }, { 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9,-9,-9,9,0,0,0,0,0,0,0,0,0,0,0,0,6,3,-6,-3,0,0,0,0,6,-6,3,-3,0,0,0,0,4,2,2,1,0,0,0,0 }, { 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-6,6,6,-6,0,0,0,0,0,0,0,0,0,0,0,0,-3,-3,3,3,0,0,0,0,-4,4,-2,2,0,0,0,0,-2,-2,-1,-1,0,0,0,0 }, { 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,-2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0 }, { 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,-2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0 }, { 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-6,6,6,-6,0,0,0,0,0,0,0,0,0,0,0,0,-4,-2,4,2,0,0,0,0,-3,3,-3,3,0,0,0,0,-2,-1,-2,-1,0,0,0,0 }, { 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,-4,-4,4,0,0,0,0,0,0,0,0,0,0,0,0,2,2,-2,-2,0,0,0,0,2,-2,2,-2,0,0,0,0,1,1,1,1,0,0,0,0 }, {-3,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-2,0,0,0,-1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 }, { 0,0,0,0,0,0,0,0,-3,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-2,0,0,0,-1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 }, { 9,-9,0,0,-9,9,0,0,6,3,0,0,-6,-3,0,0,0,0,0,0,0,0,0,0,6,-6,0,0,3,-3,0,0,0,0,0,0,0,0,0,0,4,2,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 }, { -6,6,0,0,6,-6,0,0,-3,-3,0,0,3,3,0,0,0,0,0,0,0,0,0,0,-4,4,0,0,-2,2,0,0,0,0,0,0,0,0,0,0,-2,-2,0,0,-1,-1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 }, { 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-3,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-2,0,0,0,-1,0,0,0,0,0,0,0,0,0,0,0 }, { 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-3,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-2,0,0,0,-1,0,0,0 }, { 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9,-9,0,0,-9,9,0,0,0,0,0,0,0,0,0,0,6,3,0,0,-6,-3,0,0,0,0,0,0,0,0,0,0,6,-6,0,0,3,-3,0,0,4,2,0,0,2,1,0,0 }, { 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-6,6,0,0,6,-6,0,0,0,0,0,0,0,0,0,0,-3,-3,0,0,3,3,0,0,0,0,0,0,0,0,0,0,-4,4,0,0,-2,2,0,0,-2,-2,0,0,-1,-1,0,0 }, { 
9,0,-9,0,-9,0,9,0,0,0,0,0,0,0,0,0,6,0,3,0,-6,0,-3,0,6,0,-6,0,3,0,-3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,2,0,2,0,1,0,0,0,0,0,0,0,0,0 }, { 0,0,0,0,0,0,0,0,9,0,-9,0,-9,0,9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6,0,3,0,-6,0,-3,0,6,0,-6,0,3,0,-3,0,0,0,0,0,0,0,0,0,4,0,2,0,2,0,1,0 }, { -27,27,27,-27,27,-27,-27,27,-18,-9,18,9,18,9,-18,-9,-18,18,-9,9,18,-18,9,-9,-18,18,18,-18,-9,9,9,-9,-12,-6,-6,-3,12,6,6,3,-12,-6,12,6,-6,-3,6,3,-12,12,-6,6,-6,6,-3,3,-8,-4,-4,-2,-4,-2,-2,-1 }, { 18,-18,-18,18,-18,18,18,-18,9,9,-9,-9,-9,-9,9,9,12,-12,6,-6,-12,12,-6,6,12,-12,-12,12,6,-6,-6,6,6,6,3,3,-6,-6,-3,-3,6,6,-6,-6,3,3,-3,-3,8,-8,4,-4,4,-4,2,-2,4,4,2,2,2,2,1,1 }, { -6,0,6,0,6,0,-6,0,0,0,0,0,0,0,0,0,-3,0,-3,0,3,0,3,0,-4,0,4,0,-2,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-2,0,-2,0,-1,0,-1,0,0,0,0,0,0,0,0,0 }, { 0,0,0,0,0,0,0,0,-6,0,6,0,6,0,-6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-3,0,-3,0,3,0,3,0,-4,0,4,0,-2,0,2,0,0,0,0,0,0,0,0,0,-2,0,-2,0,-1,0,-1,0 }, { 18,-18,-18,18,-18,18,18,-18,12,6,-12,-6,-12,-6,12,6,9,-9,9,-9,-9,9,-9,9,12,-12,-12,12,6,-6,-6,6,6,3,6,3,-6,-3,-6,-3,8,4,-8,-4,4,2,-4,-2,6,-6,6,-6,3,-3,3,-3,4,2,4,2,2,1,2,1 }, { -12,12,12,-12,12,-12,-12,12,-6,-6,6,6,6,6,-6,-6,-6,6,-6,6,6,-6,6,-6,-8,8,8,-8,-4,4,4,-4,-3,-3,-3,-3,3,3,3,3,-4,-4,4,4,-2,-2,2,2,-4,4,-4,4,-2,2,-2,2,-2,-2,-2,-2,-1,-1,-1,-1 }, { 2,0,0,0,-2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 }, { 0,0,0,0,0,0,0,0,2,0,0,0,-2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 }, { -6,6,0,0,6,-6,0,0,-4,-2,0,0,4,2,0,0,0,0,0,0,0,0,0,0,-3,3,0,0,-3,3,0,0,0,0,0,0,0,0,0,0,-2,-1,0,0,-2,-1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 }, { 4,-4,0,0,-4,4,0,0,2,2,0,0,-2,-2,0,0,0,0,0,0,0,0,0,0,2,-2,0,0,2,-2,0,0,0,0,0,0,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 }, { 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,-2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0 }, { 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,-2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0 }, { 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-6,6,0,0,6,-6,0,0,0,0,0,0,0,0,0,0,-4,-2,0,0,4,2,0,0,0,0,0,0,0,0,0,0,-3,3,0,0,-3,3,0,0,-2,-1,0,0,-2,-1,0,0 }, { 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,-4,0,0,-4,4,0,0,0,0,0,0,0,0,0,0,2,2,0,0,-2,-2,0,0,0,0,0,0,0,0,0,0,2,-2,0,0,2,-2,0,0,1,1,0,0,1,1,0,0 }, { -6,0,6,0,6,0,-6,0,0,0,0,0,0,0,0,0,-4,0,-2,0,4,0,2,0,-3,0,3,0,-3,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-2,0,-1,0,-2,0,-1,0,0,0,0,0,0,0,0,0 }, { 0,0,0,0,0,0,0,0,-6,0,6,0,6,0,-6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-4,0,-2,0,4,0,2,0,-3,0,3,0,-3,0,3,0,0,0,0,0,0,0,0,0,-2,0,-1,0,-2,0,-1,0 }, { 18,-18,-18,18,-18,18,18,-18,12,6,-12,-6,-12,-6,12,6,12,-12,6,-6,-12,12,-6,6,9,-9,-9,9,9,-9,-9,9,8,4,4,2,-8,-4,-4,-2,6,3,-6,-3,6,3,-6,-3,6,-6,3,-3,6,-6,3,-3,4,2,2,1,4,2,2,1 }, { -12,12,12,-12,12,-12,-12,12,-6,-6,6,6,6,6,-6,-6,-8,8,-4,4,8,-8,4,-4,-6,6,6,-6,-6,6,6,-6,-4,-4,-2,-2,4,4,2,2,-3,-3,3,3,-3,-3,3,3,-4,4,-2,2,-4,4,-2,2,-2,-2,-1,-1,-2,-2,-1,-1 }, { 4,0,-4,0,-4,0,4,0,0,0,0,0,0,0,0,0,2,0,2,0,-2,0,-2,0,2,0,-2,0,2,0,-2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,1,0,0,0,0,0,0,0,0,0 }, { 0,0,0,0,0,0,0,0,4,0,-4,0,-4,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,2,0,-2,0,-2,0,2,0,-2,0,2,0,-2,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,1,0 }, { -12,12,12,-12,12,-12,-12,12,-8,-4,8,4,8,4,-8,-4,-6,6,-6,6,6,-6,6,-6,-6,6,6,-6,-6,6,6,-6,-4,-2,-4,-2,4,2,4,2,-4,-2,4,2,-4,-2,4,2,-3,3,-3,3,-3,3,-3,3,-2,-1,-2,-1,-2,-1,-2,-1 }, { 8,-8,-8,8,-8,8,8,-8,4,4,-4,-4,-4,-4,4,4,4,-4,4,-4,-4,4,-4,4,4,-4,-4,4,4,-4,-4,4,2,2,2,2,-2,-2,-2,-2,2,2,-2,-2,2,2,-2,-2,2,-2,2,-2,2,-2,2,-2,1,1,1,1,1,1,1,1 } }; /** Samples x-coordinates */ private final double[] xval; /** Samples y-coordinates */ private final double[] yval; /** Samples z-coordinates */ private final double[] zval; /** Set of cubic splines pacthing the whole data grid */ private final TricubicFunction[][][] splines; /** * @param x Sample values of the x-coordinate, 
in increasing order.
 * @param y Sample values of the y-coordinate, in increasing order.
 * @param z Sample values of the z-coordinate, in increasing order.
 * @param f Values of the function on every grid point.
 * @param dFdX Values of the partial derivative of function with respect to x on every grid point.
 * @param dFdY Values of the partial derivative of function with respect to y on every grid point.
 * @param dFdZ Values of the partial derivative of function with respect to z on every grid point.
 * @param d2FdXdY Values of the cross partial derivative of function on every grid point.
 * @param d2FdXdZ Values of the cross partial derivative of function on every grid point.
 * @param d2FdYdZ Values of the cross partial derivative of function on every grid point.
 * @param d3FdXdYdZ Values of the cross partial derivative of function on every grid point.
 * @throws MathIllegalArgumentException if any of the arrays has zero length.
 * @throws MathIllegalArgumentException if the various arrays do not contain the expected number of elements.
 * @throws MathIllegalArgumentException if {@code x}, {@code y} or {@code z} are not strictly increasing.
*/ public TricubicInterpolatingFunction(double[] x, double[] y, double[] z, double[][][] f, double[][][] dFdX, double[][][] dFdY, double[][][] dFdZ, double[][][] d2FdXdY, double[][][] d2FdXdZ, double[][][] d2FdYdZ, double[][][] d3FdXdYdZ) throws MathIllegalArgumentException { final int xLen = x.length; final int yLen = y.length; final int zLen = z.length; if (xLen == 0 || yLen == 0 || z.length == 0 || f.length == 0 || f[0].length == 0) { throw new MathIllegalArgumentException(LocalizedCoreFormats.NO_DATA); } if (xLen != f.length) { throw new MathIllegalArgumentException(LocalizedCoreFormats.DIMENSIONS_MISMATCH, xLen, f.length); } if (xLen != dFdX.length) { throw new MathIllegalArgumentException(LocalizedCoreFormats.DIMENSIONS_MISMATCH, xLen, dFdX.length); } if (xLen != dFdY.length) { throw new MathIllegalArgumentException(LocalizedCoreFormats.DIMENSIONS_MISMATCH, xLen, dFdY.length); } if (xLen != dFdZ.length) { throw new MathIllegalArgumentException(LocalizedCoreFormats.DIMENSIONS_MISMATCH, xLen, dFdZ.length); } if (xLen != d2FdXdY.length) { throw new MathIllegalArgumentException(LocalizedCoreFormats.DIMENSIONS_MISMATCH, xLen, d2FdXdY.length); } if (xLen != d2FdXdZ.length) { throw new MathIllegalArgumentException(LocalizedCoreFormats.DIMENSIONS_MISMATCH, xLen, d2FdXdZ.length); } if (xLen != d2FdYdZ.length) { throw new MathIllegalArgumentException(LocalizedCoreFormats.DIMENSIONS_MISMATCH, xLen, d2FdYdZ.length); } if (xLen != d3FdXdYdZ.length) { throw new MathIllegalArgumentException(LocalizedCoreFormats.DIMENSIONS_MISMATCH, xLen, d3FdXdYdZ.length); } MathArrays.checkOrder(x); MathArrays.checkOrder(y); MathArrays.checkOrder(z); xval = x.clone(); yval = y.clone(); zval = z.clone(); final int lastI = xLen - 1; final int lastJ = yLen - 1; final int lastK = zLen - 1; splines = new TricubicFunction[lastI][lastJ][lastK]; for (int i = 0; i < lastI; i++) { if (f[i].length != yLen) { throw new MathIllegalArgumentException(LocalizedCoreFormats.DIMENSIONS_MISMATCH, f[i].length, 
yLen); } if (dFdX[i].length != yLen) { throw new MathIllegalArgumentException(LocalizedCoreFormats.DIMENSIONS_MISMATCH, dFdX[i].length, yLen); } if (dFdY[i].length != yLen) { throw new MathIllegalArgumentException(LocalizedCoreFormats.DIMENSIONS_MISMATCH, dFdY[i].length, yLen); } if (dFdZ[i].length != yLen) { throw new MathIllegalArgumentException(LocalizedCoreFormats.DIMENSIONS_MISMATCH, dFdZ[i].length, yLen); } if (d2FdXdY[i].length != yLen) { throw new MathIllegalArgumentException(LocalizedCoreFormats.DIMENSIONS_MISMATCH, d2FdXdY[i].length, yLen); } if (d2FdXdZ[i].length != yLen) { throw new MathIllegalArgumentException(LocalizedCoreFormats.DIMENSIONS_MISMATCH, d2FdXdZ[i].length, yLen); } if (d2FdYdZ[i].length != yLen) { throw new MathIllegalArgumentException(LocalizedCoreFormats.DIMENSIONS_MISMATCH, d2FdYdZ[i].length, yLen); } if (d3FdXdYdZ[i].length != yLen) { throw new MathIllegalArgumentException(LocalizedCoreFormats.DIMENSIONS_MISMATCH, d3FdXdYdZ[i].length, yLen); } final int ip1 = i + 1; final double xR = xval[ip1] - xval[i]; for (int j = 0; j < lastJ; j++) { if (f[i][j].length != zLen) { throw new MathIllegalArgumentException(LocalizedCoreFormats.DIMENSIONS_MISMATCH, f[i][j].length, zLen); } if (dFdX[i][j].length != zLen) { throw new MathIllegalArgumentException(LocalizedCoreFormats.DIMENSIONS_MISMATCH, dFdX[i][j].length, zLen); } if (dFdY[i][j].length != zLen) { throw new MathIllegalArgumentException(LocalizedCoreFormats.DIMENSIONS_MISMATCH, dFdY[i][j].length, zLen); } if (dFdZ[i][j].length != zLen) { throw new MathIllegalArgumentException(LocalizedCoreFormats.DIMENSIONS_MISMATCH, dFdZ[i][j].length, zLen); } if (d2FdXdY[i][j].length != zLen) { throw new MathIllegalArgumentException(LocalizedCoreFormats.DIMENSIONS_MISMATCH, d2FdXdY[i][j].length, zLen); } if (d2FdXdZ[i][j].length != zLen) { throw new MathIllegalArgumentException(LocalizedCoreFormats.DIMENSIONS_MISMATCH, d2FdXdZ[i][j].length, zLen); } if (d2FdYdZ[i][j].length != zLen) { throw new 
MathIllegalArgumentException(LocalizedCoreFormats.DIMENSIONS_MISMATCH, d2FdYdZ[i][j].length, zLen); } if (d3FdXdYdZ[i][j].length != zLen) { throw new MathIllegalArgumentException(LocalizedCoreFormats.DIMENSIONS_MISMATCH, d3FdXdYdZ[i][j].length, zLen); } final int jp1 = j + 1; final double yR = yval[jp1] - yval[j]; final double xRyR = xR * yR; for (int k = 0; k < lastK; k++) { final int kp1 = k + 1; final double zR = zval[kp1] - zval[k]; final double xRzR = xR * zR; final double yRzR = yR * zR; final double xRyRzR = xR * yRzR; final double[] beta = { f[i][j][k], f[ip1][j][k], f[i][jp1][k], f[ip1][jp1][k], f[i][j][kp1], f[ip1][j][kp1], f[i][jp1][kp1], f[ip1][jp1][kp1], dFdX[i][j][k] * xR, dFdX[ip1][j][k] * xR, dFdX[i][jp1][k] * xR, dFdX[ip1][jp1][k] * xR, dFdX[i][j][kp1] * xR, dFdX[ip1][j][kp1] * xR, dFdX[i][jp1][kp1] * xR, dFdX[ip1][jp1][kp1] * xR, dFdY[i][j][k] * yR, dFdY[ip1][j][k] * yR, dFdY[i][jp1][k] * yR, dFdY[ip1][jp1][k] * yR, dFdY[i][j][kp1] * yR, dFdY[ip1][j][kp1] * yR, dFdY[i][jp1][kp1] * yR, dFdY[ip1][jp1][kp1] * yR, dFdZ[i][j][k] * zR, dFdZ[ip1][j][k] * zR, dFdZ[i][jp1][k] * zR, dFdZ[ip1][jp1][k] * zR, dFdZ[i][j][kp1] * zR, dFdZ[ip1][j][kp1] * zR, dFdZ[i][jp1][kp1] * zR, dFdZ[ip1][jp1][kp1] * zR, d2FdXdY[i][j][k] * xRyR, d2FdXdY[ip1][j][k] * xRyR, d2FdXdY[i][jp1][k] * xRyR, d2FdXdY[ip1][jp1][k] * xRyR, d2FdXdY[i][j][kp1] * xRyR, d2FdXdY[ip1][j][kp1] * xRyR, d2FdXdY[i][jp1][kp1] * xRyR, d2FdXdY[ip1][jp1][kp1] * xRyR, d2FdXdZ[i][j][k] * xRzR, d2FdXdZ[ip1][j][k] * xRzR, d2FdXdZ[i][jp1][k] * xRzR, d2FdXdZ[ip1][jp1][k] * xRzR, d2FdXdZ[i][j][kp1] * xRzR, d2FdXdZ[ip1][j][kp1] * xRzR, d2FdXdZ[i][jp1][kp1] * xRzR, d2FdXdZ[ip1][jp1][kp1] * xRzR, d2FdYdZ[i][j][k] * yRzR, d2FdYdZ[ip1][j][k] * yRzR, d2FdYdZ[i][jp1][k] * yRzR, d2FdYdZ[ip1][jp1][k] * yRzR, d2FdYdZ[i][j][kp1] * yRzR, d2FdYdZ[ip1][j][kp1] * yRzR, d2FdYdZ[i][jp1][kp1] * yRzR, d2FdYdZ[ip1][jp1][kp1] * yRzR, d3FdXdYdZ[i][j][k] * xRyRzR, d3FdXdYdZ[ip1][j][k] * xRyRzR, d3FdXdYdZ[i][jp1][k] * xRyRzR, 
d3FdXdYdZ[ip1][jp1][k] * xRyRzR, d3FdXdYdZ[i][j][kp1] * xRyRzR, d3FdXdYdZ[ip1][j][kp1] * xRyRzR, d3FdXdYdZ[i][jp1][kp1] * xRyRzR, d3FdXdYdZ[ip1][jp1][kp1] * xRyRzR, }; splines[i][j][k] = new TricubicFunction(computeCoefficients(beta)); } } } } /** * {@inheritDoc} * * @throws MathIllegalArgumentException if any of the variables is outside its interpolation range. */ @Override public double value(double x, double y, double z) throws MathIllegalArgumentException { final int i = searchIndex(x, xval); if (i == -1) { throw new MathIllegalArgumentException(LocalizedCoreFormats.OUT_OF_RANGE_SIMPLE, x, xval[0], xval[xval.length - 1]); } final int j = searchIndex(y, yval); if (j == -1) { throw new MathIllegalArgumentException(LocalizedCoreFormats.OUT_OF_RANGE_SIMPLE, y, yval[0], yval[yval.length - 1]); } final int k = searchIndex(z, zval); if (k == -1) { throw new MathIllegalArgumentException(LocalizedCoreFormats.OUT_OF_RANGE_SIMPLE, z, zval[0], zval[zval.length - 1]); } final double xN = (x - xval[i]) / (xval[i + 1] - xval[i]); final double yN = (y - yval[j]) / (yval[j + 1] - yval[j]); final double zN = (z - zval[k]) / (zval[k + 1] - zval[k]); return splines[i][j][k].value(xN, yN, zN); } /** * Indicates whether a point is within the interpolation range. * * @param x First coordinate. * @param y Second coordinate. * @param z Third coordinate. * @return {@code true} if (x, y, z) is a valid point. */ public boolean isValidPoint(double x, double y, double z) { if (x < xval[0] || x > xval[xval.length - 1] || y < yval[0] || y > yval[yval.length - 1] || z < zval[0] || z > zval[zval.length - 1]) { return false; } else { return true; } } /** * @param c Coordinate. * @param val Coordinate samples. * @return the index in {@code val} corresponding to the interval containing {@code c}, or {@code -1} * if {@code c} is out of the range defined by the end values of {@code val}. 
*/ private int searchIndex(double c, double[] val) { if (c < val[0]) { return -1; } final int max = val.length; for (int i = 1; i < max; i++) { if (c <= val[i]) { return i - 1; } } return -1; } /** * Compute the spline coefficients from the list of function values and * function partial derivatives values at the four corners of a grid * element. They must be specified in the following order: * <ul> * <li>f(0,0,0)</li> * <li>f(1,0,0)</li> * <li>f(0,1,0)</li> * <li>f(1,1,0)</li> * <li>f(0,0,1)</li> * <li>f(1,0,1)</li> * <li>f(0,1,1)</li> * <li>f(1,1,1)</li> * * <li>f<sub>x</sub>(0,0,0)</li> * <li>... <em>(same order as above)</em></li> * <li>f<sub>x</sub>(1,1,1)</li> * * <li>f<sub>y</sub>(0,0,0)</li> * <li>... <em>(same order as above)</em></li> * <li>f<sub>y</sub>(1,1,1)</li> * * <li>f<sub>z</sub>(0,0,0)</li> * <li>... <em>(same order as above)</em></li> * <li>f<sub>z</sub>(1,1,1)</li> * * <li>f<sub>xy</sub>(0,0,0)</li> * <li>... <em>(same order as above)</em></li> * <li>f<sub>xy</sub>(1,1,1)</li> * * <li>f<sub>xz</sub>(0,0,0)</li> * <li>... <em>(same order as above)</em></li> * <li>f<sub>xz</sub>(1,1,1)</li> * * <li>f<sub>yz</sub>(0,0,0)</li> * <li>... <em>(same order as above)</em></li> * <li>f<sub>yz</sub>(1,1,1)</li> * * <li>f<sub>xyz</sub>(0,0,0)</li> * <li>... <em>(same order as above)</em></li> * <li>f<sub>xyz</sub>(1,1,1)</li> * </ul> * where the subscripts indicate the partial derivative with respect to * the corresponding variable(s). * * @param beta List of function values and function partial derivatives values. * @return the spline coefficients. */ private double[] computeCoefficients(double[] beta) { final int sz = 64; final double[] a = new double[sz]; for (int i = 0; i < sz; i++) { double result = 0; final double[] row = AINV[i]; for (int j = 0; j < sz; j++) { result += row[j] * beta[j]; } a[i] = result; } return a; } } /** * 3D-spline function. * */ class TricubicFunction implements TrivariateFunction { /** Number of points. 
*/ private static final short N = 4; /** Coefficients */ private final double[][][] a = new double[N][N][N]; /** * @param aV List of spline coefficients. */ TricubicFunction(double[] aV) { for (int i = 0; i < N; i++) { for (int j = 0; j < N; j++) { for (int k = 0; k < N; k++) { a[i][j][k] = aV[i + N * (j + N * k)]; } } } } /** * @param x x-coordinate of the interpolation point. * @param y y-coordinate of the interpolation point. * @param z z-coordinate of the interpolation point. * @return the interpolated value. * @throws MathIllegalArgumentException if {@code x}, {@code y} or * {@code z} are not in the interval {@code [0, 1]}. */ @Override public double value(double x, double y, double z) throws MathIllegalArgumentException { MathUtils.checkRangeInclusive(x, 0, 1); MathUtils.checkRangeInclusive(y, 0, 1); MathUtils.checkRangeInclusive(z, 0, 1); final double x2 = x * x; final double x3 = x2 * x; final double[] pX = { 1, x, x2, x3 }; final double y2 = y * y; final double y3 = y2 * y; final double[] pY = { 1, y, y2, y3 }; final double z2 = z * z; final double z3 = z2 * z; final double[] pZ = { 1, z, z2, z3 }; double result = 0; for (int i = 0; i < N; i++) { for (int j = 0; j < N; j++) { for (int k = 0; k < N; k++) { result += a[i][j][k] * pX[i] * pY[j] * pZ[k]; } } } return result; } }
/* * Copyright 2014 toxbee.se * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package se.toxbee.sleepfighter.activity; import android.annotation.TargetApi; import android.app.ActionBar; import android.os.Build; import android.os.Bundle; import android.preference.CheckBoxPreference; import android.preference.Preference; import android.preference.Preference.OnPreferenceChangeListener; import android.preference.PreferenceActivity; import android.preference.PreferenceCategory; import android.view.MenuItem; import se.toxbee.sleepfighter.R; import se.toxbee.sleepfighter.android.utils.ActivityUtils; import se.toxbee.sleepfighter.android.utils.ResourcesDynamicUtil; import se.toxbee.sleepfighter.challenge.ChallengeParamsReadWriter; import se.toxbee.sleepfighter.challenge.ChallengePrototypeDefinition; import se.toxbee.sleepfighter.challenge.ChallengePrototypeDefinition.ParameterDefinition; import se.toxbee.sleepfighter.challenge.ChallengePrototypeDefinition.PrimitiveValueType; import se.toxbee.sleepfighter.challenge.factory.ChallengeFactory; import se.toxbee.sleepfighter.helper.AlarmIntentHelper; import se.toxbee.sleepfighter.model.Alarm; import se.toxbee.sleepfighter.model.challenge.ChallengeConfigSet; import se.toxbee.sleepfighter.model.challenge.ChallengeType; import se.toxbee.sleepfighter.utils.string.StringUtils; /** * ChallengeParamsSettingsActivity is the activity for configuring the parameters/settings for one ChallengeType. 
* * @author Centril<twingoow@gmail.com> / Mazdak Farrokhzad. * @version 1.0 * @since Oct 4, 2013 */ public class ChallengeParamsSettingsActivity extends PreferenceActivity { public static final String EXTRAS_CHALLENGE_TYPE = "challenge_type"; private Alarm alarm; private ChallengeConfigSet challengeSet; private ChallengeParamsReadWriter readWriter; private ChallengePrototypeDefinition definition; private PreferenceCategory preferenceCategory; @TargetApi( Build.VERSION_CODES.HONEYCOMB ) private void setupActionBar() { if ( Build.VERSION.SDK_INT >= 11 ) { ActionBar actionBar = this.getActionBar(); actionBar.setTitle( this.getActivityTitle() ); ActivityUtils.setupStandardActionBar( this ); } } @Override protected void onCreate( Bundle savedInstanceState ) { super.onCreate( savedInstanceState ); this.alarm = AlarmIntentHelper.fetchAlarmOrPreset( this ); this.challengeSet = this.alarm.getChallengeSet(); this.fetchDefinition(); this.readWriter = new ChallengeParamsReadWriter( this.challengeSet, this.definition.getType() ); this.setupPreferenceCategory(); for ( ParameterDefinition paramDef : definition.get() ) { this.addPreference( paramDef ); } this.setupDependers(); this.setupActionBar(); } /** * Sets up any dependencies there might be. */ private void setupDependers() { for ( final ParameterDefinition paramDef : definition.get() ) { if(paramDef.getDependers() != null) { final Preference pref = findPreference(paramDef.getKey()); // Disable the dependent prefs if paramDef has been set to false. 
pref.setOnPreferenceChangeListener(null); pref.setOnPreferenceChangeListener(new OnPreferenceChangeListener() { @Override public boolean onPreferenceChange(Preference preference, Object newValue) { handleChange( preference, newValue ); final boolean b = (Boolean)newValue; for(final String depender : paramDef.getDependers()) { final Preference dependentPref = findPreference(depender); dependentPref.setEnabled(b); } return true; } }); // set the initial disabled value of the dependers, when you first enter this // preference menu. for(final String depender : paramDef.getDependers()) { final Preference dependentPref = findPreference(depender); dependentPref.setEnabled(this.readWriter.getBoolean( paramDef.getKey(), (Boolean) paramDef.getDefaultValue() )); } } } } @Override public boolean onOptionsItemSelected(MenuItem item) { switch (item.getItemId()) { case android.R.id.home: finish(); return true; } return super.onOptionsItemSelected(item); } /** * Sets up the preference category to add preferences to. */ @SuppressWarnings( "deprecation" ) private void setupPreferenceCategory() { this.addPreferencesFromResource(R.xml.pref_alarm_challenge_params); this.preferenceCategory = (PreferenceCategory) this.findPreference("pref_challenge_param_category"); } /** * Fetches the challenge type from intent. */ private ChallengeType fetchChallengeType() { ChallengeType type = (ChallengeType) this.getIntent().getSerializableExtra( EXTRAS_CHALLENGE_TYPE ); if ( type == null ) { throw new IllegalArgumentException( "No ChallengeType was supplied!" ); } return type; } /** * Returns the definition for the handled challenge type. */ private void fetchDefinition() { this.definition = ChallengeFactory.getPrototypeDefinition( this.fetchChallengeType() ); } /** * Adds a preference for a given ParameterDefinition. * * @param paramDef the parameter definition. 
*/ private void addPreference( ParameterDefinition paramDef ) { Preference preference = this.makePreference( paramDef ); // Make sure nothing is stored in SharedPreferences preference.setPersistent( false ); preference.setKey( paramDef.getKey() ); // Set title. String name = this.getParamTitle( paramDef ); preference.setTitle(name); // Set summary if available. String summary = this.getParamSummary( paramDef ); if ( summary != null ) { preference.setSummary( summary ); } // Set listener for change. preference.setOnPreferenceChangeListener(new OnPreferenceChangeListener() { @Override public boolean onPreferenceChange(Preference preference, Object newValue) { handleChange( preference, newValue ); return true; } }); this.preferenceCategory.addPreference( preference ); } /** * Makes a preference from a ParameterDefinition. * * @param paramDef the definition. * @return the preference. */ private Preference makePreference( ParameterDefinition paramDef ) { Preference preference = null; // Handle the various types. if ( paramDef.getType() instanceof PrimitiveValueType ) { preference = this.makePrimitivePreference( paramDef ); } return preference; } /** * Handles a change in preference. * * @param preference the preference that was changed. * @param newValue the new value. */ private void handleChange( Preference preference, Object newValue ) { String key = preference.getKey(); ParameterDefinition paramDef = this.definition.get( key ); if ( paramDef.getType() instanceof PrimitiveValueType ) { this.handlePrimitiveChange( key, paramDef, newValue ); } } /** * Creates a preference for primitive value types. * * @param paramDef the parameter definition. * @return the made preference. 
*/ private Preference makePrimitivePreference( ParameterDefinition paramDef ) { Preference preference = null; switch ( (PrimitiveValueType) paramDef.getType() ) { case BOOLEAN: preference = new CheckBoxPreference(this); ((CheckBoxPreference) preference).setChecked( this.readWriter.getBoolean( paramDef.getKey(), (Boolean) paramDef.getDefaultValue() ) ); break; // TODO case DOUBLE: break; case FLOAT: break; case INTEGER: break; case LONG: break; case STRING: break; default: break; } return preference; } /** * Handles a change for a primitive value type. * * @param key the parameter key. * @param paramDef the parameter definition. * @param newValue the new value. */ private void handlePrimitiveChange( String key, ParameterDefinition paramDef, Object newValue ) { switch ( (PrimitiveValueType) paramDef.getType() ) { case BOOLEAN: this.readWriter.setBoolean( key, (Boolean) newValue ); break; // TODO case DOUBLE: break; case FLOAT: break; case INTEGER: break; case LONG: break; case STRING: break; default: break; } } /** * Returns the activity title to use from string values. * * @return the title. */ private String getActivityTitle() { return getChallengeString( "settings_title", true ); } /** * Returns the title for the parameter from string values. * * @param paramDef the parameter definition. * @return the title. */ private String getParamTitle( ParameterDefinition paramDef ) { return getChallengeString( paramDef.getKey() + "_title", true ); } /** * Returns the summary for the parameter from string values. * * @param paramDef the parameter definition. * @return the summary. */ private String getParamSummary( ParameterDefinition paramDef ) { return getChallengeString( paramDef.getKey() + "_summary", false ); } /** * Returns a string from resources for given challenge type & a case_string string needed. * * @param case_string the case as a string, e.g "title", "summary", etc. * @param checked whether or not to throw exception if string wasn't available. 
* @return the string. */ private String getChallengeString( String case_string, boolean checked ) { String name = "challenge_" + StringUtils.castLower( this.definition.getType().toString() ) + "_" + case_string; return checked ? ResourcesDynamicUtil.getStringCheck( name, this ) : ResourcesDynamicUtil.getString( name, this ); } @SuppressWarnings( "deprecation" ) public Preference findPreference( CharSequence key ) { return super.findPreference( key ); } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.runners.direct; import static com.google.common.base.Preconditions.checkNotNull; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.util.concurrent.MoreExecutors; import java.util.Collection; import java.util.EnumSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import javax.annotation.Nullable; import org.apache.beam.runners.core.ExecutionContext; import org.apache.beam.runners.core.ReadyCheckingSideInputReader; import org.apache.beam.runners.core.SideInputReader; import org.apache.beam.runners.core.TimerInternals.TimerData; import org.apache.beam.runners.direct.CommittedResult.OutputType; import org.apache.beam.runners.direct.DirectGroupByKey.DirectGroupByKeyOnly; import org.apache.beam.runners.direct.WatermarkManager.FiredTimers; import org.apache.beam.runners.direct.WatermarkManager.TransformWatermarks; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.runners.AppliedPTransform; import 
org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.Trigger;
import org.apache.beam.sdk.util.WindowedValue;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PCollectionView;
import org.apache.beam.sdk.values.PValue;
import org.apache.beam.sdk.values.WindowingStrategy;
import org.joda.time.Instant;

/**
 * The evaluation context for a specific pipeline being executed by the
 * {@link DirectRunner}. Contains state shared within the execution across all
 * transforms.
 *
 * <p>{@link EvaluationContext} contains shared state for an execution of the
 * {@link DirectRunner} that can be used while evaluating a {@link PTransform}. This
 * consists of views into underlying state and watermark implementations, access to read and write
 * {@link PCollectionView PCollectionViews}, and managing the
 * {@link ExecutionContext ExecutionContexts}. This includes executing callbacks asynchronously when
 * state changes to the appropriate point (e.g. when a {@link PCollectionView} is requested and
 * known to be empty).
 *
 * <p>{@link EvaluationContext} also handles results by committing finalizing bundles based
 * on the current global state and updating the global state appropriately. This includes updating
 * the per-{@link StepAndKey} state, updating global watermarks, and executing any callbacks that
 * can be executed.
 */
class EvaluationContext {
  /**
   * The graph representing this {@link Pipeline}.
   */
  private final DirectGraph graph;

  /** The options that were used to create this {@link Pipeline}. */
  private final DirectOptions options;

  private final Clock clock;

  private final BundleFactory bundleFactory;

  /** The current processing time and event time watermarks and timers. */
  private final WatermarkManager watermarkManager;

  /** Executes callbacks based on the progression of the watermark.
   */
  private final WatermarkCallbackExecutor callbackExecutor;

  /** The stateInternals of the world, by applied PTransform and key. */
  private final ConcurrentMap<StepAndKey, CopyOnAccessInMemoryStateInternals>
      applicationStateInternals;

  private final SideInputContainer sideInputContainer;

  private final DirectMetrics metrics;

  private final Set<PValue> keyedPValues;

  /** Static factory; delegates to the private constructor. */
  public static EvaluationContext create(
      DirectOptions options,
      Clock clock,
      BundleFactory bundleFactory,
      DirectGraph graph,
      Set<PValue> keyedPValues) {
    return new EvaluationContext(options, clock, bundleFactory, graph, keyedPValues);
  }

  private EvaluationContext(
      DirectOptions options,
      Clock clock,
      BundleFactory bundleFactory,
      DirectGraph graph,
      Set<PValue> keyedPValues) {
    this.options = checkNotNull(options);
    this.clock = clock;
    this.bundleFactory = checkNotNull(bundleFactory);
    this.graph = checkNotNull(graph);
    this.keyedPValues = keyedPValues;

    this.watermarkManager = WatermarkManager.create(clock, graph);
    this.sideInputContainer = SideInputContainer.create(this, graph.getViews());
    this.applicationStateInternals = new ConcurrentHashMap<>();
    this.metrics = new DirectMetrics();

    // NOTE(review): callbacks run on the calling thread (directExecutor) — presumably
    // callbacks are expected to be cheap; confirm against WatermarkCallbackExecutor usage.
    this.callbackExecutor = WatermarkCallbackExecutor.create(MoreExecutors.directExecutor());
  }

  /** Seeds the watermark manager with the initial per-transform input bundles. */
  public void initialize(
      Map<AppliedPTransform<?, ?, ?>, ? extends Iterable<CommittedBundle<?>>> initialInputs) {
    watermarkManager.initialize(initialInputs);
  }

  /**
   * Handle the provided {@link TransformResult}, produced after evaluating the provided
   * {@link CommittedBundle} (potentially null, if the result of a root {@link PTransform}).
   *
   * <p>The result is the output of running the transform contained in the
   * {@link TransformResult} on the contents of the provided bundle.
   *
   * @param completedBundle the bundle that was processed to produce the result. Potentially
   *                        {@code null} if the transform that produced the result is a root
   *                        transform
   * @param completedTimers the timers that were delivered to produce the {@code completedBundle},
   *                        or an empty iterable if no timers were delivered
   * @param result the result of evaluating the input bundle
   * @return the committed bundles contained within the handled {@code result}
   */
  public CommittedResult handleResult(
      @Nullable CommittedBundle<?> completedBundle,
      Iterable<TimerData> completedTimers,
      TransformResult<?> result) {
    Iterable<? extends CommittedBundle<?>> committedBundles =
        commitBundles(result.getOutputBundles());
    metrics.commitLogical(completedBundle, result.getLogicalMetricUpdates());

    // Update watermarks and timers
    EnumSet<OutputType> outputTypes = EnumSet.copyOf(result.getOutputTypes());
    // Reflect whether any non-empty bundle actually survived the commit.
    if (Iterables.isEmpty(committedBundles)) {
      outputTypes.remove(OutputType.BUNDLE);
    } else {
      outputTypes.add(OutputType.BUNDLE);
    }
    CommittedResult committedResult = CommittedResult.create(result,
        completedBundle == null
            ? null
            : completedBundle.withElements((Iterable) result.getUnprocessedElements()),
        committedBundles,
        outputTypes);
    // Update state internals
    CopyOnAccessInMemoryStateInternals theirState = result.getState();
    if (theirState != null) {
      CopyOnAccessInMemoryStateInternals committedState = theirState.commit();
      StepAndKey stepAndKey =
          StepAndKey.of(
              result.getTransform(), completedBundle == null ? null : completedBundle.getKey());
      // Keep only non-empty state; remove to avoid leaking empty entries.
      if (!committedState.isEmpty()) {
        applicationStateInternals.put(stepAndKey, committedState);
      } else {
        applicationStateInternals.remove(stepAndKey);
      }
    }
    // Watermarks are updated last to ensure visibility of any global state before progress is
    // permitted
    watermarkManager.updateWatermarks(
        completedBundle,
        result.getTimerUpdate().withCompletedTimers(completedTimers),
        committedResult,
        result.getWatermarkHold());
    return committedResult;
  }

  /**
   * Commits each uncommitted output bundle, dropping bundles that end up empty
   * (they do not affect watermarks and should not trigger downstream work).
   */
  private Iterable<? extends CommittedBundle<?>> commitBundles(
      Iterable<? extends UncommittedBundle<?>> bundles) {
    ImmutableList.Builder<CommittedBundle<?>> completed = ImmutableList.builder();
    for (UncommittedBundle<?> inProgress : bundles) {
      AppliedPTransform<?, ?, ?> producing = graph.getProducer(inProgress.getPCollection());
      TransformWatermarks watermarks = watermarkManager.getWatermarks(producing);
      CommittedBundle<?> committed =
          inProgress.commit(watermarks.getSynchronizedProcessingOutputTime());
      // Empty bundles don't impact watermarks and shouldn't trigger downstream execution, so
      // filter them out
      if (!Iterables.isEmpty(committed.getElements())) {
        completed.add(committed);
      }
    }
    return completed.build();
  }

  /** Fires watermark callbacks for every primitive transform in the graph. */
  private void fireAllAvailableCallbacks() {
    for (AppliedPTransform<?, ?, ?> transform : graph.getPrimitiveTransforms()) {
      fireAvailableCallbacks(transform);
    }
  }

  /** Fires any callbacks registered at or before the transform's current output watermark. */
  private void fireAvailableCallbacks(AppliedPTransform<?, ?, ?> producingTransform) {
    TransformWatermarks watermarks = watermarkManager.getWatermarks(producingTransform);
    Instant outputWatermark = watermarks.getOutputWatermark();
    callbackExecutor.fireForWatermark(producingTransform, outputWatermark);
  }

  /**
   * Create a {@link UncommittedBundle} for use by a source.
   */
  public <T> UncommittedBundle<T> createRootBundle() {
    return bundleFactory.createRootBundle();
  }

  /**
   * Create a {@link UncommittedBundle} whose elements belong to the specified {@link
   * PCollection}.
   */
  public <T> UncommittedBundle<T> createBundle(PCollection<T> output) {
    return bundleFactory.createBundle(output);
  }

  /**
   * Create a {@link UncommittedBundle} with the specified keys at the specified step. For use by
   * {@link DirectGroupByKeyOnly} {@link PTransform PTransforms}.
   */
  public <K, T> UncommittedBundle<T> createKeyedBundle(
      StructuralKey<K> key, PCollection<T> output) {
    return bundleFactory.createKeyedBundle(key, output);
  }

  /**
   * Indicate whether or not this {@link PCollection} has been determined to be
   * keyed.
   */
  public <T> boolean isKeyed(PValue pValue) {
    return keyedPValues.contains(pValue);
  }

  /**
   * Create a {@link PCollectionViewWriter}, whose elements will be used in the provided
   * {@link PCollectionView}.
   */
  public <ElemT, ViewT> PCollectionViewWriter<ElemT, ViewT> createPCollectionViewWriter(
      PCollection<Iterable<ElemT>> input, final PCollectionView<ViewT> output) {
    return new PCollectionViewWriter<ElemT, ViewT>() {
      @Override
      public void add(Iterable<WindowedValue<ElemT>> values) {
        sideInputContainer.write(output, values);
      }
    };
  }

  /**
   * Schedule a callback to be executed after output would be produced for the given window
   * if there had been input.
   *
   * <p>Output would be produced when the watermark for a {@link PValue} passes the point at
   * which the trigger for the specified window (with the specified windowing strategy) must have
   * fired from the perspective of that {@link PValue}, as specified by the value of
   * {@link Trigger#getWatermarkThatGuaranteesFiring(BoundedWindow)} for the trigger of the
   * {@link WindowingStrategy}. When the callback has fired, either values will have been produced
   * for a key in that window, the window is empty, or all elements in the window are late. The
   * callback will be executed regardless of whether values have been produced.
   */
  public void scheduleAfterOutputWouldBeProduced(
      PValue value,
      BoundedWindow window,
      WindowingStrategy<?, ?> windowingStrategy,
      Runnable runnable) {
    AppliedPTransform<?, ?, ?> producing = graph.getProducer(value);
    callbackExecutor.callOnGuaranteedFiring(producing, window, windowingStrategy, runnable);

    // The watermark may already be past the firing point; fire eagerly so the callback
    // is not missed.
    fireAvailableCallbacks(producing);
  }

  /**
   * Schedule a callback to be executed after the given window is expired.
   *
   * <p>For example, upstream state associated with the window may be cleared.
   */
  public void scheduleAfterWindowExpiration(
      AppliedPTransform<?, ?, ?> producing,
      BoundedWindow window,
      WindowingStrategy<?, ?> windowingStrategy,
      Runnable runnable) {
    callbackExecutor.callOnWindowExpiration(producing, window, windowingStrategy, runnable);

    fireAvailableCallbacks(producing);
  }

  /**
   * Get the options used by this {@link Pipeline}.
   */
  public DirectOptions getPipelineOptions() {
    return options;
  }

  /**
   * Get an {@link ExecutionContext} for the provided {@link AppliedPTransform} and key.
   */
  public DirectExecutionContext getExecutionContext(
      AppliedPTransform<?, ?, ?> application, StructuralKey<?> key) {
    StepAndKey stepAndKey = StepAndKey.of(application, key);
    return new DirectExecutionContext(
        clock,
        key,
        (CopyOnAccessInMemoryStateInternals) applicationStateInternals.get(stepAndKey),
        watermarkManager.getWatermarks(application));
  }

  /**
   * Get the Step Name for the provided application.
   */
  String getStepName(AppliedPTransform<?, ?, ?> application) {
    return graph.getStepName(application);
  }

  /** Returns all of the steps in this {@link Pipeline}. */
  Collection<AppliedPTransform<?, ?, ?>> getSteps() {
    return graph.getPrimitiveTransforms();
  }

  /**
   * Returns a {@link ReadyCheckingSideInputReader} capable of reading the provided
   * {@link PCollectionView PCollectionViews}.
   *
   * @param sideInputs the {@link PCollectionView PCollectionViews} the result should be able to
   *                   read
   * @return a {@link SideInputReader} that can read all of the provided {@link PCollectionView
   *         PCollectionViews}
   */
  public ReadyCheckingSideInputReader createSideInputReader(
      final List<PCollectionView<?>> sideInputs) {
    return sideInputContainer.createReaderForViews(sideInputs);
  }

  /** Returns the metrics container for this pipeline. */
  public DirectMetrics getMetrics() {
    return metrics;
  }

  @VisibleForTesting
  void forceRefresh() {
    watermarkManager.refreshAll();
    fireAllAvailableCallbacks();
  }

  /**
   * Extracts all timers that have been fired and have not already been extracted.
   *
   * <p>This is a destructive operation. Timers will only appear in the result of this method once
   * for each time they are set.
   */
  public Collection<FiredTimers> extractFiredTimers() {
    forceRefresh();
    return watermarkManager.extractFiredTimers();
  }

  /**
   * Returns true if the step will not produce additional output.
   */
  public boolean isDone(AppliedPTransform<?, ?, ?> transform) {
    // the PTransform is done only if watermark is at the max value
    Instant stepWatermark = watermarkManager.getWatermarks(transform).getOutputWatermark();
    return !stepWatermark.isBefore(BoundedWindow.TIMESTAMP_MAX_VALUE);
  }

  /**
   * Returns true if all steps are done.
   */
  public boolean isDone() {
    for (AppliedPTransform<?, ?, ?> transform : graph.getPrimitiveTransforms()) {
      if (!isDone(transform)) {
        return false;
      }
    }
    return true;
  }

  /** Returns the current time according to the injected {@link Clock}. */
  public Instant now() {
    return clock.now();
  }

  Clock getClock() {
    return clock;
  }
}
package com.github.neuralnetworks.training; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.IntStream; import com.github.neuralnetworks.architecture.ConnectionFactory; import com.github.neuralnetworks.architecture.Connections; import com.github.neuralnetworks.architecture.Layer; import com.github.neuralnetworks.architecture.NeuralNetwork; import com.github.neuralnetworks.architecture.NeuralNetworkImpl; import com.github.neuralnetworks.architecture.WeightsConnections; import com.github.neuralnetworks.architecture.types.DNN; import com.github.neuralnetworks.architecture.types.NNFactory; import com.github.neuralnetworks.architecture.types.RBM; import com.github.neuralnetworks.calculation.BreadthFirstOrderStrategy; import com.github.neuralnetworks.calculation.ConnectionCalculator; import com.github.neuralnetworks.calculation.LayerCalculatorImpl; import com.github.neuralnetworks.calculation.LayerOrderStrategy.ConnectionCandidate; import com.github.neuralnetworks.calculation.OutputError; import com.github.neuralnetworks.calculation.RBMLayerCalculator; import com.github.neuralnetworks.calculation.neuronfunctions.AparapiAveragePooling2D; import com.github.neuralnetworks.calculation.neuronfunctions.AparapiConv2DReLU; import com.github.neuralnetworks.calculation.neuronfunctions.AparapiConv2DSigmoid; import com.github.neuralnetworks.calculation.neuronfunctions.AparapiConv2DSoftReLU; import com.github.neuralnetworks.calculation.neuronfunctions.AparapiConv2DTanh; import com.github.neuralnetworks.calculation.neuronfunctions.AparapiMaxPooling2D; import com.github.neuralnetworks.calculation.neuronfunctions.AparapiMaxout; import com.github.neuralnetworks.calculation.neuronfunctions.AparapiReLU; import com.github.neuralnetworks.calculation.neuronfunctions.AparapiSigmoid; import 
com.github.neuralnetworks.calculation.neuronfunctions.AparapiSoftReLU; import com.github.neuralnetworks.calculation.neuronfunctions.AparapiStochasticPooling2D; import com.github.neuralnetworks.calculation.neuronfunctions.AparapiTanh; import com.github.neuralnetworks.calculation.neuronfunctions.BernoulliDistribution; import com.github.neuralnetworks.calculation.neuronfunctions.ConnectionCalculatorConv; import com.github.neuralnetworks.calculation.neuronfunctions.ConnectionCalculatorFullyConnected; import com.github.neuralnetworks.tensor.Tensor; import com.github.neuralnetworks.tensor.TensorFactory; import com.github.neuralnetworks.training.backpropagation.BackPropagationAutoencoder; import com.github.neuralnetworks.training.backpropagation.BackPropagationConv2D; import com.github.neuralnetworks.training.backpropagation.BackPropagationConv2DReLU; import com.github.neuralnetworks.training.backpropagation.BackPropagationConv2DSigmoid; import com.github.neuralnetworks.training.backpropagation.BackPropagationConv2DSoftReLU; import com.github.neuralnetworks.training.backpropagation.BackPropagationConv2DTanh; import com.github.neuralnetworks.training.backpropagation.BackPropagationLayerCalculatorImpl; import com.github.neuralnetworks.training.backpropagation.BackPropagationReLU; import com.github.neuralnetworks.training.backpropagation.BackPropagationSigmoid; import com.github.neuralnetworks.training.backpropagation.BackPropagationSoftReLU; import com.github.neuralnetworks.training.backpropagation.BackPropagationTanh; import com.github.neuralnetworks.training.backpropagation.BackPropagationTrainer; import com.github.neuralnetworks.training.backpropagation.BackpropagationAveragePooling2D; import com.github.neuralnetworks.training.backpropagation.BackpropagationMaxPooling2D; import com.github.neuralnetworks.training.backpropagation.BackpropagationMaxout; import com.github.neuralnetworks.training.backpropagation.MSEDerivative; import 
com.github.neuralnetworks.training.random.NNRandomInitializer;
import com.github.neuralnetworks.training.rbm.AparapiCDTrainer;
import com.github.neuralnetworks.training.rbm.DBNTrainer;
import com.github.neuralnetworks.util.Constants;
import com.github.neuralnetworks.util.Properties;
import com.github.neuralnetworks.util.Util;

/**
 * Factory for trainers. Each factory method assembles a {@link Properties} bag
 * and hands it to the corresponding trainer constructor.
 */
public class TrainerFactory {

    /**
     * Backpropagation trainer. Depends on the LayerCalculator of the network:
     * the backpropagation layer calculator is derived from the network's
     * feed-forward calculators via {@link #bplc}.
     *
     * @param nn network to train
     * @param trainingSet training input provider
     * @param testingSet testing input provider
     * @param error output error function used during testing
     * @param rand weight initializer
     * @param learningRate
     * @param momentum
     * @param l1weightDecay
     * @param l2weightDecay
     * @param dropoutRate
     * @param trainingBatchSize
     * @param testBatchSize
     * @param epochs
     * @return configured backpropagation trainer
     */
    public static BackPropagationTrainer<?> backPropagation(NeuralNetworkImpl nn, TrainingInputProvider trainingSet, TrainingInputProvider testingSet, OutputError error, NNRandomInitializer rand, float learningRate, float momentum, float l1weightDecay, float l2weightDecay, float dropoutRate, int trainingBatchSize, int testBatchSize, int epochs) {
        Properties p = backpropProperties(nn, trainingSet, testingSet, error, rand, learningRate, momentum, l1weightDecay, l2weightDecay, dropoutRate, trainingBatchSize, testBatchSize, epochs);
        p.setParameter(Constants.BACKPROPAGATION, bplc(nn, p));
        return new BackPropagationTrainer<NeuralNetwork>(p);
    }

    /**
     * Builds the backpropagation layer calculator for {@code nn} by walking the
     * network breadth-first from the output layer and pairing each feed-forward
     * connection calculator with its backpropagation counterpart.
     */
    private static BackPropagationLayerCalculatorImpl bplc(NeuralNetworkImpl nn, Properties p) {
        BackPropagationLayerCalculatorImpl blc = new BackPropagationLayerCalculatorImpl();
        LayerCalculatorImpl lc = (LayerCalculatorImpl) nn.getLayerCalculator();
        List<ConnectionCandidate> connections = new BreadthFirstOrderStrategy(nn, nn.getOutputLayer()).order();

        if (connections.size() > 0) {
            Layer current = null;
            List<Connections> chunk = new ArrayList<>();
            // tracks convolutional layers (because their calculations are interlinked)
            Set<Layer> convCalculatedLayers = new HashSet<>();
            convCalculatedLayers.add(nn.getOutputLayer());

            for (int i = 0; i < connections.size(); i++) {
                ConnectionCandidate c = connections.get(i);
                chunk.add(c.connection);

                // a "chunk" is the run of consecutive candidates that share a target layer;
                // it is processed once the last candidate for that target has been collected
                if (i == connections.size() - 1 || connections.get(i + 1).target != c.target) {
                    current = c.target;

                    ConnectionCalculator result = null;
                    ConnectionCalculator ffcc = null;
                    // resolve the feed-forward calculator whose type determines the BP counterpart
                    if (Util.isBias(current)) {
                        ffcc = lc.getConnectionCalculator(current.getConnections().get(0).getOutputLayer());
                    } else if (Util.isConvolutional(current) || Util.isSubsampling(current)) {
                        if (chunk.size() != 1) {
                            throw new IllegalArgumentException("Convolutional layer with more than one connection");
                        }
                        ffcc = lc.getConnectionCalculator(Util.getOppositeLayer(chunk.iterator().next(), current));
                    } else {
                        ffcc = lc.getConnectionCalculator(current);
                    }

                    // map the feed-forward calculator type to its backpropagation counterpart;
                    // order matters: AparapiSoftReLU is checked before AparapiReLU, and the
                    // pooling types before the generic ConnectionCalculatorConv fallback
                    if (ffcc instanceof AparapiSigmoid) {
                        result = new BackPropagationSigmoid(p);
                    } else if (ffcc instanceof AparapiTanh) {
                        result = new BackPropagationTanh(p);
                    } else if (ffcc instanceof AparapiSoftReLU) {
                        result = new BackPropagationSoftReLU(p);
                    } else if (ffcc instanceof AparapiReLU) {
                        result = new BackPropagationReLU(p);
                    } else if (ffcc instanceof AparapiMaxout) {
                        result = new BackpropagationMaxout(p);
                    } else if (ffcc instanceof AparapiMaxPooling2D || ffcc instanceof AparapiStochasticPooling2D) {
                        result = new BackpropagationMaxPooling2D();
                    } else if (ffcc instanceof AparapiAveragePooling2D) {
                        result = new BackpropagationAveragePooling2D();
                    } else if (ffcc instanceof ConnectionCalculatorConv) {
                        // convolutional layers are handled once per opposite layer; on the
                        // second encounter only the plain conv backpropagation is attached
                        Layer opposite = Util.getOppositeLayer(chunk.iterator().next(), current);
                        if (!convCalculatedLayers.contains(opposite)) {
                            convCalculatedLayers.add(opposite);

                            if (ffcc instanceof AparapiConv2DSigmoid) {
                                result = new BackPropagationConv2DSigmoid(p);
                            } else if (ffcc instanceof AparapiConv2DTanh) {
                                result = new BackPropagationConv2DTanh(p);
                            } else if (ffcc instanceof AparapiConv2DSoftReLU) {
                                result = new BackPropagationConv2DSoftReLU(p);
                            } else if (ffcc instanceof AparapiConv2DReLU) {
                                result = new BackPropagationConv2DReLU(p);
                            }
                        } else {
                            result = new BackPropagationConv2D(p);
                        }
                    }

                    if (result != null) {
                        blc.addConnectionCalculator(current, result);
                    }

                    chunk.clear();
                }
            }
        }

        return blc;
    }

    /**
     * Backpropagation trainer for an autoencoder. Same wiring as
     * {@link #backPropagation} but with dropout fixed to 0 and an input
     * corruption rate parameter instead.
     */
    public static BackPropagationAutoencoder backPropagationAutoencoder(NeuralNetworkImpl nn, TrainingInputProvider trainingSet, TrainingInputProvider testingSet, OutputError error, NNRandomInitializer rand, float learningRate, float momentum, float l1weightDecay, float l2weightDecay, float inputCorruptionRate, int trainingBatchSize, int testBatchSize, int epochs) {
        Properties p = backpropProperties(nn, trainingSet, testingSet, error, rand, learningRate, momentum, l1weightDecay, l2weightDecay, 0F, trainingBatchSize, testBatchSize, epochs);
        p.setParameter(Constants.CORRUPTION_LEVEL, inputCorruptionRate);
        p.setParameter(Constants.BACKPROPAGATION, bplc(nn, p));
        return new BackPropagationAutoencoder(p);
    }

    /**
     * Assembles the common property bag shared by all backpropagation-based
     * trainers. The output error derivative defaults to MSE.
     */
    protected static Properties backpropProperties(NeuralNetworkImpl nn, TrainingInputProvider trainingSet, TrainingInputProvider testingSet, OutputError error, NNRandomInitializer rand, float learningRate, float momentum, float l1weightDecay, float l2weightDecay, float dropoutRate, int trainingBatchSize, int testBatchSize, int epochs) {
        Properties p = new Properties();
        p.setParameter(Constants.NEURAL_NETWORK, nn);
        p.setParameter(Constants.TRAINING_INPUT_PROVIDER, trainingSet);
        p.setParameter(Constants.TESTING_INPUT_PROVIDER, testingSet);
        p.setParameter(Constants.LEARNING_RATE, learningRate);
        p.setParameter(Constants.MOMENTUM, momentum);
        p.setParameter(Constants.L1_WEIGHT_DECAY, l1weightDecay);
        p.setParameter(Constants.L2_WEIGHT_DECAY, l2weightDecay);
        p.setParameter(Constants.OUTPUT_ERROR_DERIVATIVE, new MSEDerivative());
        p.setParameter(Constants.WEIGHT_UDPATES, weightUpdates(nn)); // [sic] constant name as declared in Constants
        p.setParameter(Constants.OUTPUT_ERROR, error);
        p.setParameter(Constants.RANDOM_INITIALIZER, rand);
        p.setParameter(Constants.TRAINING_BATCH_SIZE, trainingBatchSize);
        p.setParameter(Constants.TEST_BATCH_SIZE, testBatchSize);
        p.setParameter(Constants.EPOCHS, epochs);
        p.setParameter(Constants.DROPOUT_RATE, dropoutRate);

        return p;
    }

    public static AparapiCDTrainer
cdSoftReLUTrainer(RBM rbm, TrainingInputProvider trainingSet, TrainingInputProvider testingSet, OutputError error, NNRandomInitializer rand, float learningRate, float momentum, float l1weightDecay, float l2weightDecay, int gibbsSampling, int trainingBatchSize, int epochs, boolean isPersistentCD) {
        // Soft-ReLU contrastive divergence trainer. A BernoulliDistribution
        // pre-transfer function is attached to the negative-phase
        // hidden-to-visible calculator.
        rbm.setLayerCalculator(NNFactory.lcSoftRelu(rbm, null));
        RBMLayerCalculator lc = NNFactory.rbmSoftReluSoftRelu(rbm, trainingBatchSize);
        ConnectionCalculatorFullyConnected cc = (ConnectionCalculatorFullyConnected) lc.getNegPhaseHiddenToVisibleCC();
        cc.addPreTransferFunction(new BernoulliDistribution());

        return new AparapiCDTrainer(rbmProperties(rbm, lc, trainingSet, testingSet, error, rand, learningRate, momentum, l1weightDecay, l2weightDecay, gibbsSampling, trainingBatchSize, epochs, isPersistentCD));
    }

    /**
     * Sigmoid contrastive divergence trainer with a BernoulliDistribution
     * pre-transfer function on the negative-phase hidden-to-visible calculator
     * (compare {@code cdSigmoidTrainer}, which omits it).
     */
    public static AparapiCDTrainer cdSigmoidBinaryTrainer(RBM rbm, TrainingInputProvider trainingSet, TrainingInputProvider testingSet, OutputError error, NNRandomInitializer rand, float learningRate, float momentum, float l1weightDecay, float l2weightDecay, int gibbsSampling, int trainingBatchSize, int epochs, boolean isPersistentCD) {
        rbm.setLayerCalculator(NNFactory.lcSigmoid(rbm, null));
        RBMLayerCalculator lc = NNFactory.rbmSigmoidSigmoid(rbm, trainingBatchSize);
        ConnectionCalculatorFullyConnected cc = (ConnectionCalculatorFullyConnected) lc.getNegPhaseHiddenToVisibleCC();
        cc.addPreTransferFunction(new BernoulliDistribution());

        return new AparapiCDTrainer(rbmProperties(rbm, lc, trainingSet, testingSet, error, rand, learningRate, momentum, l1weightDecay, l2weightDecay, gibbsSampling, trainingBatchSize, epochs, isPersistentCD));
    }

    /** Sigmoid contrastive divergence trainer (no Bernoulli sampling step). */
    public static AparapiCDTrainer cdSigmoidTrainer(RBM rbm, TrainingInputProvider trainingSet, TrainingInputProvider testingSet, OutputError error, NNRandomInitializer rand, float learningRate, float momentum, float l1weightDecay, float l2weightDecay, int gibbsSampling, int trainingBatchSize, int epochs, boolean isPersistentCD) {
        rbm.setLayerCalculator(NNFactory.lcSigmoid(rbm, null));
        RBMLayerCalculator lc = NNFactory.rbmSigmoidSigmoid(rbm, trainingBatchSize);
        return new AparapiCDTrainer(rbmProperties(rbm, lc, trainingSet, testingSet, error, rand, learningRate, momentum, l1weightDecay, l2weightDecay, gibbsSampling, trainingBatchSize, epochs, isPersistentCD));
    }

    /**
     * Assembles the common property bag for contrastive-divergence RBM trainers.
     * Note: unlike {@link #backpropProperties}, there is no test batch size,
     * dropout, or weight-update parameter here.
     */
    protected static Properties rbmProperties(RBM rbm, RBMLayerCalculator lc, TrainingInputProvider trainingSet, TrainingInputProvider testingSet, OutputError error, NNRandomInitializer rand, float learningRate, float momentum, float l1weightDecay, float l2weightDecay, int gibbsSampling, int trainingBatchSize, int epochs, boolean isPersistentCD) {
        Properties p = new Properties();
        p.setParameter(Constants.NEURAL_NETWORK, rbm);
        p.setParameter(Constants.TRAINING_INPUT_PROVIDER, trainingSet);
        p.setParameter(Constants.TESTING_INPUT_PROVIDER, testingSet);
        p.setParameter(Constants.LEARNING_RATE, learningRate);
        p.setParameter(Constants.MOMENTUM, momentum);
        p.setParameter(Constants.L1_WEIGHT_DECAY, l1weightDecay);
        p.setParameter(Constants.L2_WEIGHT_DECAY, l2weightDecay);
        p.setParameter(Constants.GIBBS_SAMPLING_COUNT, gibbsSampling);
        p.setParameter(Constants.OUTPUT_ERROR, error);
        p.setParameter(Constants.RANDOM_INITIALIZER, rand);
        p.setParameter(Constants.PERSISTENT_CD, isPersistentCD);
        p.setParameter(Constants.LAYER_CALCULATOR, lc);
        p.setParameter(Constants.TRAINING_BATCH_SIZE, trainingBatchSize);
        p.setParameter(Constants.EPOCHS, epochs);

        return p;
    }

    /** Layer-by-layer trainer for deep networks, delegating each layer to the supplied trainer. */
    public static DNNLayerTrainer dnnLayerTrainer(DNN<?> dnn, Map<NeuralNetwork, OneStepTrainer<?>> layerTrainers, TrainingInputProvider trainingSet, TrainingInputProvider testingSet, OutputError error) {
        return new DNNLayerTrainer(layerTrainerProperties(dnn, layerTrainers, trainingSet, testingSet, error));
    }

    /** Deep belief network trainer; same property bag as {@link #dnnLayerTrainer}. */
    public static DBNTrainer dbnTrainer(DNN<?> dnn, Map<NeuralNetwork, OneStepTrainer<?>> layerTrainers, TrainingInputProvider trainingSet, TrainingInputProvider testingSet, OutputError error) {
        return new DBNTrainer(layerTrainerProperties(dnn, layerTrainers, trainingSet, testingSet, error));
    }

    /**
     * @param nn
     * @return Weight update tensors, one per weights connection of the network
     */
    public static Map<Connections, Tensor> weightUpdates(NeuralNetworkImpl nn) {
        Map<Connections, Tensor> result = new HashMap<>();

        ConnectionFactory cf = nn.getProperties().getParameter(Constants.CONNECTION_FACTORY);
        List<Connections> connections = cf.getConnections().stream().filter(c -> c instanceof WeightsConnections).collect(Collectors.toList());
        // with shared weights, a single backing float array is reused for every update tensor
        float[] elements = cf.useSharedWeights() ? new float[((WeightsConnections) connections.get(0)).getWeights().getElements().length] : null;
        IntStream.range(0, connections.size()).forEach(i -> result.put(connections.get(i), TensorFactory.duplicate(((WeightsConnections) connections.get(i)).getWeights(), elements)));

        return result;
    }

    /** Property bag shared by {@link #dnnLayerTrainer} and {@link #dbnTrainer}. */
    protected static Properties layerTrainerProperties(DNN<?> dnn, Map<NeuralNetwork, OneStepTrainer<?>> layerTrainers, TrainingInputProvider trainingSet, TrainingInputProvider testingSet, OutputError error) {
        Properties p = new Properties();
        p.setParameter(Constants.NEURAL_NETWORK, dnn);
        p.setParameter(Constants.TRAINING_INPUT_PROVIDER, trainingSet);
        p.setParameter(Constants.TESTING_INPUT_PROVIDER, testingSet);
        p.setParameter(Constants.OUTPUT_ERROR, error);
        p.setParameter(Constants.LAYER_TRAINERS, layerTrainers);

        return p;
    }
}
package org.hisp.dhis.trackedentitydatavalue; /* * Copyright (c) 2004-2018, University of Oslo * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * Neither the name of the HISP project nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ import org.hisp.dhis.DhisSpringTest; import org.hisp.dhis.dataelement.DataElement; import org.hisp.dhis.dataelement.DataElementService; import org.hisp.dhis.organisationunit.OrganisationUnit; import org.hisp.dhis.organisationunit.OrganisationUnitService; import org.hisp.dhis.program.Program; import org.hisp.dhis.program.ProgramInstance; import org.hisp.dhis.program.ProgramInstanceService; import org.hisp.dhis.program.ProgramService; import org.hisp.dhis.program.ProgramStage; import org.hisp.dhis.program.ProgramStageInstance; import org.hisp.dhis.program.ProgramStageInstanceService; import org.hisp.dhis.program.ProgramStageService; import org.hisp.dhis.trackedentity.TrackedEntityInstance; import org.hisp.dhis.trackedentity.TrackedEntityInstanceService; import org.joda.time.DateTime; import org.junit.Test; import org.springframework.beans.factory.annotation.Autowired; import java.util.ArrayList; import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Set; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; /** * @author Chau Thu Tran */ public class TrackedEntityDataValueServiceTest extends DhisSpringTest { @Autowired private TrackedEntityDataValueService dataValueService; @Autowired private TrackedEntityInstanceService entityInstanceService; @Autowired private ProgramService programService; @Autowired private ProgramStageService programStageService; @Autowired private ProgramInstanceService programInstanceService; @Autowired private ProgramStageInstanceService programStageInstanceService; @Autowired private OrganisationUnitService organisationUnitService; @Autowired private DataElementService dataElementService; private ProgramInstance programInstance; private ProgramStageInstance stageInstanceA; private ProgramStageInstance stageInstanceB; private DataElement dataElementA; private DataElement dataElementB; 
private TrackedEntityDataValue dataValueA; private TrackedEntityDataValue dataValueB; private TrackedEntityDataValue dataValueC; private TrackedEntityDataValue dataValueD; private Date yesterday; private Date tomorrow; private TrackedEntityInstance entityInstance; @Override public void setUpTest() { OrganisationUnit organisationUnit = createOrganisationUnit( 'A' ); organisationUnitService.addOrganisationUnit( organisationUnit ); dataElementA = createDataElement( 'A' ); dataElementB = createDataElement( 'B' ); dataElementService.addDataElement( dataElementA ); dataElementService.addDataElement( dataElementB ); entityInstance = createTrackedEntityInstance( 'A', organisationUnit ); entityInstanceService.addTrackedEntityInstance( entityInstance ); Program program = createProgram( 'A', new HashSet<>(), organisationUnit ); programService.addProgram( program ); ProgramStage stageA = createProgramStage( 'A', 0 ); stageA.setProgram( program ); programStageService.saveProgramStage( stageA ); ProgramStage stageB = createProgramStage( 'B', 0 ); stageB.setProgram( program ); programStageService.saveProgramStage( stageB ); Set<ProgramStage> programStages = new HashSet<>(); programStages.add( stageA ); programStages.add( stageB ); program.setProgramStages( programStages ); programService.updateProgram( program ); DateTime yesterDate = DateTime.now(); yesterDate.withTimeAtStartOfDay(); yesterDate.minusDays( 1 ); yesterday = yesterDate.toDate(); DateTime tomorrowDate = DateTime.now(); tomorrowDate.withTimeAtStartOfDay(); tomorrowDate.plusDays( 1 ); tomorrow = tomorrowDate.toDate(); programInstance = programInstanceService.enrollTrackedEntityInstance( entityInstance, program, yesterday, yesterday, organisationUnit ); stageInstanceA = programStageInstanceService.createProgramStageInstance( programInstance, stageA, yesterday, yesterday, organisationUnit ); stageInstanceB = programStageInstanceService.createProgramStageInstance( programInstance, stageB, yesterday, yesterday, 
organisationUnit ); dataValueA = new TrackedEntityDataValue( stageInstanceA, dataElementA, "1" ); dataValueB = new TrackedEntityDataValue( stageInstanceA, dataElementB, "2" ); dataValueC = new TrackedEntityDataValue( stageInstanceB, dataElementA, "3" ); dataValueD = new TrackedEntityDataValue( stageInstanceB, dataElementB, "4" ); } @Test public void testSaveTrackedEntityDataValue() { dataValueService.saveTrackedEntityDataValue( dataValueA ); dataValueService.saveTrackedEntityDataValue( dataValueB ); assertNotNull( dataValueService.getTrackedEntityDataValue( stageInstanceA, dataElementA ) ); assertNotNull( dataValueService.getTrackedEntityDataValue( stageInstanceA, dataElementB ) ); } @Test public void testDeleteTrackedEntityDataValue() { dataValueService.saveTrackedEntityDataValue( dataValueA ); dataValueService.saveTrackedEntityDataValue( dataValueB ); assertNotNull( dataValueService.getTrackedEntityDataValue( stageInstanceA, dataElementA ) ); assertNotNull( dataValueService.getTrackedEntityDataValue( stageInstanceA, dataElementB ) ); dataValueService.deleteTrackedEntityDataValue( dataValueA ); assertNull( dataValueService.getTrackedEntityDataValue( stageInstanceA, dataElementA ) ); assertNotNull( dataValueService.getTrackedEntityDataValue( stageInstanceA, dataElementB ) ); dataValueService.deleteTrackedEntityDataValue( dataValueB ); assertNull( dataValueService.getTrackedEntityDataValue( stageInstanceA, dataElementA ) ); assertNull( dataValueService.getTrackedEntityDataValue( stageInstanceA, dataElementB ) ); } @Test public void testUpdateTrackedEntityDataValue() { dataValueService.saveTrackedEntityDataValue( dataValueA ); assertNotNull( dataValueService.getTrackedEntityDataValue( stageInstanceA, dataElementA ) ); dataValueA.setValue( "2" ); dataValueService.updateTrackedEntityDataValue( dataValueA ); assertEquals( "2", dataValueService.getTrackedEntityDataValue( stageInstanceA, dataElementA ).getValue() ); } @Test public void 
testGetTrackedEntityDataValuesByStageInstance() { dataValueService.saveTrackedEntityDataValue( dataValueA ); dataValueService.saveTrackedEntityDataValue( dataValueB ); dataValueService.saveTrackedEntityDataValue( dataValueC ); dataValueService.saveTrackedEntityDataValue( dataValueD ); List<TrackedEntityDataValue> dataValues = dataValueService.getTrackedEntityDataValues( stageInstanceA ); assertEquals( 2, dataValues.size() ); assertTrue( dataValues.contains( dataValueA ) ); assertTrue( dataValues.contains( dataValueB ) ); dataValues = dataValueService.getTrackedEntityDataValues( stageInstanceB ); assertEquals( 2, dataValues.size() ); assertTrue( dataValues.contains( dataValueC ) ); assertTrue( dataValues.contains( dataValueD ) ); } @Test public void testGetTrackedEntityDataValuesByStageElement() { dataValueService.saveTrackedEntityDataValue( dataValueA ); dataValueService.saveTrackedEntityDataValue( dataValueB ); dataValueService.saveTrackedEntityDataValue( dataValueC ); dataValueService.saveTrackedEntityDataValue( dataValueD ); List<DataElement> dataElements = new ArrayList<>(); dataElements.add( dataElementA ); dataElements.add( dataElementB ); List<TrackedEntityDataValue> dataValues = dataValueService.getTrackedEntityDataValues( stageInstanceA, dataElements ); assertEquals( 2, dataValues.size() ); assertTrue( dataValues.contains( dataValueA ) ); assertTrue( dataValues.contains( dataValueB ) ); } @Test public void testGetTrackedEntityDataValues() { dataValueService.saveTrackedEntityDataValue( dataValueA ); dataValueService.saveTrackedEntityDataValue( dataValueB ); dataValueService.saveTrackedEntityDataValue( dataValueC ); dataValueService.saveTrackedEntityDataValue( dataValueD ); List<ProgramStageInstance> programStageInstances = new ArrayList<>(); programStageInstances.add( stageInstanceA ); programStageInstances.add( stageInstanceB ); List<TrackedEntityDataValue> dataValues = dataValueService .getTrackedEntityDataValues( programStageInstances ); assertEquals( 4, 
dataValues.size() ); assertTrue( dataValues.contains( dataValueA ) ); assertTrue( dataValues.contains( dataValueB ) ); assertTrue( dataValues.contains( dataValueC ) ); assertTrue( dataValues.contains( dataValueD ) ); } @Test public void testGetTrackedEntityDataValuesByDataElement() { dataValueService.saveTrackedEntityDataValue( dataValueA ); dataValueService.saveTrackedEntityDataValue( dataValueB ); dataValueService.saveTrackedEntityDataValue( dataValueC ); dataValueService.saveTrackedEntityDataValue( dataValueD ); List<TrackedEntityDataValue> dataValues = dataValueService.getTrackedEntityDataValues( dataElementA ); assertEquals( 2, dataValues.size() ); assertTrue( dataValues.contains( dataValueA ) ); assertTrue( dataValues.contains( dataValueC ) ); dataValues = dataValueService.getTrackedEntityDataValues( dataElementB ); assertEquals( 2, dataValues.size() ); assertTrue( dataValues.contains( dataValueB ) ); assertTrue( dataValues.contains( dataValueD ) ); } @Test public void testGetDataValuesByDataElement() { dataValueService.saveTrackedEntityDataValue( dataValueA ); dataValueService.saveTrackedEntityDataValue( dataValueB ); dataValueService.saveTrackedEntityDataValue( dataValueC ); dataValueService.saveTrackedEntityDataValue( dataValueD ); List<DataElement> dataElements = new ArrayList<>(); dataElements.add( dataElementA ); dataElements.add( dataElementB ); List<TrackedEntityDataValue> dataValues = dataValueService.getTrackedEntityDataValues( entityInstance, dataElements, yesterday, tomorrow ); dataValues = dataValueService.getTrackedEntityDataValues( dataElementB ); assertEquals( 2, dataValues.size() ); assertTrue( dataValues.contains( dataValueB ) ); assertTrue( dataValues.contains( dataValueD ) ); } @Test public void testGetDataValue() { dataValueService.saveTrackedEntityDataValue( dataValueA ); dataValueService.saveTrackedEntityDataValue( dataValueB ); TrackedEntityDataValue dataValue = dataValueService.getTrackedEntityDataValue( stageInstanceA, dataElementA 
); assertEquals( dataValueA, dataValue ); dataValue = dataValueService.getTrackedEntityDataValue( stageInstanceA, dataElementB ); assertEquals( dataValueB, dataValue ); } }
package org.kududb.ts.core;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.MoreObjects;
import com.google.common.primitives.Ints;

import java.util.Arrays;
import java.util.List;

import javax.annotation.concurrent.NotThreadSafe;

import org.apache.kudu.annotations.InterfaceAudience;

/*
 * DO NOT EDIT
 *
 * Autogenerated from src/templates/Vec.java using the
 * src/templates/build script.
 */

/**
 * A vector of primitive ints.
 *
 * The vector is backed by a contiguous array, and offers efficient random
 * access. Not thread-safe. Elements between {@code len} and
 * {@code data.length} are uninitialized garbage and must never be read.
 */
@InterfaceAudience.Private
@NotThreadSafe
final class IntVec {

  /** Default initial capacity for new vectors. */
  @VisibleForTesting
  static final int DEFAULT_CAPACITY = 32;

  /** data backing the vector. */
  private int[] data;
  /** offset of first unused element in data. */
  private int len;

  private IntVec(int capacity) {
    data = new int[capacity];
    len = 0;
  }

  private IntVec(int[] data) {
    this.data = data;
    this.len = data.length;
  }

  /**
   * Creates a new vector.
   * @return the new vector.
   */
  public static IntVec create() {
    return new IntVec(DEFAULT_CAPACITY);
  }

  /**
   * Creates a new vector with the specified capacity.
   * @param capacity the initial capacity of the vector
   * @return a new vector with the specified capacity
   */
  public static IntVec withCapacity(int capacity) {
    return new IntVec(capacity);
  }

  /**
   * Wrap an existing array with a vector.
   * The array should not be modified after this call.
   * @param data the initial data for the vector
   * @return a vector wrapping the data
   */
  public static IntVec wrap(int[] data) {
    return new IntVec(data);
  }

  /** Returns the number of elements the vector can hold without reallocating. */
  public int capacity() {
    return data.length;
  }

  /** Returns the number of elements in the vector. */
  public int len() {
    return len;
  }

  /** Returns {@code true} if the vector is empty. */
  public boolean isEmpty() {
    return len == 0;
  }

  /**
   * Reserves capacity for at least {@code additional} more elements to be
   * inserted into the vector.
   * The vector may reserve more space to avoid frequent reallocations. If the
   * vector already has sufficient capacity, no reallocation will happen.
   *
   * @param additional capacity to reserve
   * @throws IllegalArgumentException if {@code additional} is negative
   */
  public void reserve(int additional) {
    if (additional < 0) throw new IllegalArgumentException("negative additional");
    if (data.length - len >= additional) return;
    // Grow to at least double the current capacity so repeated pushes are
    // amortized O(1).
    reserveExact(Math.max(additional, (data.length - len) + data.length));
  }

  /**
   * Reserves capacity for exactly {@code additional} more elements to be
   * inserted into the vector.
   * If the vector already has sufficient capacity, no reallocation will happen.
   *
   * @param additional capacity to reserve
   * @throws IllegalArgumentException if {@code additional} is negative
   */
  public void reserveExact(int additional) {
    // BUG FIX: this previously (a) validated 'len < 0' (len is never negative)
    // instead of the 'additional' argument, (b) returned only when free space
    // was strictly greater than 'additional' (equal capacity is sufficient),
    // and (c) grew to data.length + additional rather than the exact
    // len + additional.
    if (additional < 0) throw new IllegalArgumentException("negative additional");
    if (data.length - len >= additional) return;
    data = Arrays.copyOf(data, len + additional);
  }

  /**
   * Shrink the capacity of the vector to match the length.
   */
  public void shrinkToFit() {
    if (len < data.length) data = Arrays.copyOf(data, len);
  }

  /**
   * Shorten the vector to be {@code len} elements long.
   * If {@code len} is greater than the vector's current length,
   * this has no effect.
   * @param len the new length of the vector
   * @throws IllegalArgumentException if {@code len} is negative
   */
  public void truncate(int len) {
    if (len < 0) throw new IllegalArgumentException("negative len");
    this.len = Math.min(this.len, len);
  }

  /**
   * Removes all elements from the vector.
   * No reallocation will be performed.
   */
  public void clear() {
    truncate(0);
  }

  /**
   * Appends an element to the vector.
   * @param element the element to append
   */
  public void push(int element) {
    reserve(1);
    data[len++] = element;
  }

  /**
   * Sets the element at {@code index} to the provided value.
   * @param index of the element to set
   * @param value to set the element to
   * @throws IndexOutOfBoundsException if {@code index} is not valid
   */
  public void set(int index, int value) {
    if (index >= len) throw new IndexOutOfBoundsException();
    data[index] = value;
  }

  /**
   * Concatenates another vector onto the end of this one.
   * @param other the other vector to concatenate onto this one
   */
  public void concat(IntVec other) {
    reserveExact(other.len);
    System.arraycopy(other.data, 0, data, len, other.len);
    len += other.len;
  }

  /**
   * Returns the element at the specified position.
   * @param index of the element to return
   * @return the element at the specified position
   * @throws IndexOutOfBoundsException if the index is out of range
   */
  public int get(int index) {
    if (index >= len) throw new IndexOutOfBoundsException();
    return data[index];
  }

  /**
   * Sorts the vector.
   */
  public void sort() {
    Arrays.sort(data, 0, len);
  }

  /**
   * Merges another vector into this one, retaining sort order.
   * Both vectors must initially be sorted. The other vector will not be
   * modified.
   * @param other the vector to merge into this vector
   */
  public void merge(IntVec other) {
    // Standard backwards merge of two sorted arrays, writing into the tail of
    // this vector's (reserved) backing array so no scratch space is needed.
    // http://www.programcreek.com/2012/12/leetcode-merge-sorted-array-java/
    reserve(other.len());
    int m = len;
    int n = other.len;
    while (m > 0 && n > 0) {
      if (data[m - 1] > other.data[n - 1]) {
        data[m + n - 1] = data[m - 1];
        m--;
      } else {
        data[m + n - 1] = other.data[n - 1];
        n--;
      }
    }
    // Remaining elements of 'other' (elements of 'this' are already in place).
    while (n > 0) {
      data[m + n - 1] = other.data[n - 1];
      n--;
    }
    len += other.len;
  }

  /**
   * Removes all values from this vector that are not contained in the other
   * vector.
   * Both vectors should initially be sorted. This vector will remain sorted.
   * The other vector will not be modified. Duplicate values in both vectors
   * will be preserved.
   * @param other the vector to intersect with this vector
   */
  public void intersect(IntVec other) {
    int writeOffset = 0;
    int otherOffset = 0;
    for (int thisOffset = 0; thisOffset < len; thisOffset++) {
      int index = Arrays.binarySearch(other.data, otherOffset, other.len, data[thisOffset]);
      if (index < 0) {
        // Not found: resume future searches at the insertion point.
        otherOffset = -index - 1;
      } else {
        data[writeOffset++] = other.data[index];
        // BUG FIX: advance past the matched element. The previous
        // 'otherOffset++' only bumped the old cursor, so when binarySearch
        // landed beyond it (possible with duplicates) the same element in
        // 'other' could be matched again.
        otherOffset = index + 1;
      }
    }
    this.len = writeOffset;
  }

  /**
   * Removes consecutive repeated elements in the vector.
   * If the vector is sorted, this removes all duplicates.
   */
  public void dedup() {
    if (len <= 1) return;
    int writeOffset = 1;
    for (int readOffset = 1; readOffset < len; readOffset++) {
      if (data[writeOffset - 1] != data[readOffset]) data[writeOffset++] = data[readOffset];
    }
    len = writeOffset;
  }

  /**
   * Creates an iterator over this vector.
   * The vector should not be concurrently modified while the iterator is in use.
   * @return an iterator over the vector
   */
  public Iterator iterator() {
    return new Iterator();
  }

  /**
   * Returns a list view of the vector.
   * The vector should not be concurrently modified while the list is in use.
   * @return a list view of the vector
   */
  public List<Integer> asList() {
    List<Integer> list = Ints.asList(data);
    if (len < data.length) return list.subList(0, len);
    return list;
  }

  /** {@inheritDoc} */
  @Override
  public String toString() {
    if (len == 0) {
      return "[]";
    }
    StringBuilder builder = new StringBuilder(len * 5);
    builder.append('[');
    builder.append(data[0]);
    for (int i = 1; i < len; i++) {
      builder.append(", ").append(data[i]);
    }
    builder.append(']');
    return builder.toString();
  }

  /** {@inheritDoc} */
  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    IntVec other = (IntVec) o;
    if (len != other.len) return false;
    // Only compare the live prefix; capacity and garbage beyond 'len' are
    // irrelevant.
    for (int i = 0; i < len; i++) if (data[i] != other.data[i]) return false;
    return true;
  }

  /** {@inheritDoc} */
  @Override
  public int hashCode() {
    int result = len;
    for (int i = 0; i < len; i++) result = 31 * result + data[i];
    return result;
  }

  /** {@inheritDoc} */
  @Override
  protected IntVec clone() {
    IntVec clone = new IntVec(0);
    clone.data = Arrays.copyOf(data, data.length);
    clone.len = len;
    return clone;
  }

  /** An iterator of primitive ints. */
  public class Iterator {
    int index = 0;

    private Iterator() {}

    /**
     * Returns the next element in the iterator.
     * @return the next element
     */
    public int next() {
      return data[index++];
    }

    /**
     * Returns the next element in the iterator without changing the iterator's position.
     * @return the next element
     */
    public int peek() {
      return data[index];
    }

    /**
     * Returns {@code true} if the iterator contains another element.
     * @return {@code true} if the iterator has more elements
     */
    public boolean hasNext() {
      return index < len;
    }

    /**
     * Seeks this iterator to the provided index.
     * @param index the index to seek to
     * @throws IndexOutOfBoundsException if the index is out of bounds of the vector
     */
    public void seek(int index) {
      if (index < 0 || index > len) throw new IndexOutOfBoundsException("seek");
      this.index = index;
    }

    /**
     * Seek to the first datapoint greater than or equal to the provided value.
     * The vector must be sorted.
     * @param value to seek to
     */
    public void seekToValue(int value) {
      // BUG FIX: restrict the search to the live range [0, len). Searching the
      // whole backing array could match uninitialized garbage past 'len' and
      // leave the iterator positioned out of bounds.
      int offset = Arrays.binarySearch(data, 0, len, value);
      index = offset >= 0 ? offset : -offset - 1;
    }

    /**
     * Get the iterator's current index in the vector.
     * @return the index
     */
    public int getIndex() {
      return index;
    }

    /** {@inheritDoc} */
    @Override
    public String toString() {
      return MoreObjects.toStringHelper(this)
                        .add("index", index)
                        .add("vec", IntVec.this)
                        .toString();
    }
  }
}
/* * This file is part of Sponge, licensed under the MIT License (MIT). * * Copyright (c) SpongePowered <https://www.spongepowered.org> * Copyright (c) contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package org.spongepowered.common.data.persistence; import com.flowpowered.math.vector.Vector3i; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.reflect.TypeToken; import net.minecraft.block.Block; import net.minecraft.tileentity.TileEntity; import net.minecraft.util.ResourceLocation; import org.apache.logging.log4j.Level; import org.spongepowered.api.block.BlockState; import org.spongepowered.api.block.BlockTypes; import org.spongepowered.api.block.tileentity.TileEntityArchetype; import org.spongepowered.api.block.tileentity.TileEntityType; import org.spongepowered.api.data.DataContainer; import org.spongepowered.api.data.DataQuery; import org.spongepowered.api.data.DataView; import org.spongepowered.api.data.persistence.DataTranslator; import org.spongepowered.api.data.persistence.DataTranslators; import org.spongepowered.api.data.persistence.InvalidDataException; import org.spongepowered.api.world.extent.worker.procedure.BlockVolumeVisitor; import org.spongepowered.api.world.schematic.BlockPalette; import org.spongepowered.api.world.schematic.Schematic; import org.spongepowered.common.SpongeImpl; import org.spongepowered.common.SpongeImplHooks; import org.spongepowered.common.block.BlockUtil; import org.spongepowered.common.block.SpongeTileEntityArchetypeBuilder; import org.spongepowered.common.data.util.DataQueries; import org.spongepowered.common.registry.type.block.TileEntityTypeRegistryModule; import org.spongepowered.common.util.gen.ArrayMutableBlockBuffer; import org.spongepowered.common.world.schematic.GlobalPalette; import org.spongepowered.common.world.schematic.SpongeSchematic; import java.util.List; import java.util.Map; import java.util.Optional; public class LegacySchematicTranslator implements DataTranslator<Schematic> { private static final LegacySchematicTranslator INSTANCE = new LegacySchematicTranslator(); private static final TypeToken<Schematic> TYPE_TOKEN = 
TypeToken.of(Schematic.class); private static final int MAX_SIZE = 65535; private static final DataQuery TILE_ID = DataQuery.of("id"); public static LegacySchematicTranslator get() { return INSTANCE; } private LegacySchematicTranslator() { } @Override public String getId() { return "sponge:legacy_schematic"; } @Override public String getName() { return "Legacy Schematic translator"; } @Override public TypeToken<Schematic> getToken() { return TYPE_TOKEN; } @Override public Schematic translate(DataView view) throws InvalidDataException { // We default to sponge as the assumption should be that if this tag // (which is not in the sponge schematic specification) is not present // then it is more likely that its a sponge schematic than a legacy // schematic String materials = view.getString(DataQueries.Schematic.LEGACY_MATERIALS).orElse("Sponge"); if ("Sponge".equalsIgnoreCase(materials)) { // not a legacy schematic use the new loader instead. return DataTranslators.SCHEMATIC.translate(view); } else if (!"Alpha".equalsIgnoreCase(materials)) { throw new InvalidDataException(String.format("Schematic specifies unknown materials %s", materials)); } int width = view.getShort(DataQueries.Schematic.WIDTH).get(); int height = view.getShort(DataQueries.Schematic.HEIGHT).get(); int length = view.getShort(DataQueries.Schematic.LENGTH).get(); if (width > MAX_SIZE || height > MAX_SIZE || length > MAX_SIZE) { throw new InvalidDataException(String.format( "Schematic is larger than maximum allowable size (found: (%d, %d, %d) max: (%d, %<d, %<d)", width, height, length, MAX_SIZE)); } int offsetX = view.getInt(DataQueries.Schematic.LEGACY_OFFSET_X).orElse(0); int offsetY = view.getInt(DataQueries.Schematic.LEGACY_OFFSET_Y).orElse(0); int offsetZ = view.getInt(DataQueries.Schematic.LEGACY_OFFSET_Z).orElse(0); BlockPalette palette = GlobalPalette.instance; ArrayMutableBlockBuffer buffer = new ArrayMutableBlockBuffer(new Vector3i(-offsetX, -offsetY, -offsetZ), new Vector3i(width, height, 
length)); byte[] block_ids = (byte[]) view.get(DataQueries.Schematic.LEGACY_BLOCKS).get(); byte[] block_data = (byte[]) view.get(DataQueries.Schematic.LEGACY_BLOCK_DATA).get(); byte[] add_block = (byte[]) view.get(DataQueries.Schematic.LEGACY_ADD_BLOCKS).orElse(null); for (int x = 0; x < width; x++) { for (int y = 0; y < height; y++) { for (int z = 0; z < length; z++) { int index = (y * length + z) * width + x; final int default_state_id = block_ids[index]; final int blockData = block_data[index] & 0xF; int palette_id = default_state_id << 4 | blockData; if (add_block != null) { palette_id |= add_block[index] << 12; } Optional<BlockState> blockState = palette.get(palette_id); if (!blockState.isPresent()) { // At the very least get the default state id blockState = Optional.of((BlockState) Block.REGISTRY.getObjectById(default_state_id)); } BlockState block = blockState.orElseGet(BlockTypes.COBBLESTONE::getDefaultState); buffer.setBlock(x - offsetX, y - offsetY, z - offsetZ, block); } } } Map<Vector3i, TileEntityArchetype> tiles = Maps.newHashMap(); List<DataView> tiledata = view.getViewList(DataQueries.Schematic.LEGACY_TILEDATA).orElse(null); if (tiledata != null) { for (DataView tile : tiledata) { int x = tile.getInt(DataQueries.X_POS).get(); int y = tile.getInt(DataQueries.Y_POS).get(); int z = tile.getInt(DataQueries.Z_POS).get(); final String tileType = tile.getString(TILE_ID).get(); final ResourceLocation name = new ResourceLocation(tileType); TileEntityType type = TileEntityTypeRegistryModule.getInstance() .getForClass(TileEntity.REGISTRY.getObject(name)); final BlockState state = buffer.getBlock(x - offsetX, y - offsetY, z - offsetZ); // Somehow we need to get some DataFixers in here, because some data may be legacy from older versions before data // fixers. 
if (type!= null && SpongeImplHooks.hasBlockTileEntity(((Block) state.getType()), BlockUtil.toNative(state))) { TileEntityArchetype archetype = new SpongeTileEntityArchetypeBuilder() .state(state) .tileData(tile) .tile(type) .build(); tiles.put(new Vector3i(x - offsetX, y - offsetY, z - offsetZ), archetype); } } } SpongeSchematic schematic = new SpongeSchematic(buffer, tiles); return schematic; } @Override public DataContainer translate(Schematic schematic) throws InvalidDataException { DataContainer data = DataContainer.createNew(DataView.SafetyMode.NO_DATA_CLONED); addTo(schematic, data); return data; } @Override public DataView addTo(Schematic schematic, DataView data) { final int xMin = schematic.getBlockMin().getX(); final int yMin = schematic.getBlockMin().getY(); final int zMin = schematic.getBlockMin().getZ(); final int width = schematic.getBlockSize().getX(); final int height = schematic.getBlockSize().getY(); final int length = schematic.getBlockSize().getZ(); if (width > MAX_SIZE || height > MAX_SIZE || length > MAX_SIZE) { throw new IllegalArgumentException(String.format( "Schematic is larger than maximum allowable size (found: (%d, %d, %d) max: (%d, %<d, %<d)", width, height, length, MAX_SIZE)); } data.set(DataQueries.Schematic.WIDTH, width); data.set(DataQueries.Schematic.HEIGHT, height); data.set(DataQueries.Schematic.LENGTH, length); data.set(DataQueries.Schematic.LEGACY_MATERIALS, "Alpha"); // These are added for better interop with WorldEdit data.set(DataQueries.Schematic.LEGACY_OFFSET_X, -xMin); data.set(DataQueries.Schematic.LEGACY_OFFSET_Y, -yMin); data.set(DataQueries.Schematic.LEGACY_OFFSET_Z, -zMin); SaveIterator itr = new SaveIterator(width, height, length); schematic.getBlockWorker().iterate(itr); byte[] blockids = itr.blockids; byte[] extraids = itr.extraids; byte[] blockdata = itr.blockdata; data.set(DataQueries.Schematic.LEGACY_BLOCKS, blockids); data.set(DataQueries.Schematic.LEGACY_BLOCK_DATA, blockdata); if (extraids != null) { 
data.set(DataQueries.Schematic.LEGACY_ADD_BLOCKS, extraids); } List<DataView> tileEntities = Lists.newArrayList(); for (Map.Entry<Vector3i, TileEntityArchetype> entry : schematic.getTileEntityArchetypes().entrySet()) { Vector3i pos = entry.getKey(); DataContainer tiledata = entry.getValue().getTileData(); tiledata.set(DataQueries.X_POS, pos.getX() - xMin); tiledata.set(DataQueries.Y_POS, pos.getY() - yMin); tiledata.set(DataQueries.Z_POS, pos.getZ() - zMin); tileEntities.add(tiledata); } data.set(DataQueries.Schematic.LEGACY_TILEDATA, tileEntities); return data; } private static class SaveIterator implements BlockVolumeVisitor<Schematic> { private final int width; private final int length; public byte[] blockids; public byte[] extraids; public byte[] blockdata; public SaveIterator(int width, int height, int length) { this.width = width; this.length = length; this.blockids = new byte[width * height * length]; this.extraids = null; this.blockdata = new byte[width * height * length]; } @Override public void visit(Schematic volume, int x, int y, int z) { int x0 = x - volume.getBlockMin().getX(); int y0 = y - volume.getBlockMin().getY(); int z0 = z - volume.getBlockMin().getZ(); int id = GlobalPalette.instance.get(volume.getBlock(x, y, z)).get(); int blockid = id >> 4; int dataid = id & 0xF; int index = (y0 * this.length + z0) * this.width + x0; this.blockids[index] = (byte) (blockid & 0xFF); if (blockid > 0xFF) { if (this.extraids == null) { this.extraids = new byte[(this.blockdata.length >> 2) + 1]; } this.extraids[index >> 1] = (byte) (((index & 1) == 0) ? this.extraids[index >> 1] & 0xF0 | (blockid >> 8) & 0xF : this.extraids[index >> 1] & 0xF | ((blockid >> 8) & 0xF) << 4); } this.blockdata[index] = (byte) dataid; } } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.ivy.ant; import java.io.File; import java.util.ArrayList; import java.util.Collections; import java.util.List; import junit.framework.TestCase; import org.apache.ivy.util.FileUtil; import org.apache.tools.ant.Project; import org.apache.tools.ant.types.FileSet; import org.apache.tools.ant.types.Path; // CheckStyle:MagicNumber| OFF // The test very often use MagicNumber. Using a constant is less expressive. 
/**
 * Tests for the {@code <ivy:buildlist>} Ant task: orders a set of build files
 * according to the inter-module dependencies declared in their ivy files.
 */
public class IvyBuildListTest extends TestCase {

    // Per-test Ivy cache directory (build/cache), recreated in setUp and
    // removed in tearDown.
    private File cache;

    private Project project;

    private IvyBuildList buildlist;

    protected void setUp() throws Exception {
        createCache();
        project = new Project();
        project.init();

        buildlist = new IvyBuildList();
        buildlist.setProject(project);
        // Point Ivy at the throwaway cache so tests do not pollute a real one.
        System.setProperty("ivy.cache.dir", cache.getAbsolutePath());
    }

    protected void tearDown() throws Exception {
        cleanCache();
    }

    private void cleanCache() {
        FileUtil.forceDelete(cache);
    }

    private void createCache() {
        cache = new File("build/cache");
        cache.mkdirs();
    }

    // Executes the task and returns the ordered build files published under
    // the "ordered.build.files" project reference.
    private String[] getFiles(IvyBuildList buildlist) {
        buildlist.setReference("ordered.build.files");
        buildlist.execute();

        Object o = buildlist.getProject().getReference("ordered.build.files");
        assertNotNull(o);
        assertTrue(o instanceof Path);

        Path path = (Path) o;
        String[] files = path.list();
        assertNotNull(files);
        return files;
    }

    // Asserts that actual[i] is prefix + expected[i] + "/build.xml" for every
    // expected entry (extra trailing entries in 'actual' are not checked).
    private void assertListOfFiles(String prefix, String[] expected, String[] actual) {
        for (int i = 0; i < expected.length; i++) {
            assertEquals(new File(prefix + expected[i] + "/build.xml").getAbsolutePath(), new File(
                    actual[i]).getAbsolutePath());
        }
    }

    /*
     * Those tests use the ivy files A , B , C , D , E in test/buildlist The dependencies are : A ->
     * C B has no dependency C -> B D -> A , B E has no dependency F -> G G -> F
     */

    public void testSimple() {
        FileSet fs = new FileSet();
        fs.setDir(new File("test/buildlist"));
        fs.setIncludes("**/build.xml");
        fs.setExcludes("E2/build.xml,F/build.xml,G/build.xml");
        buildlist.addFileset(fs);
        buildlist.setOnMissingDescriptor("skip");

        String[] files = getFiles(buildlist);

        assertEquals(5, files.length);

        assertListOfFiles("test/buildlist/", new String[] {"B", "C", "A", "D", "E"}, files);
    }

    public void testReverse() {
        FileSet fs = new FileSet();
        fs.setDir(new File("test/buildlist"));
        fs.setIncludes("**/build.xml");
        fs.setExcludes("E2/build.xml,F/build.xml,G/build.xml");
        buildlist.addFileset(fs);
        buildlist.setReverse(true);
        buildlist.setOnMissingDescriptor("skip");

        String[] files = getFiles(buildlist);

        assertEquals(5, files.length);

        // reverse of testSimple's ordering
        assertListOfFiles("test/buildlist/", new String[] {"E", "D", "A", "C", "B"}, files);
    }

    public void testWithRoot() {
        FileSet fs = new FileSet();
        fs.setDir(new File("test/buildlist"));
        fs.setIncludes("**/build.xml");
        fs.setExcludes("E2/**");
        buildlist.addFileset(fs);
        buildlist.setRoot("C");
        buildlist.setOnMissingDescriptor("skip");

        String[] files = getFiles(buildlist);

        assertEquals(2, files.length); // A and D should be filtered out

        assertListOfFiles("test/buildlist/", new String[] {"B", "C"}, files);
    }

    public void testWithRootCircular() {
        // F and G depend on each other; the task must still terminate.
        FileSet fs = new FileSet();
        fs.setDir(new File("test/buildlist"));
        fs.setIncludes("**/build.xml");
        buildlist.addFileset(fs);
        buildlist.setRoot("F");
        buildlist.setOnMissingDescriptor("skip");

        String[] files = getFiles(buildlist);

        assertEquals(2, files.length); // F and G should be in the list
    }

    public void testWithTwoRoots() {
        FileSet fs = new FileSet();
        fs.setDir(new File("test/buildlist"));
        fs.setIncludes("**/build.xml");
        fs.setExcludes("E2/**");
        buildlist.addFileset(fs);
        buildlist.setRoot("C,E");
        buildlist.setOnMissingDescriptor("skip");

        String[] files = getFiles(buildlist);

        assertEquals(3, files.length); // A and D should be filtered out

        assertListOfFiles("test/buildlist/", new String[] {"B", "C", "E"}, files);
    }

    public void testWithRootExclude() {
        FileSet fs = new FileSet();
        fs.setDir(new File("test/buildlist"));
        fs.setIncludes("**/build.xml");
        fs.setExcludes("E2/**");
        buildlist.addFileset(fs);
        buildlist.setRoot("C");
        buildlist.setExcludeRoot(true);
        buildlist.setOnMissingDescriptor("skip");

        String[] files = getFiles(buildlist);

        assertEquals(1, files.length); // A, D and C should be filtered out

        assertListOfFiles("test/buildlist/", new String[] {"B"}, files);
    }

    public void testWithRootAndOnlyDirectDep() {
        FileSet fs = new FileSet();
        fs.setDir(new File("test/buildlist"));
        fs.setIncludes("**/build.xml");
        fs.setExcludes("E2/**");
        buildlist.addFileset(fs);
        buildlist.setRoot("A");
        buildlist.setOnlydirectdep(true);
        buildlist.setOnMissingDescriptor("skip");

        String[] files = getFiles(buildlist);

        assertEquals(2, files.length); // We should have only A and C

        assertListOfFiles("test/buildlist/", new String[] {"C", "A"}, files);
    }

    public void testWithLeaf() {
        FileSet fs = new FileSet();
        fs.setDir(new File("test/buildlist"));
        fs.setIncludes("**/build.xml");
        fs.setExcludes("E2/**");
        buildlist.addFileset(fs);
        buildlist.setLeaf("C");
        buildlist.setOnMissingDescriptor("skip");

        String[] files = getFiles(buildlist);

        assertEquals(3, files.length); // B should be filtered out

        assertListOfFiles("test/buildlist/", new String[] {"C", "A", "D"}, files);
    }

    public void testWithLeafCircular() {
        FileSet fs = new FileSet();
        fs.setDir(new File("test/buildlist"));
        fs.setIncludes("**/build.xml");
        buildlist.addFileset(fs);
        buildlist.setLeaf("F");
        buildlist.setOnMissingDescriptor("skip");

        String[] files = getFiles(buildlist);

        assertEquals(2, files.length);
    }

    public void testWithTwoLeafs() {
        FileSet fs = new FileSet();
        fs.setDir(new File("test/buildlist"));
        fs.setIncludes("**/build.xml");
        fs.setExcludes("E2/**");
        buildlist.addFileset(fs);
        buildlist.setLeaf("C,E");
        buildlist.setOnMissingDescriptor("skip");

        String[] files = getFiles(buildlist);

        assertEquals(4, files.length); // B should be filtered out

        assertListOfFiles("test/buildlist/", new String[] {"C", "A", "D", "E"}, files);
    }

    public void testWithLeafExclude() {
        FileSet fs = new FileSet();
        fs.setDir(new File("test/buildlist"));
        fs.setIncludes("**/build.xml");
        fs.setExcludes("E2/**");
        buildlist.addFileset(fs);
        buildlist.setLeaf("C");
        buildlist.setExcludeLeaf(true);
        buildlist.setOnMissingDescriptor("skip");

        String[] files = getFiles(buildlist);

        assertEquals(2, files.length); // B and C should be filtered out

        assertListOfFiles("test/buildlist/", new String[] {"A", "D"}, files);
    }

    public void testWithLeafAndOnlyDirectDep() {
        FileSet fs = new FileSet();
        fs.setDir(new File("test/buildlist"));
        fs.setIncludes("**/build.xml");
        fs.setExcludes("E2/**");
        buildlist.addFileset(fs);
        buildlist.setOnMissingDescriptor("skip");
        buildlist.setLeaf("C");
        buildlist.setOnlydirectdep(true);

        String[] files = getFiles(buildlist);

        assertEquals(2, files.length); // We must have only A and C

        assertListOfFiles("test/buildlist/", new String[] {"C", "A"}, files);
    }

    public void testRestartFrom() {
        FileSet fs = new FileSet();
        fs.setDir(new File("test/buildlist"));
        fs.setIncludes("**/build.xml");
        fs.setExcludes("E2/build.xml,F/build.xml,G/build.xml");
        buildlist.addFileset(fs);
        buildlist.setOnMissingDescriptor("skip");
        buildlist.setRestartFrom("C");

        String[] files = getFiles(buildlist);

        assertEquals(4, files.length);

        // B (which precedes C in the full ordering) is dropped
        assertListOfFiles("test/buildlist/", new String[] {"C", "A", "D", "E"}, files);
    }

    public void testOnMissingDescriptor() {
        FileSet fs = new FileSet();
        fs.setDir(new File("test/buildlist"));
        fs.setIncludes("**/build.xml");
        fs.setExcludes("E2/build.xml,F/build.xml,G/build.xml");
        buildlist.addFileset(fs);
        // Deliberately a fresh String instance (not the interned literal) to
        // guard against identity comparison of the attribute value.
        buildlist.setOnMissingDescriptor(new String("tail")); // IVY-805: new String instance

        String[] files = getFiles(buildlist);

        assertEquals(6, files.length);

        // "tail": H (no ivy descriptor) is appended at the end
        assertListOfFiles("test/buildlist/", new String[] {"B", "C", "A", "D", "E", "H"}, files);
    }

    public void testOnMissingDescriptor2() {
        FileSet fs = new FileSet();
        fs.setDir(new File("test/buildlist"));
        fs.setIncludes("**/build.xml");
        fs.setExcludes("E2/build.xml,F/build.xml,G/build.xml");
        buildlist.addFileset(fs);
        // Same IVY-805 guard as above, but for the "skip" value.
        buildlist.setOnMissingDescriptor(new String("skip")); // IVY-805: new String instance

        String[] files = getFiles(buildlist);

        assertEquals(5, files.length);

        // "skip": H (no ivy descriptor) is omitted
        assertListOfFiles("test/buildlist/", new String[] {"B", "C", "A", "D", "E"}, files);
    }

    public void testWithModuleWithSameNameAndDifferentOrg() {
        FileSet fs = new FileSet();
        fs.setDir(new File("test/buildlist"));
        fs.setIncludes("**/build.xml");
        fs.setExcludes("F/build.xml,G/build.xml");
        buildlist.addFileset(fs);
        buildlist.setOnMissingDescriptor("skip");

        String[] files = getFiles(buildlist);

        assertEquals(6, files.length);

        assertListOfFiles("test/buildlist/", new String[] {"B", "C", "A", "D"}, files);

        // the order of E and E2 is undefined
        List other = new ArrayList();
        other.add(new File(files[4]).getAbsoluteFile().toURI());
        other.add(new File(files[5]).getAbsoluteFile().toURI());
        Collections.sort(other);

        assertEquals(new File("test/buildlist/E/build.xml").getAbsoluteFile().toURI(),
            other.get(0));
        assertEquals(new File("test/buildlist/E2/build.xml").getAbsoluteFile().toURI(),
            other.get(1));
    }

    public void testNoParents() {
        FileSet fs = new FileSet();
        fs.setDir(new File("test/buildlists/testNoParents"));
        fs.setIncludes("**/build.xml");
        buildlist.addFileset(fs);
        buildlist.setOnMissingDescriptor("skip");
        buildlist.setHaltonerror(false);

        String[] files = getFiles(buildlist);

        assertEquals(5, files.length);

        assertListOfFiles("test/buildlists/testNoParents/", new String[] {"bootstrap-parent",
                "ireland", "germany", "master-parent", "croatia"}, files);
    }

    public void testOneParent() {
        FileSet fs = new FileSet();
        fs.setDir(new File("test/buildlists/testOneParent"));
        fs.setIncludes("**/build.xml");
        buildlist.addFileset(fs);
        buildlist.setOnMissingDescriptor("skip");
        buildlist.setHaltonerror(false);

        String[] files = getFiles(buildlist);

        assertEquals(5, files.length);

        assertListOfFiles("test/buildlists/testOneParent/", new String[] {"bootstrap-parent",
                "master-parent", "croatia", "ireland", "germany"}, files);
    }

    public void testTwoParents() {
        FileSet fs = new FileSet();
        fs.setDir(new File("test/buildlists/testTwoParents"));
        fs.setIncludes("**/build.xml");
        buildlist.addFileset(fs);
        buildlist.setOnMissingDescriptor("skip");
        buildlist.setHaltonerror(false);

        String[] files = getFiles(buildlist);

        assertEquals(5, files.length);

        assertListOfFiles("test/buildlists/testTwoParents/", new String[] {"bootstrap-parent",
                "master-parent", "croatia", "ireland", "germany"}, files);
    }

    public void testRelativePathToParent() {
        FileSet fs = new FileSet();
        fs.setDir(new File("test/buildlists/testRelativePathToParent"));
        fs.setIncludes("**/build.xml");
        buildlist.addFileset(fs);
        buildlist.setOnMissingDescriptor("skip");
        buildlist.setHaltonerror(false);

        String[] files = getFiles(buildlist);

        assertEquals(5, files.length);

        assertListOfFiles("test/buildlists/testRelativePathToParent/", new String[] {
                "bootstrap-parent", "master-parent", "croatia", "ireland", "germany"}, files);
    }

    public void testAbsolutePathToParent() {
        // The build files resolve their parent via this absolute-path property.
        project.setProperty("master-parent.dir",
            new File("test/buildlists/testAbsolutePathToParent/master-parent").getAbsolutePath());

        FileSet fs = new FileSet();
        fs.setDir(new File("test/buildlists/testAbsolutePathToParent"));
        fs.setIncludes("**/build.xml");
        buildlist.addFileset(fs);
        buildlist.setOnMissingDescriptor("skip");
        buildlist.setHaltonerror(false);

        String[] files = getFiles(buildlist);

        assertEquals(5, files.length);

        assertListOfFiles("test/buildlists/testAbsolutePathToParent/", new String[] {
                "bootstrap-parent", "master-parent", "croatia", "ireland", "germany"}, files);
    }
}
// CheckStyle:MagicNumber| ON
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.mapper;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.elasticsearch.ElasticsearchGenerationException;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.mapper.MetadataFieldMapper.TypeParser;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import org.elasticsearch.index.mapper.internal.IdFieldMapper;
import org.elasticsearch.index.mapper.internal.IndexFieldMapper;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.mapper.internal.RoutingFieldMapper;
import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
import org.elasticsearch.index.mapper.internal.TTLFieldMapper;
import org.elasticsearch.index.mapper.internal.TimestampFieldMapper;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.mapper.object.RootObjectMapper;
import org.elasticsearch.search.internal.SearchContext;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;

import static java.util.Collections.emptyMap;

/**
 * Immutable view of the mapping for a single document type: the root object
 * mapper plus all metadata field mappers, with lookup helpers for field and
 * object mappers and entry points for parsing documents of this type.
 */
public class DocumentMapper implements ToXContent {

    /**
     * Builds a {@link DocumentMapper}. Metadata mappers are seeded from any
     * previously registered mapper for the same type so that existing
     * configuration is preserved, falling back to each parser's default.
     */
    public static class Builder {

        // LinkedHashMap: preserves registration order of the metadata mappers
        private Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> metadataMappers = new LinkedHashMap<>();

        private final RootObjectMapper rootObjectMapper;

        private Map<String, Object> meta = emptyMap();

        private final Mapper.BuilderContext builderContext;

        public Builder(RootObjectMapper.Builder builder, MapperService mapperService) {
            final Settings indexSettings = mapperService.getIndexSettings().getSettings();
            this.builderContext = new Mapper.BuilderContext(indexSettings, new ContentPath(1));
            this.rootObjectMapper = builder.build(builderContext);

            final String type = rootObjectMapper.name();
            DocumentMapper existingMapper = mapperService.documentMapper(type);
            for (Map.Entry<String, MetadataFieldMapper.TypeParser> entry : mapperService.mapperRegistry.getMetadataMapperParsers().entrySet()) {
                final String name = entry.getKey();
                // reuse the metadata mapper from the existing type mapping when present,
                // otherwise build the parser's default for this index
                final MetadataFieldMapper existingMetadataMapper = existingMapper == null
                        ? null
                        : (MetadataFieldMapper) existingMapper.mappers().getMapper(name);
                final MetadataFieldMapper metadataMapper;
                if (existingMetadataMapper == null) {
                    final TypeParser parser = entry.getValue();
                    metadataMapper = parser.getDefault(indexSettings, mapperService.fullName(name), builder.name());
                } else {
                    metadataMapper = existingMetadataMapper;
                }
                metadataMappers.put(metadataMapper.getClass(), metadataMapper);
            }
        }

        public Builder meta(Map<String, Object> meta) {
            this.meta = meta;
            return this;
        }

        // Explicitly supplied metadata mappers override the defaults seeded in the ctor.
        public Builder put(MetadataFieldMapper.Builder<?, ?> mapper) {
            MetadataFieldMapper metadataMapper = mapper.build(builderContext);
            metadataMappers.put(metadataMapper.getClass(), metadataMapper);
            return this;
        }

        public DocumentMapper build(MapperService mapperService) {
            Objects.requireNonNull(rootObjectMapper, "Mapper builder must have the root object mapper set");
            Mapping mapping = new Mapping(
                    mapperService.getIndexSettings().getIndexVersionCreated(),
                    rootObjectMapper,
                    metadataMappers.values().toArray(new MetadataFieldMapper[metadataMappers.values().size()]),
                    meta);
            return new DocumentMapper(mapperService, mapping);
        }
    }

    private final MapperService mapperService;

    private final String type;
    private final Text typeText;

    // JSON-serialized form of this mapping, computed once in the constructor
    private final CompressedXContent mappingSource;

    private final Mapping mapping;

    private final DocumentParser documentParser;

    private final DocumentFieldMappers fieldMappers;

    private final Map<String, ObjectMapper> objectMappers;

    private final boolean hasNestedObjects;

    public DocumentMapper(MapperService mapperService, Mapping mapping) {
        this.mapperService = mapperService;
        this.type = mapping.root().name();
        this.typeText = new Text(this.type);
        final IndexSettings indexSettings = mapperService.getIndexSettings();
        this.mapping = mapping;
        this.documentParser = new DocumentParser(indexSettings, mapperService.documentMapperParser(), this);

        if (metadataMapper(ParentFieldMapper.class).active()) {
            // mark the routing field mapper as required
            metadataMapper(RoutingFieldMapper.class).markAsRequired();
        }

        // collect all the mappers for this type
        List<ObjectMapper> newObjectMappers = new ArrayList<>();
        List<FieldMapper> newFieldMappers = new ArrayList<>();
        for (MetadataFieldMapper metadataMapper : this.mapping.metadataMappers) {
            if (metadataMapper instanceof FieldMapper) {
                newFieldMappers.add(metadataMapper);
            }
        }
        MapperUtils.collect(this.mapping.root, newObjectMappers, newFieldMappers);

        final AnalysisService analysisService = mapperService.analysisService();
        this.fieldMappers = new DocumentFieldMappers(newFieldMappers,
                analysisService.defaultIndexAnalyzer(),
                analysisService.defaultSearchAnalyzer(),
                analysisService.defaultSearchQuoteAnalyzer());

        Map<String, ObjectMapper> builder = new HashMap<>();
        for (ObjectMapper objectMapper : newObjectMappers) {
            // Map.put returns the previous value; a non-null result means two
            // object mappers claim the same full path, which is a bug.
            ObjectMapper previous = builder.put(objectMapper.fullPath(), objectMapper);
            if (previous != null) {
                throw new IllegalStateException("duplicate key " + objectMapper.fullPath() + " encountered");
            }
        }

        boolean hasNestedObjects = false;
        this.objectMappers = Collections.unmodifiableMap(builder);
        for (ObjectMapper objectMapper : newObjectMappers) {
            if (objectMapper.nested().isNested()) {
                hasNestedObjects = true;
            }
        }
        this.hasNestedObjects = hasNestedObjects;

        try {
            mappingSource = new CompressedXContent(this, XContentType.JSON, ToXContent.EMPTY_PARAMS);
        } catch (Exception e) {
            throw new ElasticsearchGenerationException("failed to serialize source for type [" + type + "]", e);
        }
    }

    public Mapping mapping() {
        return mapping;
    }

    public String type() {
        return this.type;
    }

    public Text typeText() {
        return this.typeText;
    }

    public Map<String, Object> meta() {
        return mapping.meta;
    }

    public CompressedXContent mappingSource() {
        return this.mappingSource;
    }

    public RootObjectMapper root() {
        return mapping.root;
    }

    public UidFieldMapper uidMapper() {
        return metadataMapper(UidFieldMapper.class);
    }

    @SuppressWarnings({"unchecked"})
    public <T extends MetadataFieldMapper> T metadataMapper(Class<T> type) {
        return mapping.metadataMapper(type);
    }

    public IndexFieldMapper indexMapper() {
        return metadataMapper(IndexFieldMapper.class);
    }

    public TypeFieldMapper typeMapper() {
        return metadataMapper(TypeFieldMapper.class);
    }

    public SourceFieldMapper sourceMapper() {
        return metadataMapper(SourceFieldMapper.class);
    }

    public AllFieldMapper allFieldMapper() {
        return metadataMapper(AllFieldMapper.class);
    }

    public IdFieldMapper idFieldMapper() {
        return metadataMapper(IdFieldMapper.class);
    }

    public RoutingFieldMapper routingFieldMapper() {
        return metadataMapper(RoutingFieldMapper.class);
    }

    public ParentFieldMapper parentFieldMapper() {
        return metadataMapper(ParentFieldMapper.class);
    }

    public TimestampFieldMapper timestampFieldMapper() {
        return metadataMapper(TimestampFieldMapper.class);
    }

    public TTLFieldMapper TTLFieldMapper() {
        return metadataMapper(TTLFieldMapper.class);
    }

    public IndexFieldMapper IndexFieldMapper() {
        return metadataMapper(IndexFieldMapper.class);
    }

    /** Query matching documents of this mapper's type. */
    public Query typeFilter() {
        return typeMapper().fieldType().termQuery(type, null);
    }

    public boolean hasNestedObjects() {
        return hasNestedObjects;
    }

    public DocumentFieldMappers mappers() {
        return this.fieldMappers;
    }

    /** Object mappers keyed by full path; unmodifiable. */
    public Map<String, ObjectMapper> objectMappers() {
        return this.objectMappers;
    }

    public ParsedDocument parse(String index, String type, String id, BytesReference source) throws MapperParsingException {
        return parse(SourceToParse.source(source).index(index).type(type).id(id));
    }

    public ParsedDocument parse(SourceToParse source) throws MapperParsingException {
        return documentParser.parseDocument(source);
    }

    /**
     * Returns the best nested {@link ObjectMapper} instances that is in the scope of the specified nested docId.
     * "Best" here is the deepest matching nested mapper (longest full path).
     */
    public ObjectMapper findNestedObjectMapper(int nestedDocId, SearchContext sc, LeafReaderContext context) throws IOException {
        ObjectMapper nestedObjectMapper = null;
        for (ObjectMapper objectMapper : objectMappers().values()) {
            if (!objectMapper.nested().isNested()) {
                continue;
            }

            Query filter = objectMapper.nestedTypeFilter();
            if (filter == null) {
                continue;
            }
            // We can pass down 'null' as acceptedDocs, because nestedDocId is a doc to be fetched and
            // therefor is guaranteed to be a live doc.
            final Weight nestedWeight = filter.createWeight(sc.searcher(), false);
            Scorer scorer = nestedWeight.scorer(context);
            if (scorer == null) {
                continue;
            }

            // advance() landing exactly on nestedDocId means this nested scope contains the doc
            if (scorer.iterator().advance(nestedDocId) == nestedDocId) {
                if (nestedObjectMapper == null) {
                    nestedObjectMapper = objectMapper;
                } else {
                    // prefer the deeper (longer path) nested mapper
                    if (nestedObjectMapper.fullPath().length() < objectMapper.fullPath().length()) {
                        nestedObjectMapper = objectMapper;
                    }
                }
            }
        }
        return nestedObjectMapper;
    }

    /**
     * Returns the parent {@link ObjectMapper} instance of the specified object mapper or <code>null</code> if there
     * isn't any.
     */
    // TODO: We should add: ObjectMapper#getParentObjectMapper()
    public ObjectMapper findParentObjectMapper(ObjectMapper objectMapper) {
        int indexOfLastDot = objectMapper.fullPath().lastIndexOf('.');
        if (indexOfLastDot != -1) {
            // strip the last path segment to get the parent's full path
            String parentNestObjectPath = objectMapper.fullPath().substring(0, indexOfLastDot);
            return objectMappers().get(parentNestObjectPath);
        } else {
            return null;
        }
    }

    public boolean isParent(String type) {
        return mapperService.getParentTypes().contains(type);
    }

    /** Merges the given mapping into this one and returns a new mapper; this instance is unchanged. */
    public DocumentMapper merge(Mapping mapping, boolean updateAllTypes) {
        Mapping merged = this.mapping.merge(mapping, updateAllTypes);
        return new DocumentMapper(mapperService, merged);
    }

    /**
     * Recursively update sub field types.
     */
    public DocumentMapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
        Mapping updated = this.mapping.updateFieldType(fullNameToFieldType);
        return new DocumentMapper(mapperService, updated);
    }

    public void close() {
        documentParser.close();
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        return mapping.toXContent(builder, params);
    }
}
/*******************************************************************************
 * Copyright 2016, 2018 vanilladb.org contributors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *******************************************************************************/
package org.vanilladb.core.storage.tx.recovery;

import static org.junit.Assert.assertTrue;
import static org.vanilladb.core.sql.Type.BIGINT;
import static org.vanilladb.core.sql.Type.INTEGER;
import static org.vanilladb.core.sql.Type.VARCHAR;

import java.sql.Connection;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CyclicBarrier;
import java.util.logging.Level;
import java.util.logging.Logger;

import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.vanilladb.core.server.ServerInit;
import org.vanilladb.core.server.VanillaDb;
import org.vanilladb.core.sql.Constant;
import org.vanilladb.core.sql.IntegerConstant;
import org.vanilladb.core.sql.Schema;
import org.vanilladb.core.sql.VarcharConstant;
import org.vanilladb.core.storage.buffer.Buffer;
import org.vanilladb.core.storage.buffer.BufferMgr;
import org.vanilladb.core.storage.file.BlockId;
import org.vanilladb.core.storage.index.Index;
import org.vanilladb.core.storage.index.IndexType;
import org.vanilladb.core.storage.index.SearchKey;
import org.vanilladb.core.storage.index.SearchRange;
import org.vanilladb.core.storage.log.LogSeqNum;
import org.vanilladb.core.storage.metadata.CatalogMgr;
import org.vanilladb.core.storage.metadata.index.IndexInfo;
import org.vanilladb.core.storage.record.RecordId;
import org.vanilladb.core.storage.tx.Transaction;
import org.vanilladb.core.util.BarrierStartRunner;

/**
 * Exercises the recovery manager: rollback, crash recovery, crashes during
 * recovery/rollback, checkpointing, and B-tree index recovery. All tests read
 * and write fixed offsets inside one shared block ({@link #blk}), so offsets
 * in the assertions correspond to the values written by {@link #setup()}.
 */
public class RecoveryBasicTest {
	private static Logger logger = Logger.getLogger(RecoveryBasicTest.class.getName());

	private static String fileName = "recoverybasictest.0";
	private static String dataTableName = "recoverybasictest";
	private static CatalogMgr md;
	private static BlockId blk;

	@BeforeClass
	public static void init() {
		ServerInit.init(RecoveryBasicTest.class);
		blk = new BlockId(fileName, 12);
		md = VanillaDb.catalogMgr();
		// create the data table and a B-tree index on "cid" for the index-recovery test
		Transaction tx = VanillaDb.txMgr().newTransaction(Connection.TRANSACTION_SERIALIZABLE, false);
		Schema schema = new Schema();
		schema.addField("cid", INTEGER);
		schema.addField("title", VARCHAR(20));
		schema.addField("majorid", BIGINT);
		md.createTable(dataTableName, schema, tx);
		List<String> idxFlds = new LinkedList<String>();
		idxFlds.add("cid");
		md.createIndex("index_cid", dataTableName, idxFlds, IndexType.BTREE, tx);
		tx.commit();

		if (logger.isLoggable(Level.INFO))
			logger.info("BEGIN RECOVERY TEST");
	}

	@AfterClass
	public static void finish() {
		if (logger.isLoggable(Level.INFO))
			logger.info("FINISH RECOVERY TEST");
	}

	@Before
	public void setup() {
		// reset initial values in the block
		// Dummy txNum
		Transaction tx = VanillaDb.txMgr().newTransaction(Connection.TRANSACTION_SERIALIZABLE, false);
		long txNum = tx.getTransactionNumber();
		Buffer buff = tx.bufferMgr().pin(blk);
		// each hundred-byte region (4/104/204/...) is owned by one test method
		buff.setVal(4, new IntegerConstant(9876), txNum, null);
		buff.setVal(20, new VarcharConstant("abcdefg"), txNum, null);
		buff.setVal(40, new VarcharConstant("hijk"), txNum, null);
		buff.setVal(104, new IntegerConstant(9999), txNum, null);
		buff.setVal(120, new VarcharConstant("gfedcba"), txNum, null);
		buff.setVal(140, new VarcharConstant("kjih"), txNum, null);
		buff.setVal(204, new IntegerConstant(1415), txNum, null);
		buff.setVal(220, new VarcharConstant("pifo"), txNum, null);
		buff.setVal(240, new VarcharConstant("urth"), txNum, null);
		buff.setVal(304, new IntegerConstant(9265), txNum, null);
		buff.setVal(320, new VarcharConstant("piei"), txNum, null);
		buff.setVal(340, new VarcharConstant("ghth"), txNum, null);
		buff.setVal(404, new IntegerConstant(0), txNum, null);
		buff.setVal(420, new VarcharConstant("aaaa"), txNum, null);
		buff.setVal(440, new VarcharConstant("AAAA"), txNum, null);
		buff.setVal(504, new IntegerConstant(0), txNum, null);
		buff.setVal(520, new VarcharConstant("aaaa"), txNum, null);
		buff.setVal(540, new VarcharConstant("AAAA"), txNum, null);
		tx.bufferMgr().flushAllMyBuffers();
		tx.bufferMgr().unpin(buff);
		tx.commit();
	}

	/** Logged updates must be undone when the transaction rolls back. */
	@Test
	public void testRollback() {
		// log and make changes to the block's values
		LinkedList<BlockId> blklist = new LinkedList<BlockId>();
		blklist.add(blk);
		Transaction tx = VanillaDb.txMgr().newTransaction(Connection.TRANSACTION_SERIALIZABLE, false);
		RecoveryMgr rm = tx.recoveryMgr();
		long txNum = tx.getTransactionNumber();
		BufferMgr bm = tx.bufferMgr();
		Buffer buff = bm.pin(blk);
		LogSeqNum lsn = rm.logSetVal(buff, 4, new IntegerConstant(1234));
		buff.setVal(4, new IntegerConstant(1234), txNum, lsn);
		lsn = rm.logSetVal(buff, 20, new VarcharConstant("xyz"));
		buff.setVal(20, new VarcharConstant("xyz"), txNum, lsn);
		bm.unpin(buff);
		bm.flushAllMyBuffers();

		// verify that the changes got made
		buff = bm.pin(blk);
		assertTrue("*****RecoveryTest: rollback changes not made",
				buff.getVal(4, INTEGER).equals(new IntegerConstant(1234))
						&& ((String) buff.getVal(20, VARCHAR).asJavaVal()).equals("xyz"));
		bm.unpin(buff);
		rm.onTxRollback(tx);

		// verify that they got rolled back
		buff = bm.pin(blk);
		int ti = (Integer) buff.getVal(4, INTEGER).asJavaVal();
		String ts = (String) buff.getVal(20, VARCHAR).asJavaVal();
		assertTrue("*****RecoveryTest: bad rollback", ti == 9876 && ts.equals("abcdefg"));
		bm.unpin(buff);
	}

	/**
	 * Recovery must keep the committed tx2's change and undo the uncommitted
	 * tx1 and the rolled-back tx3.
	 */
	@Test
	public void testRecover() {
		CyclicBarrier startBarrier = new CyclicBarrier(4);
		CyclicBarrier endBarrier = new CyclicBarrier(4);
		// Tx1 Commit after checking
		new SetValTx(startBarrier, endBarrier, blk, 104, new IntegerConstant(1234), 0, 0).start();
		// Tx2 Commit before checking
		new SetValTx(startBarrier, endBarrier, blk, 120, new VarcharConstant("xyz"), 2, 0).start();
		// Tx3 Rollback before checking
		new SetValTx(startBarrier, endBarrier, blk, 140, new VarcharConstant("rst"), 0, 0).start();
		// Wait for setValue running
		try {
			startBarrier.await();
		} catch (InterruptedException | BrokenBarrierException e) {
			e.printStackTrace();
		}
		// verify that the changes got made
		Transaction tx = VanillaDb.txMgr().newTransaction(Connection.TRANSACTION_SERIALIZABLE, false);
		Buffer buff = tx.bufferMgr().pin(blk);
		assertTrue("*****RecoveryTest: recovery changes not made",
				buff.getVal(104, INTEGER).equals(new IntegerConstant(1234))
						&& ((String) buff.getVal(120, VARCHAR).asJavaVal()).equals("xyz")
						&& ((String) buff.getVal(140, VARCHAR).asJavaVal()).equals("rst"));
		tx.bufferMgr().unpin(buff);
		// Wait for checking
		try {
			endBarrier.await();
		} catch (InterruptedException | BrokenBarrierException e) {
			e.printStackTrace();
		}
		Transaction recoveryTx = VanillaDb.txMgr().newTransaction(Connection.TRANSACTION_SERIALIZABLE, false);
		RecoveryMgr.initializeSystem(recoveryTx);
		// verify that tx1 and tx3 got rolled back
		buff = recoveryTx.bufferMgr().pin(blk);
		int ti = (Integer) buff.getVal(104, INTEGER).asJavaVal();
		String ts = (String) buff.getVal(120, VARCHAR).asJavaVal();
		String ts2 = (String) buff.getVal(140, VARCHAR).asJavaVal();
		assertTrue("*****RecoveryTest: bad recovery", ti == 9999 && ts.equals("xyz") && ts2.equals("kjih"));
		recoveryTx.bufferMgr().unpin(buff);
	}

	/**
	 * A crash in the middle of recovery must be repairable by a second, full
	 * recovery pass; compensation (CLR) records make the undo idempotent.
	 */
	@Test
	public void testCrashingDuringRecovery() {
		Transaction tx1 = VanillaDb.txMgr().newTransaction(Connection.TRANSACTION_SERIALIZABLE, false);
		Transaction tx2 = VanillaDb.txMgr().newTransaction(Connection.TRANSACTION_SERIALIZABLE, false);
		Transaction tx3 = VanillaDb.txMgr().newTransaction(Connection.TRANSACTION_SERIALIZABLE, false);
		Long txNum1 = tx1.getTransactionNumber();
		Long txNum2 = tx2.getTransactionNumber();
		Long txNum3 = tx3.getTransactionNumber();
		Buffer buff = tx1.bufferMgr().pin(blk);
		// each tx writes its slot twice; only tx3 commits below
		LogSeqNum lsn1 = tx1.recoveryMgr().logSetVal(buff, 404, new IntegerConstant(1111));
		buff.setVal(404, new IntegerConstant(1111), txNum1, lsn1);
		LogSeqNum lsn2 = tx2.recoveryMgr().logSetVal(buff, 420, new VarcharConstant("bbbb"));
		buff.setVal(420, new VarcharConstant("bbbb"), txNum2, lsn2);
		LogSeqNum lsn3 = tx3.recoveryMgr().logSetVal(buff, 440, new VarcharConstant("BBBB"));
		buff.setVal(440, new VarcharConstant("BBBB"), txNum3, lsn3);
		lsn1 = tx1.recoveryMgr().logSetVal(buff, 404, new IntegerConstant(2222));
		buff.setVal(404, new IntegerConstant(2222), txNum1, lsn1);
		lsn2 = tx2.recoveryMgr().logSetVal(buff, 420, new VarcharConstant("cccc"));
		buff.setVal(420, new VarcharConstant("cccc"), txNum2, lsn2);
		lsn3 = tx3.recoveryMgr().logSetVal(buff, 440, new VarcharConstant("CCCC"));
		buff.setVal(440, new VarcharConstant("CCCC"), txNum3, lsn3);
		tx1.bufferMgr().unpin(buff);
		tx3.commit();

		// verify that the changes got made
		Transaction tx = VanillaDb.txMgr().newTransaction(Connection.TRANSACTION_SERIALIZABLE, false);
		buff = tx.bufferMgr().pin(blk);
		assertTrue("*****RecoverFormCrashing: changes not made",
				buff.getVal(404, INTEGER).equals(new IntegerConstant(2222))
						&& ((String) buff.getVal(420, VARCHAR).asJavaVal()).equals("cccc")
						&& ((String) buff.getVal(440, VARCHAR).asJavaVal()).equals("CCCC"));
		tx.bufferMgr().unpin(buff);

		// Do partial recovery to simulate crash during recovery;
		Transaction partRecoveryTx = VanillaDb.txMgr().newTransaction(Connection.TRANSACTION_SERIALIZABLE, false);
		partRecoveryTx.recoveryMgr().recoverSystemPartially(partRecoveryTx, 5);

		// Do total recovery again
		Transaction recoveryTx = VanillaDb.txMgr().newTransaction(Connection.TRANSACTION_SERIALIZABLE, false);
		recoveryTx.recoveryMgr().recoverSystem(recoveryTx);

		// verify that tx1 and tx2 got rolled back
		buff = recoveryTx.bufferMgr().pin(blk);
		int ti = (Integer) buff.getVal(404, INTEGER).asJavaVal();
		String ts = (String) buff.getVal(420, VARCHAR).asJavaVal();
		String ts2 = (String) buff.getVal(440, VARCHAR).asJavaVal();
		assertTrue("*****CrashingDuringRecoveryTest: bad recovery", ti == 0 && ts.equals("aaaa") && ts2.equals("CCCC"));
		// the interrupted-then-repeated undo should have emitted CLRs
		int clrCount = 0;
		ReversibleIterator<LogRecord> iter = new LogRecordIterator();
		for (int i = 0; i < 8; i++) {
			LogRecord rec = iter.next();
			if (rec instanceof CompesationLogRecord)
				clrCount++;
		}
		assertTrue("*****CrashingDuringRecoveryTest: UndoNext failure", clrCount >= 4);
		recoveryTx.bufferMgr().unpin(buff);
	}

	/**
	 * Like {@link #testCrashingDuringRecovery()}, but the crash happens while
	 * two transactions are being rolled back; a full recovery must finish the
	 * undo using the compensation records.
	 */
	@Test
	public void testCrashingDuringRollBack() {
		Transaction tx1 = VanillaDb.txMgr().newTransaction(Connection.TRANSACTION_SERIALIZABLE, false);
		Transaction tx2 = VanillaDb.txMgr().newTransaction(Connection.TRANSACTION_SERIALIZABLE, false);
		Transaction tx3 = VanillaDb.txMgr().newTransaction(Connection.TRANSACTION_SERIALIZABLE, false);
		Long txNum1 = tx1.getTransactionNumber();
		Long txNum2 = tx2.getTransactionNumber();
		Long txNum3 = tx3.getTransactionNumber();
		Buffer buff = tx1.bufferMgr().pin(blk);
		LogSeqNum lsn1 = tx1.recoveryMgr().logSetVal(buff, 504, new IntegerConstant(1111));
		buff.setVal(504, new IntegerConstant(1111), txNum1, lsn1);
		LogSeqNum lsn2 = tx2.recoveryMgr().logSetVal(buff, 520, new VarcharConstant("bbbb"));
		buff.setVal(520, new VarcharConstant("bbbb"), txNum2, lsn2);
		LogSeqNum lsn3 = tx3.recoveryMgr().logSetVal(buff, 540, new VarcharConstant("BBBB"));
		buff.setVal(540, new VarcharConstant("BBBB"), txNum3, lsn3);
		lsn1 = tx1.recoveryMgr().logSetVal(buff, 504, new IntegerConstant(2222));
		buff.setVal(504, new IntegerConstant(2222), txNum1, lsn1);
		lsn2 = tx2.recoveryMgr().logSetVal(buff, 520, new VarcharConstant("cccc"));
		buff.setVal(520, new VarcharConstant("cccc"), txNum2, lsn2);
		lsn3 = tx3.recoveryMgr().logSetVal(buff, 540, new VarcharConstant("CCCC"));
		buff.setVal(540, new VarcharConstant("CCCC"), txNum3, lsn3);
		tx1.bufferMgr().unpin(buff);
		tx3.commit();

		// verify that the changes got made
		Transaction tx = VanillaDb.txMgr().newTransaction(Connection.TRANSACTION_SERIALIZABLE, false);
		buff = tx.bufferMgr().pin(blk);
		assertTrue("*****CrashingDuringRollBackTest: changes not made",
				buff.getVal(504, INTEGER).equals(new IntegerConstant(2222))
						&& ((String) buff.getVal(520, VARCHAR).asJavaVal()).equals("cccc"));
		tx.bufferMgr().unpin(buff);

		// Do partial rollback to simulate crash during rollback;
		tx1.recoveryMgr().rollbackPartially(tx1, 5);
		tx1.recoveryMgr().rollbackPartially(tx2, 5);

		// Do total recovery again
		Transaction recoveryTx = VanillaDb.txMgr().newTransaction(Connection.TRANSACTION_SERIALIZABLE, false);
		recoveryTx.recoveryMgr().recoverSystem(recoveryTx);

		// verify that tx1 and tx2 got rolled back
		buff = recoveryTx.bufferMgr().pin(blk);
		int ti = (Integer) buff.getVal(504, INTEGER).asJavaVal();
		String ts = (String) buff.getVal(520, VARCHAR).asJavaVal();
		String ts2 = (String) buff.getVal(540, VARCHAR).asJavaVal();
		assertTrue("*****CrashingDuringRollBackTest: bad rollback", ti == 0 && ts.equals("aaaa") && ts2.equals("CCCC"));
		int clrCount = 0;
		ReversibleIterator<LogRecord> iter = new LogRecordIterator();
		for(int i = 0 ; i < 8 ; i ++){
			LogRecord rec = iter.next();
			if(rec instanceof CompesationLogRecord)
				clrCount++;
		}
		assertTrue("*****CrashingDuringRollBackTest: UndoNext failure", clrCount>=4 );
		recoveryTx.bufferMgr().unpin(buff);
	}

	/**
	 * Checkpoint test: recovery after a checkpoint must undo transactions that
	 * never completed (tx4, tx6), keep committed ones (tx1, tx2, tx5), and
	 * respect rollbacks done before the checkpoint (tx3).
	 */
	@Test
	public void testCheckpoint() {
		CyclicBarrier startBarrier = new CyclicBarrier(5);
		CyclicBarrier endBarrier = new CyclicBarrier(5);
		// Tx1 Commit after chkpnt
		new SetValTx(startBarrier, endBarrier, blk, 204, new IntegerConstant(3538), 0, 2).start();
		// Tx2 Commit before chkpnt
		new SetValTx(startBarrier, endBarrier, blk, 220, new VarcharConstant("twel"), 2, 0).start();
		// Tx3 Rollback before chkpnt
		new SetValTx(startBarrier, endBarrier, blk, 240, new VarcharConstant("tfth"), 1, 0).start();
		// Tx4 never commit or rollback
		new SetValTx(startBarrier, endBarrier, blk, 304, new IntegerConstant(9323), 0, 0).start();
		// Wait for setValue running
		try {
			startBarrier.await();
		} catch (InterruptedException | BrokenBarrierException e) {
			e.printStackTrace();
		}
		// Main thread create chkpnt
		Transaction chkpnt = VanillaDb.txMgr().newTransaction(Connection.TRANSACTION_SERIALIZABLE, false);
		VanillaDb.txMgr().createCheckpoint(chkpnt);
		chkpnt.commit();
		// Wait for Checkpoint running
		try {
			endBarrier.await();
		} catch (InterruptedException | BrokenBarrierException e) {
			e.printStackTrace();
		}
		startBarrier = new CyclicBarrier(2);
		endBarrier = new CyclicBarrier(3);
		// Tx5 Commit after chkpnt
		new SetValTx(startBarrier, endBarrier, blk, 320, new VarcharConstant("sixt"), 2, 0).start();
		// Tx6 never commit or rollback
		new SetValTx(startBarrier, endBarrier, blk, 340, new VarcharConstant("eenth"), 0, 0).start();
		// Wait for tx5 commit running
		try {
			endBarrier.await();
		} catch (InterruptedException | BrokenBarrierException e) {
			e.printStackTrace();
		}
		Transaction recoveryTx = VanillaDb.txMgr().newTransaction(Connection.TRANSACTION_SERIALIZABLE, false);
		RecoveryMgr.initializeSystem(recoveryTx);
		Buffer buff = recoveryTx.bufferMgr().pin(blk);
		int ti1 = (Integer) buff.getVal(204, INTEGER).asJavaVal();
		String ts2 = (String) buff.getVal(220, VARCHAR).asJavaVal();
		String ts3 = (String) buff.getVal(240, VARCHAR).asJavaVal();
		int ti4 = (Integer) buff.getVal(304, INTEGER).asJavaVal();
		String ts5 = (String) buff.getVal(320, VARCHAR).asJavaVal();
		String ts6 = (String) buff.getVal(340, VARCHAR).asJavaVal();
		assertTrue("*****RecoveryTest: bad checkpoint recovery", ti1 == 3538 && ts2.equals("twel")
				&& ts3.equals("urth") && ti4 == 9265 && ts5.equals("sixt") && ts6.equals("ghth"));
		recoveryTx.bufferMgr().unpin(buff);
	}

	/**
	 * Worker thread that writes one logged value into a block, then optionally
	 * commits (2) or rolls back (1) before and/or after the barrier tasks.
	 */
	class SetValTx extends BarrierStartRunner {

		BlockId blk;
		int offset;
		Constant constant;
		Transaction tx;
		Buffer buff;
		RecoveryMgr rm;
		long txNum;
		// 0 = do nothing, 1 = rollback, 2 = commit (see doSomething)
		int beforeTask = 0;
		int afterTask = 0;

		public SetValTx(CyclicBarrier startBarrier, CyclicBarrier endBarrier, BlockId blk, int offset,
				Constant constant, int beforeTask, int afterTask) {
			super(startBarrier, endBarrier);
			this.blk = blk;
			this.offset = offset;
			this.constant = constant;
			this.beforeTask = beforeTask;
			this.afterTask = afterTask;
		}

		@Override
		public void beforeTask() {
			tx = VanillaDb.txMgr().newTransaction(Connection.TRANSACTION_SERIALIZABLE, false);
			txNum = tx.getTransactionNumber();
			buff = tx.bufferMgr().pin(blk);
			rm = tx.recoveryMgr();
			LogSeqNum lsn = rm.logSetVal(buff, offset, constant);
			buff.setVal(offset, this.constant, txNum, lsn);
			tx.bufferMgr().unpin(buff);
			doSomething(beforeTask);
		}

		@Override
		public void afterTask() {
			doSomething(afterTask);
		}

		@Override
		public void runTask() {

		}

		private void doSomething(int order) {
			switch (order) {
			case 0:
				// do nothing
				break;
			case 1:
				tx.rollback();
				break;
			case 2:
				tx.commit();
				break;
			}
		}

	}

	/**
	 * B-tree index recovery: committed insertions survive redo, and
	 * insert/delete done in a rolled-back transaction are undone.
	 */
	@Test
	public void testBTreeIndexRecovery() {
		// The first tx inserts records to the index
		Transaction tx = VanillaDb.txMgr().newTransaction(Connection.TRANSACTION_SERIALIZABLE, false);
		IndexInfo ii = md.getIndexInfo(dataTableName, "cid", tx).get(0);
		Index cidIndex = ii.open(tx);
		RecordId[] records = new RecordId[10];
		BlockId blk = new BlockId(dataTableName + ".tbl", 0);
		SearchKey int5 = new SearchKey(new IntegerConstant(5));
		for (int i = 0; i < 10; i++) {
			records[i] = new RecordId(blk, i);
			cidIndex.insert(int5, records[i], true);
		}
		RecordId rid2 = new RecordId(blk, 19);
		SearchKey int7 = new SearchKey(new IntegerConstant(7));
		cidIndex.insert(int7, rid2, true);
		cidIndex.close();
		tx.commit();

		// The second tx does recovery (redo)
		tx = VanillaDb.txMgr().newTransaction(Connection.TRANSACTION_SERIALIZABLE, false);
		RecoveryMgr.initializeSystem(tx);
		tx.commit();

		// The third tx checks the records
		tx = VanillaDb.txMgr().newTransaction(Connection.TRANSACTION_SERIALIZABLE, true);
		ii = md.getIndexInfo(dataTableName, "cid", tx).get(0);
		cidIndex = ii.open(tx);
		cidIndex.beforeFirst(new SearchRange(int5));
		int k = 0;
		while (cidIndex.next())
			k++;
		assertTrue("*****RecoveryTest: bad index insertion recovery", k == 10);
		cidIndex.beforeFirst(new SearchRange(int7));
		cidIndex.next();
		assertTrue("*****RecoveryTest: bad index insertion recovery", cidIndex.getDataRecordId().equals(rid2));
		cidIndex.close();
		tx.commit();

		// The fourth tx tests roll back deletion on index
		tx = VanillaDb.txMgr().newTransaction(Connection.TRANSACTION_SERIALIZABLE, false);
		ii = md.getIndexInfo(dataTableName, "cid", tx).get(0);
		cidIndex = ii.open(tx);
		cidIndex.delete(int7, rid2, true);
		RecordId rid3 = new RecordId(blk, 999);
		SearchKey int777 = new SearchKey(new IntegerConstant(777));
		cidIndex.insert(int777, rid3, true);
		cidIndex.close();
		tx.rollback();

		// The fifth tx checks the result
		tx = VanillaDb.txMgr().newTransaction(Connection.TRANSACTION_SERIALIZABLE, true);
		ii = md.getIndexInfo(dataTableName, "cid", tx).get(0);
		cidIndex = ii.open(tx);
		cidIndex.beforeFirst(new SearchRange(int7));
		cidIndex.next();
		assertTrue("*****RecoveryTest: bad index deletion rollback", cidIndex.getDataRecordId().equals(rid2));
		cidIndex.beforeFirst(new SearchRange(int777));
		cidIndex.next();
		assertTrue("*****RecoveryTest: bad index insertion rollback", !cidIndex.next());
		cidIndex.close();
		tx.commit();
	}
}
/*

   Derby - Class org.apache.derbyTesting.functionTests.tests.jdbcapi.StatementPoolingTest

   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

 */
package org.apache.derbyTesting.functionTests.tests.jdbcapi;

import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

import javax.sql.ConnectionPoolDataSource;
import javax.sql.PooledConnection;

import junit.framework.Test;
import junit.framework.TestSuite;

import org.apache.derbyTesting.junit.BaseJDBCTestCase;
import org.apache.derbyTesting.junit.BaseJDBCTestSetup;
import org.apache.derbyTesting.junit.J2EEDataSource;
import org.apache.derbyTesting.junit.JDBC;
import org.apache.derbyTesting.junit.TestConfiguration;

/**
 * A set of tests specifically targeted at connections that support statement
 * pooling.
 */
public class StatementPoolingTest
        extends BaseJDBCTestCase {

    public StatementPoolingTest(String name) {
        super(name);
    }

    /**
     * Tests that the statement cache is able to throw out prepared statements
     * when it reaches maximum capacity.
     *
     * @throws SQLException if something goes wrong...
     */
    public void testCacheOverflow()
            throws SQLException {
        // Prepare many more statements than the cache can hold (11).
        final int stmtCount = 150;
        ConnectionPoolDataSource cpDs =
                J2EEDataSource.getConnectionPoolDataSource();
        J2EEDataSource.setBeanProperty(
                cpDs, "maxStatements", Integer.valueOf(11));
        J2EEDataSource.setBeanProperty(cpDs, "createDatabase", "create");
        PooledConnection pc = cpDs.getPooledConnection();
        Connection con = pc.getConnection();
        for (int i=0; i < stmtCount; i++) {
            // Yes, the "values + i" is intended here.
            PreparedStatement pStmt = con.prepareStatement("values " + i);
            ResultSet rs = pStmt.executeQuery();
            JDBC.assertSingleValueResultSet(rs, Integer.toString(i));
            pStmt.close();
        }
        con.close();
        pc.close();
    }

    /**
     * Verifies that statement pooling is enabled by checking the names of the
     * implementation classes in Derby.
     *
     * @throws SQLException if creating the JDBC objects fail
     */
    public void testPoolingEnabledByCheckingImplementationDetails()
            throws SQLException {
        final String conClass = "CachingLogicalConnection";
        final String psClass = "LogicalPreparedStatement";
        final String csClass = "LogicalCallableStatement";
        Connection con = getConnection();
        assertClassName(con, conClass);
        // Exercise every prepareStatement/prepareCall overload; each must
        // hand back the logical (pooled) wrapper class.
        assertClassName(con.prepareStatement("values 1"), psClass);
        assertClassName(con.prepareStatement("values 1",
                                             Statement.RETURN_GENERATED_KEYS),
                        psClass);
        assertClassName(con.prepareStatement("values 1",
                                             Statement.NO_GENERATED_KEYS),
                        psClass);
        assertClassName(con.prepareStatement("values 1",
                                             ResultSet.TYPE_FORWARD_ONLY,
                                             ResultSet.CONCUR_READ_ONLY),
                        psClass);
        assertClassName(con.prepareStatement("values 1",
                                             ResultSet.TYPE_SCROLL_INSENSITIVE,
                                             ResultSet.CONCUR_UPDATABLE),
                        psClass);
        assertClassName(con.prepareStatement("values 1", (String[])null),
                        psClass);
        assertClassName(con.prepareStatement("values 1", new String[] {}),
                        psClass);
        assertClassName(con.prepareCall("values 1"), csClass);
        assertClassName(con.prepareCall("values 1",
                                        ResultSet.TYPE_FORWARD_ONLY,
                                        ResultSet.CONCUR_READ_ONLY),
                        csClass);
        assertClassName(con.prepareCall("values 1",
                                        ResultSet.TYPE_FORWARD_ONLY,
                                        ResultSet.CONCUR_READ_ONLY,
                                        ResultSet.CLOSE_CURSORS_AT_COMMIT),
                        csClass);
    }

    /**
     * Assert that the name of the class of the object is what is expected.
     * <p>
     * The assert does not consider package names, only the name passed in as
     * {@code expectedName} and the passed in name concatenated with "40".
     *
     * @param obj object to check
     * @param expectedName the expected name of the class
     * @throws AssertionFailedError if the class name is not as expected
     */
    private static void assertClassName(Object obj, String expectedName) {
        assertNotNull("The expected name cannot be <null>", expectedName);
        assertNotNull("The object cannot be <null>", obj);
        String[] names = obj.getClass().getName().split("\\.");
        String simpleName = names[names.length - 1];
        if (!simpleName.equals(expectedName)) {
            // Also accept the JDBC 4.0 variant of the implementation class.
            if (!simpleName.equals(expectedName + "40")) {
                fail("Expected class name " + expectedName + " or " +
                        expectedName + "40, got " + simpleName);
            }
        }
    }

    /**
     * This test merely checks that creating a logical prepared statement does
     * not fail.
     *
     * @throws SQLException if creating the prepared statement fails
     */
    public void testPrepareStatementPath()
            throws SQLException {
        PreparedStatement ps = prepareStatement("values 9708");
        ps.close();
    }

    /**
     * This test merely checks that creating a logical callable statement does
     * not fail.
     *
     * @throws SQLException if creating the callable statement fails
     */
    public void testPrepareCallPath()
            throws SQLException {
        CallableStatement cs = prepareCall(
                "CALL SYSCS_UTIL.SYSCS_SET_RUNTIMESTATISTICS(0)");
        cs.close();
    }

    /**
     * This test merely checks that creating a logical callable statement,
     * which is not really a call, does not fail.
     *
     * @throws SQLException if creating the callable statement fails
     */
    public void testPrepareCallWithNoCallPath()
            throws SQLException {
        CallableStatement cs = prepareCall("values 1");
        cs.close();
    }

    /**
     * Tests that closing the prepared statement also closes the result set.
     *
     * @throws SQLException if something goes wrong...
     */
    public void testClosingPSClosesRS()
            throws SQLException {
        PreparedStatement ps = prepareStatement("values 99");
        ResultSet rs = ps.executeQuery();
        ps.close();
        try {
            rs.next();
            fail("Result set should have been closed");
        } catch (SQLException sqle) {
            // XCL16: result set already closed.
            assertSQLState("XCL16", sqle);
        }
    }

    /**
     * Tests that the connection holdability is reset, when it is first
     * modified, the connection closed and a new logical connection obtained.
     *
     * @throws SQLException if something goes wrong...
     */
    public void testHoldabilityIsResetExplicitClose()
            throws SQLException {
        doTestHoldabilityIsReset(true);
    }

    /**
     * Tests that the connection holdability is reset, when it is first
     * modified, and a new logical connection obtained without first explicitly
     * closing the previous one.
     *
     * @throws SQLException if something goes wrong...
     */
    public void testHoldabilityIsResetNoExplicitClose()
            throws SQLException {
        doTestHoldabilityIsReset(false);
    }

    /**
     * Test sequence for testing if the connection holdability is reset.
     *
     * @param closeConnection determines if the logical connection is
     *      explicitly closed before a new one is obtained
     * @throws SQLException if something goes wrong...
     */
    private void doTestHoldabilityIsReset(final boolean closeConnection)
            throws SQLException {
        ConnectionPoolDataSource cpDs =
                J2EEDataSource.getConnectionPoolDataSource();
        J2EEDataSource.setBeanProperty(
                cpDs, "maxStatements", Integer.valueOf(7));
        J2EEDataSource.setBeanProperty(cpDs, "createDatabase", "create");
        PooledConnection pc = cpDs.getPooledConnection();
        // Keep track of our own connection, the framework currently creates
        // a new pooled connection and then obtains a connection from that.
        // Statement pooling only works within a single pooled connection.
        Connection con = pc.getConnection();
        assertEquals("Unexpected default holdability",
                ResultSet.HOLD_CURSORS_OVER_COMMIT, con.getHoldability());
        con.setHoldability(ResultSet.CLOSE_CURSORS_AT_COMMIT);
        assertEquals("Holdability not updated",
                ResultSet.CLOSE_CURSORS_AT_COMMIT, con.getHoldability());
        if (closeConnection) {
            con.close();
        }
        // Obtaining a new logical connection must restore the default.
        con = pc.getConnection();
        assertEquals("Holdability not reset",
                ResultSet.HOLD_CURSORS_OVER_COMMIT, con.getHoldability());
        pc.close();
    }

    public void testIsolationLevelIsResetExplicitCloseQuery()
            throws SQLException {
        doTestIsolationLevelIsReset(true, true);
    }

    public void testIsolationLevelIsResetExplicitCloseNoQuery()
            throws SQLException {
        doTestIsolationLevelIsReset(true, false);
    }

    public void testIsolationLevelIsResetNoExplicitCloseNoQuery()
            throws SQLException {
        doTestIsolationLevelIsReset(false, false);
    }

    public void testIsolationLevelIsResetNoExplicitCloseQuery()
            throws SQLException {
        doTestIsolationLevelIsReset(false, true);
    }

    /**
     * Tests if the connection isolation level is reset when a new connection
     * is obtained.
     * <p>
     * The two arguments are introduced to test different scenarios; explicit
     * and implicit connection closing, and session data caching (piggybacked
     * information).
     *
     * @param closeConnection tells if the connection is explicitly closed
     *      before a new one is obtained
     * @param executeQuery tells if a query is executed on the connection
     *      before a new connection is obtained.
     * @throws SQLException if something goes wrong...
     */
    private void doTestIsolationLevelIsReset(final boolean closeConnection,
                                             final boolean executeQuery)
            throws SQLException {
        ConnectionPoolDataSource cpDs =
                J2EEDataSource.getConnectionPoolDataSource();
        J2EEDataSource.setBeanProperty(
                cpDs, "maxStatements", Integer.valueOf(7));
        J2EEDataSource.setBeanProperty(cpDs, "createDatabase", "create");
        PooledConnection pc = cpDs.getPooledConnection();
        // Keep track of our own connection, the framework currently creates
        // a new pooled connection and then obtains a connection from that.
        // Statement pooling only works within a single pooled connection.
        Connection con = pc.getConnection();
        assertEquals("Unexpected default isolation level",
                Connection.TRANSACTION_READ_COMMITTED,
                con.getTransactionIsolation());
        con.setTransactionIsolation(Connection.TRANSACTION_REPEATABLE_READ);
        assertEquals("Isolation level not updated",
                Connection.TRANSACTION_REPEATABLE_READ,
                con.getTransactionIsolation());
        if (executeQuery) {
            // Trigger session data to be piggybacked to the client.
            PreparedStatement ps = con.prepareStatement("values 2");
            JDBC.assertSingleValueResultSet(ps.executeQuery(), "2");
            ps.close();
        }
        if (closeConnection) {
            con.close();
        }
        // Obtaining a new logical connection must restore the default.
        con = pc.getConnection();
        assertEquals("Isolation level not reset",
                Connection.TRANSACTION_READ_COMMITTED,
                con.getTransactionIsolation());
        pc.close();
    }

    /**
     * Tests that closing the caching logical connection closes the logical
     * prepared statement, but not the physical prepared statement.
     * <p>
     * Since there are no public interface methods to test this, the approach
     * taken will be as follows:
     * <ol> <li>Create a new table.</li>
     *      <li>Prepare a statement selecting from the table.</li>
     *      <li>Close the statement, putting it into the cache.</li>
     *      <li>Delete the table.</li>
     *      <li>Prepare the selecting statement again.</li>
     *      <li>Execute the statement.</li>
     * </ol>
     * If the physical statement was closed when closing the caching logical
     * connection, the prepare will fail. If it was left open, the prepare
     * will succeed because the statement is fetched from the cache, but the
     * execution will fail because the table no longer exists.
     *
     * @throws SQLException if something goes wrong...
     */
    public void testCachingLogicalConnectionCloseLeavesPhysicalStatementsOpen()
            throws SQLException {
        final String SELECT_SQL = "select * from clcclso";
        ConnectionPoolDataSource cpDs =
                J2EEDataSource.getConnectionPoolDataSource();
        J2EEDataSource.setBeanProperty(
                cpDs, "maxStatements", Integer.valueOf(7));
        J2EEDataSource.setBeanProperty(cpDs, "createDatabase", "create");
        PooledConnection pc = cpDs.getPooledConnection();
        // Keep track of our own connection, the framework currently creates
        // a new pooled connection and then obtains a connection from that.
        // Statement pooling only works within a single pooled connection.
        Connection con = pc.getConnection();
        con.setAutoCommit(false);
        Statement stmt = createStatement();
        stmt.executeUpdate("create table clcclso (id int)");
        PreparedStatement ps = con.prepareStatement(SELECT_SQL);
        commit();
        con.close();
        try {
            // Should fail because the logical statement has been closed.
            ps.execute();
            fail("Logical connection close did not close logical statement.");
        } catch (SQLException sqle) {
            // Already closed.
            assertSQLState("XJ012", sqle);
        }
        stmt = createStatement();
        stmt.executeUpdate("drop table clcclso");
        commit();
        // If an exception is thrown here, statement pooling is disabled or
        // not working correctly.
        con = pc.getConnection();
        ps = con.prepareStatement(SELECT_SQL); // From cache.
        try {
            // Should fail here because the referenced table has been deleted.
            ps.execute();
            fail("Execution should have failed");
        } catch (SQLException sqle) {
            // 42X05: table does not exist.
            assertSQLState("42X05", sqle);
        }
        ps.close();
        // Make sure the connection is still valid.
        ps = con.prepareStatement("values 976");
        JDBC.assertSingleValueResultSet(ps.executeQuery(), "976");
        ps.close();
        con.close();
    }

    /**
     * Checks if a reset of one statement affects other open statement on the
     * connection.
     *
     * @throws SQLException if something goes wrong...
     */
    public void resTestCloseDoesNotAffectOtherStatement()
            throws SQLException {
        final String sql = "select * from stmtpooltest where val > 0 and val " +
                "<= 7 order by val";
        PreparedStatement psForward = prepareStatement(sql);
        ResultSet rsForward = psForward.executeQuery();
        assertTrue(rsForward.next());
        assertEquals("1", rsForward.getString(1));
        assertTrue(rsForward.next());
        assertEquals("2", rsForward.getString(1));
        PreparedStatement psScrollable = prepareStatement(sql,
                ResultSet.TYPE_SCROLL_INSENSITIVE,
                ResultSet.CONCUR_READ_ONLY);
        ResultSet rsScrollable = psScrollable.executeQuery();
        // Read seven rows from the scrollable rs, position at last row.
        for (int val=1; val <= 7; val++) {
            assertTrue(rsScrollable.next());
            assertEquals(val, rsScrollable.getInt(1));
        }
        // Create a statement, then close it.
        PreparedStatement psToClose = prepareStatement(
                "select val from stmtpooltest where val = 5");
        JDBC.assertSingleValueResultSet(psToClose.executeQuery(), "5");
        psToClose.close();
        // Both open result sets must still be usable.
        assertTrue(rsForward.next());
        assertEquals("3", rsForward.getString(1));
        assertTrue(rsScrollable.first());
        assertEquals("1", rsScrollable.getString(1));
        // Should fetch a cached statement.
        psToClose = prepareStatement(
                "select val from stmtpooltest where val = 5");
        JDBC.assertSingleValueResultSet(psToClose.executeQuery(), "5");
        psToClose.close();
        assertTrue(rsScrollable.last());
        assertEquals("7", rsScrollable.getString(1));
        assertFalse(rsScrollable.next());
        rsScrollable.close();
        assertTrue(rsForward.next());
        assertEquals("4", rsForward.getString(1));
        rsForward.close();
    }

    /**
     * Tests that closing a logical prepared statement referring a
     * non-existing table works.
     * <p>
     * In this test, the prepared statement that will be made invalid by the
     * delete is held open by the user.
     *
     * @throws SQLException if something goes wrong...
     */
    public void testDeleteReferringTableWhenOpen()
            throws SQLException {
        getConnection().setAutoCommit(false);
        // Create a table, insert a row, then create a statement selecting it.
        Statement stmt = createStatement();
        stmt.executeUpdate("create table testdeletewhenopen (id int)");
        stmt.executeUpdate("insert into testdeletewhenopen values 1");
        PreparedStatement ps = prepareStatement(
                "select * from testdeletewhenopen");
        JDBC.assertSingleValueResultSet(ps.executeQuery(), "1");
        // Now delete the table and logically close the prepared statement.
        stmt.executeUpdate("drop table testdeletewhenopen");
        stmt.close();
        ps.close();
        // If running without statement pooling, you will get exception here.
        ps = prepareStatement("select * from testdeletewhenopen");
        // If we get this far, there is a big chance we have fetched an
        // invalid statement from the cache, but we won't get the exception
        // until we try to execute it.
        try {
            ps.executeQuery();
            fail("Prepared statement not valid, referring non-existing table");
        } catch (SQLException sqle) {
            assertSQLState("42X05", sqle);
        }
    }

    /**
     * Tests that closing a logical prepared statement referring a
     * non-existing table works.
     * <p>
     * In this test, the prepared statement that will be made invalid by the
     * delete is in the statement cache when the delete happens.
     *
     * @throws SQLException if something goes wrong...
     */
    public void testDeleteReferringTableWhenInCache()
            throws SQLException {
        getConnection().setAutoCommit(false);
        // Create a table, insert a row, then create a statement selecting it.
        Statement stmt = createStatement();
        stmt.executeUpdate("create table testdeletewhenopen (id int)");
        stmt.executeUpdate("insert into testdeletewhenopen values 1");
        PreparedStatement ps = prepareStatement(
                "select * from testdeletewhenopen");
        JDBC.assertSingleValueResultSet(ps.executeQuery(), "1");
        // Put the statement into the cache.
        ps.close();
        // Now delete the table and fetch the cached prepared statement.
        stmt.executeUpdate("drop table testdeletewhenopen");
        stmt.close();
        // If running without statement pooling, you will get exception here.
        ps = prepareStatement("select * from testdeletewhenopen");
        // If we get this far, there is a big chance we have fetched an
        // invalid statement from the cache, but we won't get the exception
        // until we try to execute it.
        try {
            ps.executeQuery();
            fail("Prepared statement not valid, referring non-existing table");
        } catch (SQLException sqle) {
            assertSQLState("42X05", sqle);
        }
    }

    public void resTestCloseCursorsAtCommit()
            throws SQLException {
        doTestResultSetCloseForHoldability(ResultSet.CLOSE_CURSORS_AT_COMMIT);
    }

    public void resTestHoldCursorsOverCommit()
            throws SQLException {
        doTestResultSetCloseForHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT);
    }

    /**
     * Tests that a temporary table created in one logical connection is gone
     * in the next logical connection.
     *
     * @throws SQLException if the test fails for some reason
     */
    public void testTemporaryTablesAreDeletedInNewLogicalConnection()
            throws SQLException {
        ConnectionPoolDataSource cpds =
                J2EEDataSource.getConnectionPoolDataSource();
        J2EEDataSource.setBeanProperty(
                cpds, "maxStatements", Integer.valueOf(3));
        J2EEDataSource.setBeanProperty(cpds, "createDatabase", "create");
        PooledConnection pc = cpds.getPooledConnection();

        // Create the first logical connection and the temporary table.
        Connection lcOne = pc.getConnection();
        Statement stmt = lcOne.createStatement();
        stmt.executeUpdate("DECLARE GLOBAL TEMPORARY TABLE cpds_temp_table " +
                "(id int) ON COMMIT PRESERVE ROWS NOT LOGGED");
        // The temporary table is created in SESSION.
        JDBC.assertEmpty(
                stmt.executeQuery("select * from SESSION.cpds_temp_table"));
        stmt.executeUpdate("insert into SESSION.cpds_temp_table values 1");
        lcOne.commit();
        lcOne.close();

        // Create the second logical connection and try to query the temp
        // table.
        Connection lcTwo = pc.getConnection();
        stmt = lcTwo.createStatement();
        try {
            stmt.executeQuery("select * from SESSION.cpds_temp_table");
            fail("Temporary table still existing in new logical connection.");
        } catch (SQLException sqle) {
            // Expect syntax error.
            assertSQLState("42X05", sqle);
        }
        lcTwo.rollback();
        lcTwo.close();
        pc.close();
    }

    /**
     * Tests if the holdability settings is taking effect, and also that the
     * result set is closed when the connection is closed.
     *
     * @param holdability result set holdability as specified by
     *      {@link java.sql.ResultSet}
     * @throws SQLException if something goes wrong...
     */
    private void doTestResultSetCloseForHoldability(int holdability)
            throws SQLException {
        getConnection().setAutoCommit(false);
        PreparedStatement ps = prepareStatement(
                "select * from stmtpooltest order by val",
                ResultSet.TYPE_FORWARD_ONLY,
                ResultSet.CONCUR_READ_ONLY,
                holdability);
        ResultSet rs = ps.executeQuery();
        assertTrue(rs.next());
        assertEquals(1, rs.getInt(1));
        commit();
        if (holdability == ResultSet.HOLD_CURSORS_OVER_COMMIT) {
            // A held cursor must survive the commit above.
            assertTrue(rs.next());
            assertEquals(2, rs.getInt(1));
        }
        getConnection().close();
        try {
            rs.next();
            fail("Should have thrown exception");
        } catch (SQLException sqle) {
            assertSQLState("XCL16", sqle);
        }
    }

    /**
     * Make sure {@link ResultSet#getStatement} returns the same object as the
     * one that created the result set.
     */
    public void testGetStatementCallable()
            throws SQLException {
        doTestGetStatement(prepareCall("values 7653"));
    }

    /**
     * Make sure {@link ResultSet#getStatement} returns the same object as the
     * one that created the result set.
     */
    public void testGetStatementPrepared()
            throws SQLException {
        doTestGetStatement(prepareStatement("values 7652"));
    }

    /**
     * Make sure {@link ResultSet#getStatement} returns the same object as the
     * one that created the result set.
     *
     * @param ps prepared or callable statement to test with
     * @throws SQLException if something goes wrong...
     */
    private void doTestGetStatement(PreparedStatement ps)
            throws SQLException {
        ResultSet psRs = ps.executeQuery();
        assertSame(ps, psRs.getStatement());
        psRs.close();
        // Try another way.
        ps.execute();
        psRs = ps.getResultSet();
        assertSame(ps, psRs.getStatement());
        assertFalse(ps.getMoreResults());
        assertNull(ps.getResultSet());
        // This one should fail.
        try {
            // executeQuery(String) is invalid on a PreparedStatement.
            psRs = ps.executeQuery("values 99");
            fail("executeQuery(String) should be disallowed");
        } catch (SQLException sqle) {
            assertSQLState("XJ016", sqle);
        }
    }

    /**
     * Checks if closing the logical connection closes the logical statement.
     *
     * @throws SQLException if something goes wrong...
     */
    public void resTestLogicalConnectionCloseInvalidatesLogicalStatement()
            throws SQLException {
        Connection con = getConnection();
        PreparedStatement ps =
                con.prepareStatement("select * from stmtpooltest");
        // Don't execute the statement.
        assertNotNull(ps.getMetaData());
        con.close();
        try {
            ps.getMetaData();
            fail("Logical statement should be closed and throw exception");
        } catch (SQLException sqle) {
            assertSQLState("XJ012", sqle);
        }
        con = getConnection();
        ps = con.prepareStatement("select * from stmtpooltest order by val");
        // Execute the statement this time.
        ResultSet rs = ps.executeQuery();
        assertTrue(rs.next());
        assertNotNull(ps.getMetaData());
        rs.close();
        con.close();
        try {
            ps.getMetaData();
            fail("Logical statement should be closed and throw exception");
        } catch (SQLException sqle) {
            assertSQLState("XJ012", sqle);
        }
    }

    /**
     * Tests that nothing is committed on the connection when autocommit is
     * disabled.
     *
     * @throws SQLException if something goes wrong...
     */
    public void resTestNoCommitOnReuse()
            throws SQLException {
        // Make sure the table is empty.
        cleanTableExceptedToBeEmpty();
        // Start test
        final String sql = "insert into stmtpooldata (val) values ?";
        getConnection().setAutoCommit(false);
        PreparedStatement ps = prepareStatement(sql);
        ps.setInt(1, 68);
        assertEquals(1, ps.executeUpdate());
        ps.close();
        ps = prepareStatement(sql);
        ps.setInt(1, 77);
        assertEquals(1, ps.executeUpdate());
        Statement stmt = createStatement();
        ResultSet rs = stmt.executeQuery(
                "select val from stmtpooldata order by val");
        JDBC.assertFullResultSet(rs, new String[][] {{"68"},{"77"}});
        rollback();
        // Nothing was committed, so the rollback must leave the table empty.
        rs = stmt.executeQuery("select val from stmtpooldata order by val");
        JDBC.assertEmpty(rs);
    }

    /**
     * Tests that autocommit is working.
     *
     * @throws SQLException if something goes wrong...
     */
    public void resTestCommitOnReuse()
            throws SQLException {
        // Make sure the table is empty.
        cleanTableExceptedToBeEmpty();
        // Start test
        final String sql = "insert into stmtpooldata (val) values ?";
        getConnection().setAutoCommit(true);
        PreparedStatement ps = prepareStatement(sql);
        ps.setInt(1, 68);
        assertEquals(1, ps.executeUpdate());
        ps.close();
        ps = prepareStatement(sql);
        ps.setInt(1, 77);
        assertEquals(1, ps.executeUpdate());
        Statement stmt = createStatement();
        ResultSet rs = stmt.executeQuery(
                "select val from stmtpooldata order by val");
        JDBC.assertFullResultSet(rs, new String[][] {{"68"},{"77"}});
        rollback();
        // The inserts were auto-committed, so the rollback is a no-op.
        rs = stmt.executeQuery("select val from stmtpooldata order by val");
        JDBC.assertFullResultSet(rs, new String[][] {{"68"},{"77"}});
        // Clean up
        assertEquals(2, stmt.executeUpdate("delete from stmtpooldata"));
    }

    /**
     * Tests that nothing is committed on the connection when autocommit is
     * disabled.
     *
     * @throws SQLException if something goes wrong...
     */
    public void resTestNoDataCommittedOnInvalidTransactionState()
            throws SQLException {
        // Make sure the table is empty.
        cleanTableExceptedToBeEmpty();
        // Start test
        final String sql = "insert into stmtpooldata (val) values ?";
        getConnection().setAutoCommit(false);
        PreparedStatement ps = prepareStatement(sql);
        ps.setInt(1, 68);
        assertEquals(1, ps.executeUpdate());
        ps.close();
        ps = prepareStatement(sql);
        ps.setInt(1, 77);
        assertEquals(1, ps.executeUpdate());
        Statement stmt = createStatement();
        ResultSet rs = stmt.executeQuery(
                "select val from stmtpooldata order by val");
        JDBC.assertFullResultSet(rs, new String[][] {{"68"},{"77"}});
        try {
            getConnection().close();
            // We should not get here, but let's see what has happened.
            // Possible symptoms:
            //  - lock timeout: connection resources has not been freed.
            //  - no rows: rollback was issued.
            //  - two rows: commit was issued.
            stmt = createStatement();
            rs = stmt.executeQuery(
                    "select val from stmtpooldata order by val");
            int rows = 0;
            while (rs.next()) {
                rows++;
            }
            fail("Connection should not be allowed to close. Rows in table: " +
                    rows);
        } catch (SQLException sqle) {
            // 25001: invalid transaction state - active transaction.
            assertSQLState("25001", sqle);
            rollback();
        }
        stmt = createStatement();
        rs = stmt.executeQuery("select val from stmtpooldata order by val");
        JDBC.assertEmpty(rs);
    }

    /**
     * Deletes row from a test table that is expected to be empty.
     *
     * @throws SQLException if a database operation fails
     */
    private void cleanTableExceptedToBeEmpty()
            throws SQLException {
        Statement stmt = createStatement();
        ResultSet rs = stmt.executeQuery("select * from stmtpooldata");
        int rowCount = 0;
        while (rs.next()) {
            rowCount++;
        }
        rs.close();
        // Delete rows if any, and print a warning if verbosity is on.
        if (rowCount > 0) {
            println("Expected empty table, got " + rowCount + " rows.");
            assertEquals(rowCount,
                    stmt.executeUpdate("delete from stmtpooldata"));
        }
    }

    public static Test suite() {
        TestSuite suite = new TestSuite("StatementPoolingTest suite");
        TestSuite baseSuite = new TestSuite(StatementPoolingTest.class);
        // Statement pooling is not yet enabled for XA.
        //suite.addTest(TestConfiguration.connectionXADecorator(baseSuite));
        suite.addTest(TestConfiguration.connectionCPDecorator(baseSuite));

        // Add tests that require data from the database.
        TestSuite reqDataSuite = new TestSuite("Requires data suite");
        reqDataSuite.addTest(new StatementPoolingTest(
                "resTestCloseDoesNotAffectOtherStatement"));
        reqDataSuite.addTest(new StatementPoolingTest(
                "resTestLogicalConnectionCloseInvalidatesLogicalStatement"));
        reqDataSuite.addTest(new StatementPoolingTest(
                "resTestHoldCursorsOverCommit"));
        reqDataSuite.addTest(new StatementPoolingTest(
                "resTestCloseCursorsAtCommit"));
        reqDataSuite.addTest(new StatementPoolingTest(
                "resTestNoCommitOnReuse"));
        reqDataSuite.addTest(new StatementPoolingTest(
                "resTestCommitOnReuse"));
        // This test fails, DERBY-3319 is probably the cause.
        //reqDataSuite.addTest(new StatementPoolingTest(
        //        "resTestNoDataCommittedOnInvalidTransactionState"));
        suite.addTest(TestConfiguration.connectionCPDecorator(
                new BaseJDBCTestSetup(reqDataSuite) {
            public void setUp()
                    throws Exception {
                // Generate some data we can use in the tests.
                Statement stmt = getConnection().createStatement();
                try {
                    stmt.executeUpdate("drop table stmtpooltest");
                } catch (SQLException sqle) {
                    // 42Y55: table does not exist; nothing to drop.
                    assertSQLState("42Y55", sqle);
                }
                stmt.executeUpdate("create table stmtpooltest (" +
                        "id int generated always as identity," +
                        "val int)");
                PreparedStatement ps = getConnection().prepareStatement(
                        "insert into stmtpooltest values (DEFAULT, ?)");
                // Insert data with val in range [1,7].
                for (int val=1; val <= 7; val++) {
                    ps.setInt(1, val);
                    ps.addBatch();
                }
                ps.executeBatch();
                try {
                    stmt.executeUpdate("drop table stmtpooldata");
                } catch (SQLException sqle) {
                    // 42Y55: table does not exist; nothing to drop.
                    assertSQLState("42Y55", sqle);
                }
                stmt.executeUpdate("create table stmtpooldata (" +
                        "id int generated always as identity," +
                        "val int)");
                // Leave this table empty.
            }
        }));
        return TestConfiguration.clientServerDecorator(suite);
    }
} // End class StatementPoolingTest
/*
 * Copyright 2018 ThoughtWorks, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.thoughtworks.go.config.materials;

import com.thoughtworks.go.config.*;
import com.thoughtworks.go.config.elastic.ElasticProfile;
import com.thoughtworks.go.config.validation.FilePathTypeValidator;
import com.thoughtworks.go.domain.ConfigErrors;
import com.thoughtworks.go.domain.PipelineGroups;
import com.thoughtworks.go.domain.materials.MaterialConfig;
import com.thoughtworks.go.util.FilenameUtil;
import com.thoughtworks.go.util.command.UrlArgument;
import org.apache.commons.lang3.StringUtils;

import java.io.File;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import static java.lang.String.format;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotEmpty;

/**
 * Base configuration for all SCM-backed materials (git, hg, svn, p4, ...).
 * Holds the attributes common to every SCM material — destination folder,
 * change filter, autoUpdate flag — and the shared validation logic for them.
 *
 * @understands a source control repository and its configuration
 */
public abstract class ScmMaterialConfig extends AbstractMaterialConfig implements ParamsAttributeAware {

    public static final String URL = "url";
    public static final String USERNAME = "username";

    // Patterns of changed files to ignore (or, with invertFilter, to include); null when unset.
    @ConfigSubtag
    private Filter filter;

    // When true the filter is a whitelist instead of a blacklist.
    @ConfigAttribute(value = "invertFilter", optional = true)
    private boolean invertFilter = false;

    // Checkout destination, relative to the pipeline working dir; null means checkout at the root.
    @ConfigAttribute(value = "dest", allowNull = true)
    protected String folder;

    // Whether the server polls this material for changes automatically; defaults to true.
    @ConfigAttribute(value = "autoUpdate", optional = true)
    private boolean autoUpdate = true;

    // Keys used when populating this config from UI/API attribute maps (see setConfigAttributes).
    public static final String PASSWORD = "password";
    public static final String ENCRYPTED_PASSWORD = "encryptedPassword";
    public static final String PASSWORD_CHANGED = "passwordChanged";
    public static final String AUTO_UPDATE = "autoUpdate";
    public static final String FOLDER = "folder";
    public static final String FILTER = "filterAsString";
    public static final String INVERT_FILTER = "invertFilter";

    public ScmMaterialConfig(String typeName) {
        super(typeName);
    }

    public ScmMaterialConfig(CaseInsensitiveString name, Filter filter, boolean invertFilter, String folder, boolean autoUpdate, String typeName, ConfigErrors errors) {
        super(typeName, name, errors);
        this.filter = filter;
        this.invertFilter = invertFilter;
        this.folder = folder;
        this.autoUpdate = autoUpdate;
    }

    // The destination folder is what distinguishes two otherwise identical materials in one pipeline.
    @Override
    protected void appendPipelineUniqueCriteria(Map<String, Object> basicCriteria) {
        basicCriteria.put("dest", folder);
    }

    /**
     * Resolves this material's working directory: {@code baseFolder} itself when no
     * destination folder is configured, otherwise the destination inside it.
     */
    public File workingdir(File baseFolder) {
        if (getFolder() == null) {
            return baseFolder;
        }
        return new File(baseFolder, getFolder());
    }

    // Most materials such as hg, git and p4 print changed files from the root without a leading '/',
    // but Subversion prints them with '/'; we standardize here — see the Subversion implementation.
    public boolean matches(String name, String regex) {
        if (regex.startsWith("/")) {
            regex = regex.substring(1);
        }
        return name.matches(regex);
    }

    public abstract String getUserName();

    public abstract String getPassword();

    public abstract String getEncryptedPassword();

    public abstract boolean isCheckExternals();

    public abstract String getUrl();

    public abstract void setUrl(String url);

    protected abstract String getLocation();

    /** Never-null view of the filter; returns an empty {@link Filter} when none is configured. */
    public Filter filter() {
        if (filter == null) {
            return new Filter();
        }
        return filter;
    }

    public String getFilterAsString() {
        return filter().getStringForDisplay();
    }

    /** The raw configured filter, possibly null (unlike {@link #filter()}). */
    public Filter rawFilter() {
        return filter;
    }

    public void setFilter(Filter filter) {
        this.filter = filter;
    }

    public boolean isInvertFilter() {
        return invertFilter;
    }

    // JavaBean-style alias of isInvertFilter(), kept for reflective/template access.
    public boolean getInvertFilter() {
        return invertFilter;
    }

    public void setInvertFilter(boolean value) {
        invertFilter = value;
    }

    public String getDescription() {
        return getUriForDisplay();
    }

    public abstract String getUriForDisplay();

    public String getFolder() {
        return folder;
    }

    /** Material name when one was given, otherwise the display URI. */
    public String getDisplayName() {
        return name == null ? getUriForDisplay() : CaseInsensitiveString.str(name);
    }

    public boolean isAutoUpdate() {
        return autoUpdate;
    }

    // JavaBean-style alias of isAutoUpdate(), kept for reflective/template access.
    public boolean getAutoUpdate() {
        return autoUpdate;
    }

    public void setAutoUpdate(boolean value) {
        autoUpdate = value;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        if (!super.equals(o)) {
            return false;
        }

        ScmMaterialConfig that = (ScmMaterialConfig) o;

        if (folder != null ? !folder.equals(that.folder) : that.folder != null) {
            return false;
        }

        // NOTE(review): super.equals(o) already returned true above, so this is always true here.
        return super.equals(that);
    }

    @Override
    public int hashCode() {
        int result = super.hashCode();
        result = 31 * result + (folder != null ? folder.hashCode() : 0);
        return result;
    }

    // Template method: common dest-folder checks first, then subclass-specific SCM validation.
    @Override
    protected final void validateConcreteMaterial(ValidationContext validationContext) {
        validateNotOutsideSandbox();
        validateDestFolderPath();
        validateConcreteScmMaterial(validationContext);
    }

    public abstract void validateConcreteScmMaterial(ValidationContext validationContext);

    private void validateDestFolderPath() {
        if (StringUtils.isBlank(folder)) {
            return;
        }
        if (!new FilePathTypeValidator().isPathValid(folder)) {
            errors().add(FOLDER, FilePathTypeValidator.errorMessage("directory", getFolder()));
        }
    }

    /**
     * Applies UI/API attributes to this config. A blank folder is normalized to null;
     * autoUpdate/invertFilter are treated as true only for the literal string "true";
     * a blank filter pattern clears the filter entirely.
     */
    public void setConfigAttributes(Object attributes) {
        super.setConfigAttributes(attributes);
        Map map = (Map) attributes;
        if (map.containsKey(FOLDER)) {
            String folder = (String) map.get(FOLDER);
            if (StringUtils.isBlank(folder)) {
                folder = null;
            }
            this.folder = folder;
        }
        this.setAutoUpdate("true".equals(map.get(AUTO_UPDATE)));
        this.setInvertFilter("true".equals(map.get(INVERT_FILTER)));
        if (map.containsKey(FILTER)) {
            String pattern = (String) map.get(FILTER);
            if (!StringUtils.isBlank(pattern)) {
                this.setFilter(Filter.fromDisplayString(pattern));
            } else {
                this.setFilter(null);
            }
        }
    }

    /**
     * True when any other copy of this material (in a list of more than one)
     * disagrees with this one's autoUpdate setting.
     */
    public boolean isAutoUpdateStateMismatch(MaterialConfigs materialAutoUpdateMap) {
        if (materialAutoUpdateMap.size() > 1) {
            for (MaterialConfig otherMaterial : materialAutoUpdateMap) {
                if (otherMaterial.isAutoUpdate() != this.autoUpdate) {
                    return true;
                }
            }
        }
        return false;
    }

    public void setAutoUpdateMismatchError() {
        addError(AUTO_UPDATE, String.format("Material of type %s (%s) is specified more than once in the configuration with different values for the autoUpdate attribute." +
                " All copies of this material must have the same value for this attribute.", getTypeForDisplay(), getDescription()));
    }

    public void setAutoUpdateMismatchErrorWithConfigRepo() {
        addError(AUTO_UPDATE, String.format("Material of type %s (%s) is specified as a configuration repository and pipeline material with disabled autoUpdate." +
                " All copies of this material must have autoUpdate enabled or configuration repository must be removed", getTypeForDisplay(), getDescription()));
    }

    public void setDestinationFolderError(String message) {
        addError(FOLDER, message);
    }

    /** Flags an error when this material's dest folder is nested inside another material's. */
    public void validateNotSubdirectoryOf(String otherSCMMaterialFolder) {
        String myDirPath = this.getFolder();
        if (myDirPath == null || otherSCMMaterialFolder == null) {
            return;
        }
        if (FilenameUtil.isNormalizedDirectoryPathInsideNormalizedParentDirectory(myDirPath, otherSCMMaterialFolder)) {
            addError(FOLDER, "Invalid Destination Directory. Every material needs a different destination directory and the directories should not be nested.");
        }
    }

    /** Flags an error when two materials share the same dest folder (case-insensitive). */
    public void validateDestinationDirectoryName(String otherSCMMaterialFolder) {
        if (folder != null && folder.equalsIgnoreCase(otherSCMMaterialFolder)) {
            addError(FOLDER, "The destination directory must be unique across materials.");
        }
    }

    // NOTE(review): isNormalizedPathOutsideWorkingDir appears to mean "stays inside the
    // working dir" despite its name — confirm against FilenameUtil before changing.
    private void validateNotOutsideSandbox() {
        String dest = this.getFolder();
        if (dest == null) {
            return;
        }
        if (!(FilenameUtil.isNormalizedPathOutsideWorkingDir(dest))) {
            setDestinationFolderError(String.format("Dest folder '%s' is not valid. It must be a sub-directory of the working folder.", dest));
        }
    }

    // SCM materials never participate in fetch-artifact resolution; subclasses may override.
    public Boolean isUsedInFetchArtifact(PipelineConfig pipelineConfig) {
        return false;
    }

    // TODO: Consider renaming this to dest since we use that word in the UI & Config
    public void setFolder(String folder) {
        this.folder = folder;
    }

    /**
     * Validates the material URL: must be non-blank, must be structurally valid
     * (only the password portion may be a secret param), and any secret params
     * it references must point at existing secret configs.
     */
    protected void validateMaterialUrl(UrlArgument url, ValidationContext validationContext) {
        if (url == null || isBlank(url.forDisplay())) {
            errors().add(URL, "URL cannot be blank");
            return;
        }

        if (!url.isValid()) {
            errors.add(URL, "Only password can be specified as secret params");
        }

        validateSecretParamsConfig(URL, url.getSecretParams(), validationContext);
    }

    /**
     * Validates the (decrypted) password's secret-param references. A failure to
     * decrypt/parse — e.g. tampered cipher text — is reported as a config error
     * rather than propagated.
     */
    protected void validatePassword(ValidationContext validationContext) {
        if (isNotEmpty(getEncryptedPassword())) {
            try {
                validateSecretParamsConfig("encryptedPassword", SecretParams.parse(getPassword()), validationContext);
            } catch (Exception e) {
                addError("encryptedPassword", format("Encrypted password value for %s with url '%s' is invalid. This usually happens when the cipher text is modified to have an invalid value.", this.getType(), this.getUriForDisplay()));
            }
        }
    }

    /** Adds an error under {@code key} for every secret param whose secret config id is unknown. */
    protected void validateSecretParamsConfig(String key, SecretParams secretParams, ValidationContext validationContext) {
        if (!secretParams.hasSecretParams()) {
            return;
        }

        final List<String> missingSecretConfigs = secretParams.stream()
                .filter(secretParam -> validationContext.getCruiseConfig().getSecretConfigs().find(secretParam.getSecretConfigId()) == null)
                .map(SecretParam::getSecretConfigId)
                .collect(Collectors.toList());

        if (!missingSecretConfigs.isEmpty()) {
            addError(key, String.format("Secret config with ids `%s` does not exist.", String.join(", ", missingSecretConfigs)));
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.flink.table.functions.aggfunctions;

import org.apache.flink.table.dataformat.BinaryString;
import org.apache.flink.table.dataformat.Decimal;
import org.apache.flink.table.dataformat.GenericRow;
import org.apache.flink.table.functions.AggregateFunction;
import org.apache.flink.table.functions.aggfunctions.LastValueWithRetractAggFunction.BooleanLastValueWithRetractAggFunction;
import org.apache.flink.table.functions.aggfunctions.LastValueWithRetractAggFunction.ByteLastValueWithRetractAggFunction;
import org.apache.flink.table.functions.aggfunctions.LastValueWithRetractAggFunction.DecimalLastValueWithRetractAggFunction;
import org.apache.flink.table.functions.aggfunctions.LastValueWithRetractAggFunction.DoubleLastValueWithRetractAggFunction;
import org.apache.flink.table.functions.aggfunctions.LastValueWithRetractAggFunction.FloatLastValueWithRetractAggFunction;
import org.apache.flink.table.functions.aggfunctions.LastValueWithRetractAggFunction.IntLastValueWithRetractAggFunction;
import org.apache.flink.table.functions.aggfunctions.LastValueWithRetractAggFunction.LongLastValueWithRetractAggFunction;
import org.apache.flink.table.functions.aggfunctions.LastValueWithRetractAggFunction.ShortLastValueWithRetractAggFunction;
import org.apache.flink.table.functions.aggfunctions.LastValueWithRetractAggFunction.StringLastValueWithRetractAggFunction;
import org.apache.flink.table.typeutils.DecimalTypeInfo;

import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.List;

/**
 * Test case for built-in LastValue with retract aggregate function.
 * This class tests the {@code accumulate} method without an order argument:
 * for each input list, the expected result is the last non-null value
 * (or null when the list contains only nulls).
 */
public abstract class LastValueWithRetractAggFunctionWithoutOrderTest<T>
        extends AggFunctionTestBase<T, GenericRow> {

    @Override
    protected Class<?> getAccClass() {
        // The accumulator of LastValueWithRetractAggFunction is a GenericRow.
        return GenericRow.class;
    }

    @Override
    protected Method getRetractFunc() throws NoSuchMethodException {
        // Looked up reflectively because the base class drives retract via reflection.
        return getAggregator().getClass().getMethod("retract", getAccClass(), Object.class);
    }

    /**
     * Test LastValueWithRetractAggFunction for number types; subclasses supply
     * the conversion from the common string fixtures to the concrete number type.
     */
    public abstract static class NumberLastValueWithRetractAggFunctionWithoutOrderTest<T>
            extends LastValueWithRetractAggFunctionWithoutOrderTest<T> {

        /** Parses the fixture string into the concrete number type under test. */
        protected abstract T getValue(String v);

        @Override
        protected List<List<T>> getInputValueSets() {
            return Arrays.asList(
                    Arrays.asList(
                            getValue("1"),
                            null,
                            getValue("-99"),
                            getValue("3"),
                            null
                    ),
                    Arrays.asList(
                            null,
                            null,
                            null,
                            null
                    ),
                    Arrays.asList(
                            null,
                            getValue("10"),
                            null,
                            getValue("3")
                    )
            );
        }

        @Override
        protected List<T> getExpectedResults() {
            // One expected value per input set above: last non-null element, or null.
            return Arrays.asList(
                    getValue("3"),
                    null,
                    getValue("3")
            );
        }
    }

    /**
     * Test for ByteLastValueWithRetractAggFunction.
     */
    public static class ByteLastValueWithRetractAggFunctionWithoutOrderTest
            extends NumberLastValueWithRetractAggFunctionWithoutOrderTest<Byte> {

        @Override
        protected Byte getValue(String v) {
            return Byte.valueOf(v);
        }

        @Override
        protected AggregateFunction<Byte, GenericRow> getAggregator() {
            return new ByteLastValueWithRetractAggFunction();
        }
    }

    /**
     * Test for ShortLastValueWithRetractAggFunction.
     */
    public static class ShortLastValueWithRetractAggFunctionWithoutOrderTest
            extends NumberLastValueWithRetractAggFunctionWithoutOrderTest<Short> {

        @Override
        protected Short getValue(String v) {
            return Short.valueOf(v);
        }

        @Override
        protected AggregateFunction<Short, GenericRow> getAggregator() {
            return new ShortLastValueWithRetractAggFunction();
        }
    }

    /**
     * Test for IntLastValueWithRetractAggFunction.
     */
    public static class IntLastValueWithRetractAggFunctionWithoutOrderTest
            extends NumberLastValueWithRetractAggFunctionWithoutOrderTest<Integer> {

        @Override
        protected Integer getValue(String v) {
            return Integer.valueOf(v);
        }

        @Override
        protected AggregateFunction<Integer, GenericRow> getAggregator() {
            return new IntLastValueWithRetractAggFunction();
        }
    }

    /**
     * Test for LongLastValueWithRetractAggFunction.
     */
    public static class LongLastValueWithRetractAggFunctionWithoutOrderTest
            extends NumberLastValueWithRetractAggFunctionWithoutOrderTest<Long> {

        @Override
        protected Long getValue(String v) {
            return Long.valueOf(v);
        }

        @Override
        protected AggregateFunction<Long, GenericRow> getAggregator() {
            return new LongLastValueWithRetractAggFunction();
        }
    }

    /**
     * Test for FloatLastValueWithRetractAggFunction.
     */
    public static class FloatLastValueWithRetractAggFunctionWithoutOrderTest
            extends NumberLastValueWithRetractAggFunctionWithoutOrderTest<Float> {

        @Override
        protected Float getValue(String v) {
            return Float.valueOf(v);
        }

        @Override
        protected AggregateFunction<Float, GenericRow> getAggregator() {
            return new FloatLastValueWithRetractAggFunction();
        }
    }

    /**
     * Test for DoubleLastValueWithRetractAggFunction.
     */
    public static class DoubleLastValueWithRetractAggFunctionWithoutOrderTest
            extends NumberLastValueWithRetractAggFunctionWithoutOrderTest<Double> {

        @Override
        protected Double getValue(String v) {
            return Double.valueOf(v);
        }

        @Override
        protected AggregateFunction<Double, GenericRow> getAggregator() {
            return new DoubleLastValueWithRetractAggFunction();
        }
    }

    /**
     * Test for BooleanLastValueWithRetractAggFunction.
     * Booleans don't fit the numeric fixtures, so this class defines its own input sets.
     */
    public static class BooleanLastValueWithRetractAggFunctionWithoutOrderTest
            extends LastValueWithRetractAggFunctionWithoutOrderTest<Boolean> {

        @Override
        protected List<List<Boolean>> getInputValueSets() {
            return Arrays.asList(
                    Arrays.asList(
                            false,
                            false,
                            false
                    ),
                    Arrays.asList(
                            true,
                            true,
                            true
                    ),
                    Arrays.asList(
                            true,
                            false,
                            null,
                            true,
                            false,
                            true,
                            null
                    ),
                    Arrays.asList(
                            null,
                            null,
                            null
                    ),
                    Arrays.asList(
                            null,
                            true
                    ));
        }

        @Override
        protected List<Boolean> getExpectedResults() {
            // Last non-null element per set; all-null set yields null.
            return Arrays.asList(
                    false,
                    true,
                    true,
                    null,
                    true
            );
        }

        @Override
        protected AggregateFunction<Boolean, GenericRow> getAggregator() {
            return new BooleanLastValueWithRetractAggFunction();
        }
    }

    /**
     * Test for DecimalLastValueWithRetractAggFunction.
     */
    public static class DecimalLastValueWithRetractAggFunctionWithoutOrderTest
            extends LastValueWithRetractAggFunctionWithoutOrderTest<Decimal> {

        // Decimal precision/scale shared by the fixtures and the aggregator's type info.
        private int precision = 20;
        private int scale = 6;

        @Override
        protected List<List<Decimal>> getInputValueSets() {
            return Arrays.asList(
                    Arrays.asList(
                            Decimal.castFrom("1", precision, scale),
                            Decimal.castFrom("1000.000001", precision, scale),
                            Decimal.castFrom("-1", precision, scale),
                            Decimal.castFrom("-999.998999", precision, scale),
                            null,
                            Decimal.castFrom("0", precision, scale),
                            Decimal.castFrom("-999.999", precision, scale),
                            null,
                            Decimal.castFrom("999.999", precision, scale)
                    ),
                    Arrays.asList(
                            null,
                            null,
                            null,
                            null,
                            null
                    ),
                    Arrays.asList(
                            null,
                            Decimal.castFrom("0", precision, scale)
                    )
            );
        }

        @Override
        protected List<Decimal> getExpectedResults() {
            return Arrays.asList(
                    Decimal.castFrom("999.999", precision, scale),
                    null,
                    Decimal.castFrom("0", precision, scale)
            );
        }

        @Override
        protected AggregateFunction<Decimal, GenericRow> getAggregator() {
            return new DecimalLastValueWithRetractAggFunction(DecimalTypeInfo.of(precision, scale));
        }
    }

    /**
     * Test for StringLastValueWithRetractAggFunction.
     */
    public static class StringLastValueWithRetractAggFunctionWithoutOrderTest
            extends LastValueWithRetractAggFunctionWithoutOrderTest<BinaryString> {

        @Override
        protected List<List<BinaryString>> getInputValueSets() {
            return Arrays.asList(
                    Arrays.asList(
                            BinaryString.fromString("abc"),
                            BinaryString.fromString("def"),
                            BinaryString.fromString("ghi"),
                            null,
                            BinaryString.fromString("jkl"),
                            null,
                            BinaryString.fromString("zzz")
                    ),
                    Arrays.asList(
                            null,
                            null
                    ),
                    Arrays.asList(
                            null,
                            BinaryString.fromString("a")
                    ),
                    Arrays.asList(
                            BinaryString.fromString("x"),
                            null,
                            BinaryString.fromString("e")
                    )
            );
        }

        @Override
        protected List<BinaryString> getExpectedResults() {
            return Arrays.asList(
                    BinaryString.fromString("zzz"),
                    null,
                    BinaryString.fromString("a"),
                    BinaryString.fromString("e")
            );
        }

        @Override
        protected AggregateFunction<BinaryString, GenericRow> getAggregator() {
            return new StringLastValueWithRetractAggFunction();
        }
    }
}
/*-
 * -\-\-
 * rkt-launcher
 * --
 *
 * --
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * -/-/-
 */

package io.honnix.rkt.launcher.remote;

import static io.honnix.rkt.launcher.remote.RktLauncherRemote.RktLauncherRemoteBuilder.Scheme.HTTP;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertSame;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

import com.spotify.apollo.Client;
import io.honnix.rkt.launcher.options.FetchOptions;
import io.honnix.rkt.launcher.options.StopOptions;
import io.honnix.rkt.launcher.options.TrustOptions;
import io.honnix.rkt.launcher.remote.command.RktCommandRemote;
import io.honnix.rkt.launcher.remote.command.RktImageCommandRemote;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;

/**
 * Unit tests for {@link RktLauncherRemoteImpl}: verifies that every rkt command
 * on the facade is delegated, argument-for-argument, to the underlying
 * {@link RktCommandRemote} / {@link RktImageCommandRemote} mocks.
 */
@RunWith(MockitoJUnitRunner.class)
public class RktLauncherRemoteImplTest {

  private RktLauncherRemote rktLauncherRemote;

  @Mock
  private RktCommandRemote rktCommandRemote;

  @Mock
  private RktImageCommandRemote rktImageCommandRemote;

  @Before
  public void setUp() throws Exception {
    // Fresh facade over the two command mocks for every test.
    rktLauncherRemote = new RktLauncherRemoteImpl(rktCommandRemote, rktImageCommandRemote);
  }

  /**
   * This is purely for coverage.
   */
  @Test
  public void shouldConstruct() {
    assertNotNull(RktLauncherRemote.builder()
                      .scheme(HTTP)
                      .host("localhost")
                      .port(80)
                      .client(mock(Client.class))
                      .build());
  }

  /**
   * This is for jacoco enum coverage limitation.
   * https://github.com/jacoco/jacoco/wiki/FilteringOptions
   */
  @Test
  public void shouldReturnAllValues() {
    assertEquals(2, RktLauncherRemote.RktLauncherRemoteBuilder.Scheme.values().length);
  }

  /**
   * This is for jacoco enum coverage limitation.
   * https://github.com/jacoco/jacoco/wiki/FilteringOptions
   */
  @Test
  public void shouldParseFromString() {
    assertSame(RktLauncherRemote.RktLauncherRemoteBuilder.Scheme.HTTPS,
               RktLauncherRemote.RktLauncherRemoteBuilder.Scheme.valueOf("HTTPS"));
  }

  @Test
  public void shouldReturnImageCommandRemote() {
    // image() must hand back the exact instance passed at construction.
    assertSame(rktImageCommandRemote, rktLauncherRemote.image());
  }

  @Test
  public void shouldProxyCatManifest() {
    rktLauncherRemote.catManifest("id");
    verify(rktCommandRemote).catManifest("id");
  }

  @Test
  public void shouldProxyConfig() {
    rktLauncherRemote.config();
    verify(rktCommandRemote).config();
  }

  @Test
  public void shouldProxyFetch() {
    rktLauncherRemote.fetch(FetchOptions.builder().build(), true, "image1", "image2");
    verify(rktCommandRemote).fetch(FetchOptions.builder().build(), true, "image1", "image2");
  }

  @Test
  public void shouldProxyFetchWithoutOptions() {
    rktLauncherRemote.fetch(false, "image1", "image2");
    verify(rktCommandRemote).fetch(false, "image1", "image2");
  }

  @Test
  public void shouldProxyGc() {
    rktLauncherRemote.gc(null);
    verify(rktCommandRemote).gc(null);
  }

  @Test
  public void shouldProxyGcWithoutOptions() {
    rktLauncherRemote.gc();
    verify(rktCommandRemote).gc();
  }

  @Test
  public void shouldProxyList() {
    rktLauncherRemote.list();
    verify(rktCommandRemote).list();
  }

  @Test
  public void shouldProxyPrepare() {
    rktLauncherRemote.prepare(null, true);
    verify(rktCommandRemote).prepare(null, true);
  }

  @Test
  public void shouldProxyRm() {
    rktLauncherRemote.rm(null, "id1", "id2");
    verify(rktCommandRemote).rm(null, "id1", "id2");
  }

  @Test
  public void shouldProxyRun() {
    rktLauncherRemote.run(null, true);
    verify(rktCommandRemote).run(null, true);
  }

  @Test
  public void shouldProxyRunPrepared() {
    rktLauncherRemote.runPrepared(null, "id", true);
    verify(rktCommandRemote).runPrepared(null, "id", true);
  }

  @Test
  public void shouldProxyRunPreparedWithoutOptions() {
    rktLauncherRemote.runPrepared("id", true);
    verify(rktCommandRemote).runPrepared("id", true);
  }

  @Test
  public void shouldProxyStatus() {
    rktLauncherRemote.status(null, "id");
    verify(rktCommandRemote).status(null, "id");
  }

  @Test
  public void shouldProxyStatusWithoutOptions() {
    rktLauncherRemote.status("id");
    verify(rktCommandRemote).status("id");
  }

  @Test
  public void shouldProxyStop() {
    rktLauncherRemote.stop(StopOptions.builder().build(), "id1", "id2");
    verify(rktCommandRemote).stop(StopOptions.builder().build(), "id1", "id2");
  }

  @Test
  public void shouldProxyStopWithoutOptions() {
    rktLauncherRemote.stop("id1", "id2");
    verify(rktCommandRemote).stop("id1", "id2");
  }

  @Test
  public void shouldProxyTrust() {
    rktLauncherRemote.trust(TrustOptions.builder().build(), "pubkey1", "pubkey2");
    verify(rktCommandRemote).trust(TrustOptions.builder().build(), "pubkey1", "pubkey2");
  }

  @Test
  public void shouldProxyTrustWithoutOptions() {
    rktLauncherRemote.trust("pubkey1", "pubkey2");
    verify(rktCommandRemote).trust("pubkey1", "pubkey2");
  }

  @Test
  public void shouldProxyVersion() {
    rktLauncherRemote.version();
    verify(rktCommandRemote).version();
  }
}
/*
 * Copyright 2015 Red Hat, Inc.
 *
 * Red Hat licenses this file to you under the Apache License, version
 * 2.0 (the "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied.  See the License for the specific language governing
 * permissions and limitations under the License.
 */
package org.jbosson.plugins.amq.jmx;

import org.apache.activemq.artemis.api.core.management.ObjectNameBuilder;
import org.apache.activemq.artemis.core.config.Configuration;
import org.apache.activemq.artemis.core.security.jaas.PropertiesLoginModuleTest;
import org.apache.activemq.artemis.core.server.ActiveMQServer;
import org.apache.activemq.artemis.core.server.ActiveMQServers;
import org.apache.activemq.artemis.core.server.management.ManagementContext;
import org.apache.activemq.artemis.core.settings.impl.AddressFullMessagePolicy;
import org.apache.activemq.artemis.core.settings.impl.AddressSettings;
import org.apache.activemq.artemis.jms.client.ActiveMQConnectionFactory;
import org.apache.activemq.artemis.spi.core.security.ActiveMQJAASSecurityManager;
import org.apache.activemq.artemis.spi.core.security.ActiveMQSecurityManager;
import org.jbosson.plugins.amq.AmqJonTestBase;
import org.jbosson.plugins.amq.ArtemisServiceComponent;
import org.jbosson.plugins.amq.OpParameter;
import org.jbosson.plugins.amq.OperationInfo;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.rules.TemporaryFolder;
import org.mc4j.ems.connection.ConnectionFactory;
import org.mc4j.ems.connection.EmsConnection;
import org.mc4j.ems.connection.bean.EmsBean;
import org.mc4j.ems.connection.settings.ConnectionSettings;
import org.mc4j.ems.connection.support.ConnectionProvider;
import org.mc4j.ems.connection.support.metadata.JSR160ConnectionTypeDescriptor;
import org.rhq.core.clientapi.descriptor.configuration.ConfigurationDescriptor;
import org.rhq.core.clientapi.descriptor.configuration.ConfigurationProperty;
import org.rhq.core.clientapi.descriptor.configuration.ListProperty;
import org.rhq.core.clientapi.descriptor.configuration.MapProperty;
import org.rhq.core.clientapi.descriptor.configuration.PropertyType;
import org.rhq.core.clientapi.descriptor.configuration.SimpleProperty;
import org.rhq.core.clientapi.descriptor.plugin.OperationDescriptor;

import javax.jms.Connection;
import javax.management.InstanceAlreadyExistsException;
import javax.management.MBeanRegistrationException;
import javax.management.MBeanServer;
import javax.management.MalformedObjectNameException;
import javax.management.NotCompliantMBeanException;
import javax.management.ObjectName;
import javax.management.remote.JMXConnectorServer;
import javax.management.remote.JMXConnectorServerFactory;
import javax.management.remote.JMXServiceURL;
import javax.xml.bind.JAXBElement;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.lang.management.ManagementFactory;
import java.net.URL;
import java.net.URLDecoder;
import java.rmi.registry.LocateRegistry;
import java.rmi.registry.Registry;
import java.rmi.server.UnicastRemoteObject;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Base class for runtime tests that exercise the AMQ/Artemis JON plugin over JMX.
 * setUp starts an embedded JAAS-secured Artemis broker, exposes it over an RMI
 * JMX connector, and opens an EMS connection to the broker MBean; tearDown
 * unwinds all of that in reverse order. Subclasses hook in via the protected
 * getJmx*/configureJmxAccess methods.
 */
public class AmqJonRuntimeTestBase extends AmqJonTestBase {

   // Runs before any instance is created: points JAAS at the bundled login.config
   // (if the property isn't already set) and pins RMI to localhost so the JMX
   // connector URL resolves consistently.
   static {
      String path = System.getProperty("java.security.auth.login.config");
      if (path == null) {
         URL resource = PropertiesLoginModuleTest.class.getClassLoader().getResource("login.config");
         if (resource != null) {
            try {
               path = URLDecoder.decode(resource.getFile(), "UTF-8");
               System.setProperty("java.security.auth.login.config", path);
            } catch (UnsupportedEncodingException e) {
               throw new RuntimeException(e);
            }
         }
      }
      System.setProperty("java.rmi.server.hostname", "localhost");
   }

   @Rule
   public TemporaryFolder tmpTestFolder = new TemporaryFolder();

   protected ActiveMQServer server;
   protected MBeanServer mbeanServer;
   protected String brokerName = "amq";
   protected ObjectNameBuilder objectNameBuilder;
   protected ConnectionFactory emsFactory;
   protected EmsConnection emsConnection;
   protected ManagementContext mcontext;
   protected int jmxPort = 11099;
   protected List<Connection> connections = new ArrayList<Connection>();
   protected javax.jms.ConnectionFactory factory;
   protected String jmxServiceURL = null;
   private JMXConnectorServer connectorServer = null;
   protected ArtemisServiceComponent brokerComponent;
   protected EmsBean brokerBean;
   protected Registry registry;

   //make sure the jmx Registry only created once
   private static boolean jmxRegistryCreated = false;

   // NOTE(review): order matters throughout — management context before broker,
   // broker before JMX connector, connector before EMS. Do not reorder.
   @Before
   public void setUp() throws Exception {
      super.setUp();
      // leakCheckRule comes from the base class — presumably disabled because the
      // shared JMX registry outlives individual tests; confirm against AmqJonTestBase.
      leakCheckRule.disable();
      jmxServiceURL = "service:jmx:rmi://localhost/jndi/rmi://localhost:" + jmxPort + "/jmxrmi";

      // Subclasses may supply a ManagementContext; when they do, it replaces the
      // locally-started JMX connector (see the connectJmx() call below).
      mcontext = configureJmxAccess();
      if (mcontext != null) mcontext.start();

      server = createServerWithJaas();
      Configuration serverConfig = server.getConfiguration();
      serverConfig.setJMXManagementEnabled(true);
      serverConfig.setName(brokerName);

      // Redirect all broker storage under the per-test temp dir (temporaryFolder
      // is inherited from the base class — distinct from tmpTestFolder above).
      String dataDir = this.temporaryFolder.getRoot().getAbsolutePath();
      serverConfig.setPagingDirectory(dataDir + "/" + serverConfig.getPagingDirectory());
      serverConfig.setBindingsDirectory(dataDir + "/" + serverConfig.getBindingsDirectory());
      serverConfig.setLargeMessagesDirectory(dataDir + "/" + serverConfig.getLargeMessagesDirectory());
      serverConfig.setJournalDirectory(dataDir + "/" + serverConfig.getJournalDirectory());

      mbeanServer = ManagementFactory.getPlatformMBeanServer();
      server.setMBeanServer(mbeanServer);
      server.start();

      factory = new ActiveMQConnectionFactory("tcp://localhost:61616");

      objectNameBuilder = server.getManagementService().getObjectNameBuilder();

      // Only start our own JMX connector when no ManagementContext provides one.
      if (mcontext == null) connectJmx();
      System.out.println("server name: " + server.getConfiguration().getName());
      emsFactory = new ConnectionFactory();
      connectEms();
      brokerBean = getAmQServerBean();
      brokerComponent = new ArtemisServiceComponent();
   }

   /** Builds an Artemis server secured by the "activemq" JAAS domain, with paging enabled for all addresses. */
   private ActiveMQServer createServerWithJaas() throws Exception {
      Configuration configuration = this.createDefaultConfig(true);
      ActiveMQSecurityManager securityManager = new ActiveMQJAASSecurityManager("activemq");
      ActiveMQServer server = this.addServer(ActiveMQServers.newActiveMQServer(configuration, ManagementFactory.getPlatformMBeanServer(), securityManager, true));
      AddressSettings defaultSetting = (new AddressSettings()).setPageSizeBytes(10485760L).setMaxSizeBytes(-1L).setAddressFullMessagePolicy(AddressFullMessagePolicy.PAGE);
      server.getAddressSettingsRepository().addMatch("#", defaultSetting);
      return server;
   }

   /** Opens the EMS (JMX) connection used to query broker MBeans, with optional credentials from subclasses. */
   private void connectEms() {
      ConnectionSettings emsConnectionSettings = new ConnectionSettings();
      JSR160ConnectionTypeDescriptor descriptor = new JSR160ConnectionTypeDescriptor();
      emsConnectionSettings.initializeConnectionType(descriptor);
      emsConnectionSettings.setServerUrl(jmxServiceURL);
      emsConnectionSettings.setPrincipal(getJmxPrincipal());
      emsConnectionSettings.setCredentials(getJmxCredentials());
      ConnectionProvider provider = emsFactory.getConnectionProvider(emsConnectionSettings);
      emsConnection = provider.connect();
      emsConnection.loadSynchronous(true);
   }

   /** JMX password; null by default (unauthenticated). Override to secure the connection. */
   protected String getJmxCredentials() {
      return null;
   }

   /** JMX user name; null by default (unauthenticated). Override to secure the connection. */
   protected String getJmxPrincipal() {
      return null;
   }

   //setup and start ManagementContext; null (the default) means "use the locally-created JMX connector"
   protected ManagementContext configureJmxAccess() throws Exception {
      return null;
   }

   // Tear down in reverse order of setUp; best-effort on JMS connections.
   @After
   public void tearDown() throws Exception {
      emsConnection.close();
      if (connectorServer != null) {
         connectorServer.stop();
         connectorServer = null;
      }
      if (mcontext != null) {
         mcontext.stop();
         mcontext = null;
      }
      if (jmxRegistryCreated) {
         // Unexport the shared RMI registry so the port is free for the next run.
         UnicastRemoteObject.unexportObject(registry, true);
         registry = null;
         jmxRegistryCreated = false;
      }
      for (Connection conn : connections) {
         try {
            conn.close();
         } catch (Exception e) {
            //ignore — best-effort cleanup of test connections
         }
      }
      server.stop();
      System.out.println("server stopped");
      super.tearDown();
   }

   /** Creates the RMI registry (once per JVM) and starts a JMX connector bound to the platform MBean server. */
   private void connectJmx() throws IOException, MalformedObjectNameException, NotCompliantMBeanException, InstanceAlreadyExistsException, MBeanRegistrationException {
      if (!jmxRegistryCreated) {
         registry = LocateRegistry.createRegistry(jmxPort);
         jmxRegistryCreated = true;
      }
      HashMap<String,Object> env = getJmxConnectorEnv();
      JMXServiceURL url = new JMXServiceURL(jmxServiceURL);
      connectorServer = JMXConnectorServerFactory.newJMXConnectorServer(url, env, mbeanServer);
      connectorServer.start();
   }

   /** Extra JMX connector environment (e.g. authenticators); null by default. */
   protected HashMap<String,Object> getJmxConnectorEnv() throws IOException {
      return null;
   }

   /** Looks up the single broker MBean via EMS; fails the test if there isn't exactly one. */
   protected EmsBean getAmQServerBean() throws Exception {
      ObjectName broker = objectNameBuilder.getActiveMQServerObjectName();
      List<EmsBean> beans = emsConnection.queryBeans(broker.toString());
      assertEquals("There should be one and only broker bean", 1, beans.size());
      return beans.get(0);
   }

   /**
    * Finds the named broker operation whose parameter descriptor matches the given
    * Java types. brokerService is presumably populated by the base class from the
    * plugin descriptor — confirm against AmqJonTestBase.
    */
   protected OperationInfo getBrokerOperation(String opName, Class... types) {
      List<OperationDescriptor> ops = brokerService.getOperation();
      for (OperationDescriptor p : ops) {
         if (p.getName().equals(opName)) {
            if (paramMatch(p.getParameters(), types)) {
               return new OperationInfo(opName, types);
            }
         }
      }
      throw new IllegalArgumentException("Cannot find operation: " + opName);
   }

   /**
    * Positionally matches a plugin-descriptor parameter list against Java types:
    * simple properties must convert to the same PropertyType, list properties
    * accept arrays or List, map properties accept Map. Null/empty on both sides
    * counts as a match.
    */
   protected boolean paramMatch(ConfigurationDescriptor params, Class[] types) {
      if (params == null) {
         if (types == null || types.length == 0) {
            return true;
         } else {
            return false;
         }
      }
      List<JAXBElement<? extends ConfigurationProperty>> listParams = params.getConfigurationProperty();
      if (types == null || types.length == 0) {
         if (listParams.size() == 0) {
            return true;
         } else {
            return false;
         }
      }
      if (listParams.size() != types.length) {
         return false;
      }
      for (int i = 0; i < types.length; i++) {
         JAXBElement<? extends ConfigurationProperty> elem = listParams.get(i);
         Class type1 = types[i];
         ConfigurationProperty prop = elem.getValue();
         if (prop instanceof SimpleProperty) {
            SimpleProperty simple = (SimpleProperty) prop;
            PropertyType type = simple.getType();
            if (type != OpParameter.convert(type1)) {
               return false;
            }
         } else if (prop instanceof ListProperty) {
            if (!type1.isArray() && type1 != List.class) {
               return false;
            }
         } else if (prop instanceof MapProperty) {
            if (type1 != Map.class) {
               return false;
            }
         }
      }
      return true;
   }

   /** Opens a JMS connection and tracks it so tearDown can close it. */
   protected void createConnection() throws Exception {
      Connection conn = factory.createConnection();
      connections.add(conn);
   }
}
//
// MessagePack for Java
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package org.msgpack.core.buffer;

import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.nio.ByteBuffer;
import java.security.AccessController;
import java.security.PrivilegedAction;

import sun.misc.Unsafe;

/**
 * Wraps the difference of access methods to DirectBuffers between Android and others.
 * <p>
 * All reflective lookups happen once in the static initializer: it probes, in order,
 * the known hidden {@code DirectByteBuffer} constructor shapes (OpenJDK then Android
 * variants), caches the {@code address()} accessor, and selects the buffer-cleaning
 * strategy based on the runtime Java version. Any failure during this probing is
 * fatal (rethrown as {@link RuntimeException} from the initializer).
 */
class DirectBufferAccess
{
    private DirectBufferAccess()
    {}

    /**
     * Which hidden DirectByteBuffer constructor signature was found at class load.
     */
    enum DirectBufferConstructorType
    {
        // OpenJDK: DirectByteBuffer(long address, int cap, Object ob)
        ARGS_LONG_INT_REF,
        // Android: DirectByteBuffer(long address, int capacity)
        ARGS_LONG_INT,
        // older Android: DirectByteBuffer(int address, int capacity)
        ARGS_INT_INT,
        // Android via java.nio.MemoryBlock: DirectByteBuffer(MemoryBlock, int, int)
        ARGS_MB_INT_INT
    }

    static Method mGetAddress;

    // For Java <=8, gets a sun.misc.Cleaner
    static Method mCleaner;
    static Method mClean;

    // For Java >=9, invokes a jdk.internal.ref.Cleaner
    static Method mInvokeCleaner;

    // TODO We should use MethodHandle for efficiency, but it is not available in JDK6
    static Constructor<?> byteBufferConstructor;
    static Class<?> directByteBufferClass;
    static DirectBufferConstructorType directBufferConstructorType;
    // Only non-null when the ARGS_MB_INT_INT constructor variant was selected.
    static Method memoryBlockWrapFromJni;

    static {
        try {
            // A throwaway direct buffer used both to discover the concrete
            // DirectByteBuffer class and to validate the cleaner methods below.
            final ByteBuffer direct = ByteBuffer.allocateDirect(1);
            // Find the hidden constructor for DirectByteBuffer
            directByteBufferClass = direct.getClass();

            Constructor<?> directByteBufferConstructor = null;
            DirectBufferConstructorType constructorType = null;
            Method mbWrap = null;
            try {
                // TODO We should use MethodHandle for Java7, which can avoid the cost of boxing with JIT optimization
                directByteBufferConstructor = directByteBufferClass.getDeclaredConstructor(long.class, int.class, Object.class);
                constructorType = DirectBufferConstructorType.ARGS_LONG_INT_REF;
            }
            catch (NoSuchMethodException e0) {
                try {
                    // https://android.googlesource.com/platform/libcore/+/master/luni/src/main/java/java/nio/DirectByteBuffer.java
                    // DirectByteBuffer(long address, int capacity)
                    directByteBufferConstructor = directByteBufferClass.getDeclaredConstructor(long.class, int.class);
                    constructorType = DirectBufferConstructorType.ARGS_LONG_INT;
                }
                catch (NoSuchMethodException e1) {
                    try {
                        directByteBufferConstructor = directByteBufferClass.getDeclaredConstructor(int.class, int.class);
                        constructorType = DirectBufferConstructorType.ARGS_INT_INT;
                    }
                    catch (NoSuchMethodException e2) {
                        // Last resort: Android's MemoryBlock-based constructor.
                        // If this also fails, the NoSuchMethodException propagates to the
                        // outer catch and class initialization fails.
                        Class<?> aClass = Class.forName("java.nio.MemoryBlock");
                        mbWrap = aClass.getDeclaredMethod("wrapFromJni", int.class, long.class);
                        mbWrap.setAccessible(true);
                        directByteBufferConstructor = directByteBufferClass.getDeclaredConstructor(aClass, int.class, int.class);
                        constructorType = DirectBufferConstructorType.ARGS_MB_INT_INT;
                    }
                }
            }
            byteBufferConstructor = directByteBufferConstructor;
            directBufferConstructorType = constructorType;
            memoryBlockWrapFromJni = mbWrap;

            if (byteBufferConstructor == null) {
                throw new RuntimeException("Constructor of DirectByteBuffer is not found");
            }
            byteBufferConstructor.setAccessible(true);

            mGetAddress = directByteBufferClass.getDeclaredMethod("address");
            mGetAddress.setAccessible(true);

            // Select the cleaner strategy by runtime version (MessageBuffer.javaVersion
            // is computed elsewhere in this package).
            if (MessageBuffer.javaVersion <= 8) {
                setupCleanerJava6(direct);
            }
            else {
                setupCleanerJava9(direct);
            }
        }
        catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Resolves and caches {@code mCleaner}/{@code mClean} for Java 6-8.
     * The privileged lookups return either a usable Method or the Throwable
     * explaining the failure, which is rethrown here.
     */
    private static void setupCleanerJava6(final ByteBuffer direct)
    {
        Object obj;
        obj = AccessController.doPrivileged(new PrivilegedAction<Object>()
        {
            @Override
            public Object run()
            {
                return getCleanerMethod(direct);
            }
        });
        if (obj instanceof Throwable) {
            throw new RuntimeException((Throwable) obj);
        }
        mCleaner = (Method) obj;

        obj = AccessController.doPrivileged(new PrivilegedAction<Object>()
        {
            @Override
            public Object run()
            {
                return getCleanMethod(direct, mCleaner);
            }
        });
        if (obj instanceof Throwable) {
            throw new RuntimeException((Throwable) obj);
        }
        mClean = (Method) obj;
    }

    /**
     * Resolves and caches {@code mInvokeCleaner} (Unsafe.invokeCleaner) for Java 9+.
     */
    private static void setupCleanerJava9(final ByteBuffer direct)
    {
        Object obj = AccessController.doPrivileged(new PrivilegedAction<Object>()
        {
            @Override
            public Object run()
            {
                return getInvokeCleanerMethod(direct);
            }
        });
        if (obj instanceof Throwable) {
            throw new RuntimeException((Throwable) obj);
        }
        mInvokeCleaner = (Method) obj;
    }

    /**
     * Checks if we have a usable {@link DirectByteBuffer#cleaner}.
     * Note: invokes the method once on the probe buffer to prove accessibility.
     * @param direct a direct buffer
     * @return the method or an error
     */
    private static Object getCleanerMethod(ByteBuffer direct)
    {
        try {
            Method m = direct.getClass().getDeclaredMethod("cleaner");
            m.setAccessible(true);
            m.invoke(direct);
            return m;
        }
        catch (NoSuchMethodException e) {
            return e;
        }
        catch (InvocationTargetException e) {
            return e;
        }
        catch (IllegalAccessException e) {
            return e;
        }
    }

    /**
     * Checks if we have a usable {@link sun.misc.Cleaner#clean}.
     * Note: this actually cleans the probe buffer as part of the check, which is
     * why the probe buffer is never used afterwards.
     * @param direct a direct buffer
     * @param mCleaner the {@link DirectByteBuffer#cleaner} method
     * @return the method or null
     */
    private static Object getCleanMethod(ByteBuffer direct, Method mCleaner)
    {
        try {
            Method m = mCleaner.getReturnType().getDeclaredMethod("clean");
            Object c = mCleaner.invoke(direct);
            m.setAccessible(true);
            m.invoke(c);
            return m;
        }
        catch (NoSuchMethodException e) {
            return e;
        }
        catch (InvocationTargetException e) {
            return e;
        }
        catch (IllegalAccessException e) {
            return e;
        }
    }

    /**
     * Checks if we have a usable {@link Unsafe#invokeCleaner}.
     * @param direct a direct buffer
     * @return the method or an error
     */
    private static Object getInvokeCleanerMethod(ByteBuffer direct)
    {
        try {
            // See https://bugs.openjdk.java.net/browse/JDK-8171377
            Method m = MessageBuffer.unsafe.getClass().getDeclaredMethod(
                    "invokeCleaner", ByteBuffer.class);
            m.invoke(MessageBuffer.unsafe, direct);
            return m;
        }
        catch (NoSuchMethodException e) {
            return e;
        }
        catch (InvocationTargetException e) {
            return e;
        }
        catch (IllegalAccessException e) {
            return e;
        }
    }

    /**
     * Returns the native memory address of the given DirectByteBuffer via the
     * cached {@code address()} accessor. Reflection failures become unchecked.
     */
    static long getAddress(Object base)
    {
        try {
            return (Long) mGetAddress.invoke(base);
        }
        catch (IllegalAccessException e) {
            throw new RuntimeException(e);
        }
        catch (InvocationTargetException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Releases the native memory of a direct buffer immediately, using the
     * version-appropriate cleaner strategy selected at class load.
     */
    static void clean(Object base)
    {
        try {
            if (MessageBuffer.javaVersion <= 8) {
                Object cleaner = mCleaner.invoke(base);
                mClean.invoke(cleaner);
            }
            else {
                mInvokeCleaner.invoke(MessageBuffer.unsafe, base);
            }
        }
        catch (Throwable e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * True if {@code s} is an instance of the concrete DirectByteBuffer class
     * discovered at class load.
     */
    static boolean isDirectByteBufferInstance(Object s)
    {
        return directByteBufferClass.isInstance(s);
    }

    /**
     * Creates a DirectByteBuffer view over raw memory at {@code address + index}
     * of {@code length} bytes, using whichever hidden constructor variant was
     * discovered. {@code reference} is only used by the OpenJDK variant to keep
     * a backing object alive.
     */
    static ByteBuffer newByteBuffer(long address, int index, int length, ByteBuffer reference)
    {
        try {
            switch (directBufferConstructorType) {
                case ARGS_LONG_INT_REF:
                    return (ByteBuffer) byteBufferConstructor.newInstance(address + index, length, reference);
                case ARGS_LONG_INT:
                    return (ByteBuffer) byteBufferConstructor.newInstance(address + index, length);
                case ARGS_INT_INT:
                    // NOTE(review): (int) address + index truncates a 64-bit address to 32 bits;
                    // this matches the 32-bit-only Android constructor this branch targets.
                    return (ByteBuffer) byteBufferConstructor.newInstance((int) address + index, length);
                case ARGS_MB_INT_INT:
                    return (ByteBuffer) byteBufferConstructor.newInstance(
                            memoryBlockWrapFromJni.invoke(null, address + index, length),
                            length, 0);
                default:
                    throw new IllegalStateException("Unexpected value");
            }
        }
        catch (Throwable e) {
            // Convert checked exception to unchecked exception
            throw new RuntimeException(e);
        }
    }
}
/** * Copyright (C) 2008 Mathieu Carbou <mathieu.carbou@gmail.com> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.mycila.testing.plugins.jetty.locator; import static com.google.common.collect.Iterables.elementsEqual; import static com.google.common.collect.Lists.newArrayList; import static org.junit.Assert.assertTrue; import org.junit.Assert; import org.junit.Test; import org.springframework.util.AntPathMatcher; import com.mycila.testing.plugins.jetty.locator.AntPath; /** * Unit test of {@link AntPath}. 
*/ public class AntPathTest { @Test public void testRegex() { final String[] trueDatas = { "com/test.jsp", "com/a/test.jsp", "com/a/b/test.jsp", "com/a/b/test.jsp/c/test.jsp", "com/a/b/atest.jsp/c/test.jsp", }; final String[] falseDatas = { "com/atest.jsp", "com//test.jsp", "com/a/b/atest.jsp", }; final String[] patterns = { "com/([^/]+/)*test.jsp", "com/([^/]+?/)*test.jsp", "com/([^/]++/)*test.jsp", "com/([^/]+/)*?test.jsp", "com/([^/]+?/)*?test.jsp", "com/([^/]++/)*?test.jsp", "com/([^/]+/)*+test.jsp", "com/([^/]+?/)*+test.jsp", "com/([^/]++/)*+test.jsp", }; for (final String pattern : patterns) { //System.out.println("for " + pattern); for (final String data : trueDatas) { //System.out.println("\t" + data); Assert.assertTrue(data.matches(pattern)); } } for (final String pattern : patterns) { //System.out.println("!for " + pattern); for (final String data : falseDatas) { //System.out.println("\t" + data); Assert.assertFalse(data.matches(pattern)); } } } @Test public void testSplitFunction() { assertTrue(elementsEqual(newArrayList("test"), new AntPath.SplitFunction("**").apply("test"))); assertTrue(elementsEqual( newArrayList("te", "**", "st", "**", "or"), new AntPath.SplitFunction("**").apply("te**st**or"))); assertTrue(elementsEqual( newArrayList("", "**", "te", "**", "st", "**", "or", "**", ""), new AntPath.SplitFunction("**").apply("**te**st**or**"))); } @Test public void testMultiSplitFunction() { assertTrue(elementsEqual(newArrayList("test"), new AntPath.MultiSplitFunction("*", "**").apply("test"))); assertTrue(elementsEqual( newArrayList("te", "*", "st", "**", "or"), new AntPath.MultiSplitFunction("**", "*").apply("te*st**or"))); assertTrue(elementsEqual( newArrayList("te", "**", "st", "*", "or"), new AntPath.MultiSplitFunction("**", "*").apply("te**st*or"))); } @Test public void testSpringAntPathMatcherAntPathZeroOrMoreCharacters() { for (final Object[] data : getAntPathZeroOrMoreCharacters()) { final Boolean expected = (Boolean) data[0]; final String 
pattern = (String) data[1]; final String path = (String) data[2]; Assert.assertEquals( "expects " + expected + " path '" + path + "' matches '" + pattern + "'", expected, new AntPathMatcher().match(pattern, path)); } } @Test public void testSpringAntPathMatcherAntPathZeroOrMoreDirectories() { for (final Object[] data : getAntPathZeroOrMoreDirectories()) { final Boolean expected = (Boolean) data[0]; final String pattern = (String) data[1]; final String path = (String) data[2]; Assert.assertEquals( "expects " + expected + " path '" + path + "' matches '" + pattern + "'", expected, new AntPathMatcher().match(pattern, path)); } } @Test public void testMatchesAntPathOneCharacter() { for (final Object[] data : getAntPathOneCharacter()) { final Boolean expected = (Boolean) data[0]; final String pattern = (String) data[1]; final String path = (String) data[2]; Assert.assertEquals( "expects " + expected + " path '" + path + "' matches '" + pattern + "'", expected, new AntPath(pattern).matches(path)); } } @Test public void testMatchesAntPathZeroOrMoreCharacters() { for (final Object[] data : getAntPathZeroOrMoreCharacters()) { final Boolean expected = (Boolean) data[0]; final String pattern = (String) data[1]; final String path = (String) data[2]; Assert.assertEquals( "expects " + expected + " path '" + path + "' matches '" + pattern + "'", expected, new AntPath(pattern).matches(path)); } } @Test public void testMatchesAntPathZeroOrMoreDirectories() { for (final Object[] data : getAntPathZeroOrMoreDirectories()) { final Boolean expected = (Boolean) data[0]; final String pattern = (String) data[1]; final String path = (String) data[2]; Assert.assertEquals( "expects " + expected + " path '" + path + "' matches '" + pattern + "'", expected, new AntPath(pattern).matches(path)); } } private static Object[][] getAntPathOneCharacter() { //@formatter:off final Object[][] data = { {true, "com/t?st.jsp", "com/test.jsp"}, {true, "com/t?st.jsp", "com/tast.jsp"}, {true, "com/t?st.jsp", 
"com/txst.jsp"}, {false, "com/t?st.jsp", "com/tst.jsp"}, {false, "com/t?st.jsp", "com/teest.jsp"}, {false, "com/t?st.jsp", "com/t/st.jsp"}, }; //@formatter:on return data; } private static Object[][] getAntPathZeroOrMoreCharacters() { //@formatter:off final Object[][] data = { // matches all .jsp files in the com directory {true, "com/*.jsp", "com/.jsp"}, {true, "com/*.jsp", "com/file.jsp"}, {true, "com/*.jsp", "com/file.data.jsp"}, {false, "com/*.jsp", "com/path/file.jsp"}, {false, "com/*.jsp", "comfile.jsp"}, {true, "com/f*e.jsp", "com/file.jsp"}, {true, "com/f*p", "com/file.jsp"}, }; //@formatter:on return data; } private static Object[][] getAntPathZeroOrMoreDirectories() { //@formatter:off final Object[][] data = { // matches all test.jsp files underneath the com path {true, "com/**/test.jsp", "com/test.jsp"}, {true, "com/**/test.jsp", "com/path/test.jsp"}, {true, "com/**/test.jsp", "com/path/to/test.jsp"}, {false, "com/**/test.jsp", "com/atest.jsp"}, // incompatibility between SpringAntPath and this {true, "com/**/test.jsp", "com//test.jsp"}, {false, "com/**/test.jsp", "com/path/to/ttest.jsp"}, // incompatibility between SpringAntPath and this {false, "com/***/test.jsp", "com/path/to2/test.jsp"}, // incompatibility between SpringAntPath and this {false, "com/*******/test.jsp", "com/path/to3/test.jsp"}, // matches all .jsp files underneath the org/springframework path {true, "org/springframework/**/*.jsp", "org/springframework/test.jsp"}, {true, "org/springframework/**/*.jsp", "org/springframework/path/test.jsp"}, {true, "org/springframework/**/*.jsp", "org/springframework/path/to/test.jsp"}, {false, "org/springframework/**/*.jsp", "org.springframework/path/to/test.jsp"}, // matches org/springframework/servlet/bla.jsp but also org/springframework/testing/servlet/bla.jsp and org/servlet/bla.jsp {true, "org/**/servlet/bla.jsp", "org/springframework/servlet/bla.jsp"}, {true, "org/**/servlet/bla.jsp", "org/springframework/testing/servlet/bla.jsp"}, {true, 
"org/**/servlet/bla.jsp", "org/servlet/bla.jsp"}, {false, "org/**/servlet/bla.jsp", "org.servlet/bla.jsp"}, {false, "org/**/servlet/bla.jsp", "org/servlet/blu.jsp"}, {true, "?", "t"}, {true, "*", "test.jsp"}, {true, "**", "com/path/to/test.jsp"}, {true, "com/**", "com/"}, {true, "com/**", "com/path/to/"}, {true, "com/**", "com/path/to"}, {true, "com/**", "com/test.jsp"}, {true, "com/**", "com/path/to/test.jsp"}, {true, "com/**/", "com/"}, {true, "com/**/", "com/path/to/"}, // incompatibility between SpringAntPath and this {true, "com/**/", "com/path/to"}, // incompatibility between SpringAntPath and this {true, "com/**/", "com/test.jsp"}, // incompatibility between SpringAntPath and this {true, "com/**/", "com/path/to/test.jsp"}, }; //@formatter:on return data; } }
// Copyright 2017 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.rules.android; import static com.google.common.truth.Truth.assertThat; import static com.google.devtools.build.lib.actions.util.ActionsTestUtil.getFirstArtifactEndingWith; import com.google.common.collect.Iterables; import com.google.devtools.build.lib.actions.Action; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.analysis.ConfiguredTarget; import com.google.devtools.build.lib.analysis.FileProvider; import com.google.devtools.build.lib.analysis.FilesToRunProvider; import com.google.devtools.build.lib.analysis.RunfilesProvider; import com.google.devtools.build.lib.analysis.actions.TemplateExpansionAction; import com.google.devtools.build.lib.collect.nestedset.NestedSet; import com.google.devtools.build.lib.skyframe.ConfiguredTargetAndData; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Tests for {@link AndroidInstrumentationTest}. 
 */
@RunWith(JUnit4.class)
public class AndroidInstrumentationTestTest extends AndroidBuildViewTestCase {

  /**
   * Scratch workspace shared by all tests: an instrumented app and support apk,
   * an instrumentation app, a device-script and host-service fixture, a server
   * binary, and the android_instrumentation_test tying them together.
   */
  @Before
  public void setup() throws Exception {
    scratch.file(
        "java/com/app/BUILD",
        "android_binary(",
        " name = 'app',",
        " manifest = 'AndroidManifest.xml',",
        ")",
        "android_binary(",
        " name = 'support',",
        " manifest = 'AndroidManifest.xml',",
        ")");
    scratch.file(
        "javatests/com/app/BUILD",
        "android_binary(",
        " name = 'instrumentation_app',",
        " instruments = '//java/com/app',",
        " manifest = 'AndroidManifest.xml',",
        ")",
        "android_device_script_fixture(",
        " name = 'device_fixture',",
        " cmd = 'foo bar',",
        ")",
        "android_host_service_fixture(",
        " name = 'host_fixture',",
        " executable = '//java/com/server',",
        " service_names = ['foo', 'bar'],",
        ")");
    scratch.file(
        "java/com/server/BUILD",
        "java_binary(",
        " name = 'server',",
        " main_class = 'does.not.exist',",
        " srcs = [],",
        ")");
    scratch.file(
        "javatests/com/app/ait/BUILD",
        "android_instrumentation_test(",
        " name = 'ait',",
        " test_app = '//javatests/com/app:instrumentation_app',",
        " target_device = '//tools/android/emulated_device:nexus_6',",
        " fixtures = [",
        " '//javatests/com/app:device_fixture',",
        " '//javatests/com/app:host_fixture',",
        " ],",
        " support_apks = [",
        " '//java/com/app:support',",
        " ],",
        " data = [",
        " 'foo.txt',",
        " ],",
        ")");
    setupTargetDevice();
  }

  // TODO(ajmichael): Share this with AndroidDeviceTest.java
  /** Declares the emulated android_device target referenced by the test rule. */
  private void setupTargetDevice() throws Exception {
    scratch.file(
        "tools/android/emulated_device/BUILD",
        "filegroup(",
        " name = 'emulator_images_android_21_x86',",
        " srcs = [",
        " 'android_21/x86/kernel-qemu',",
        " 'android_21/x86/ramdisk.img',",
        " 'android_21/x86/source.properties',",
        " 'android_21/x86/system.img.tar.gz',",
        " 'android_21/x86/userdata.img.tar.gz'",
        " ],",
        ")",
        "android_device(",
        " name = 'nexus_6',",
        " ram = 2047,",
        " horizontal_resolution = 720, ",
        " vertical_resolution = 1280, ",
        " cache = 32, ",
        " system_image = ':emulator_images_android_21_x86',",
        " screen_density = 280, ",
        " vm_heap = 256",
        ")");
  }

  /**
   * The test's runfiles must include the device's, the host fixture server's and
   * the test entry point's runfiles, plus the fixture script, both apks and the
   * data dependency.
   */
  @Test
  public void testTestExecutableRunfiles() throws Exception {
    ConfiguredTargetAndData androidInstrumentationTest =
        getConfiguredTargetAndData("//javatests/com/app/ait");
    NestedSet<Artifact> runfiles =
        androidInstrumentationTest
            .getConfiguredTarget()
            .getProvider(RunfilesProvider.class)
            .getDefaultRunfiles()
            .getAllArtifacts();
    assertThat(runfiles)
        .containsAllIn(
            getHostConfiguredTarget("//tools/android/emulated_device:nexus_6")
                .getProvider(RunfilesProvider.class)
                .getDefaultRunfiles()
                .getAllArtifacts());
    assertThat(runfiles)
        .containsAllIn(
            getHostConfiguredTarget("//java/com/server")
                .getProvider(RunfilesProvider.class)
                .getDefaultRunfiles()
                .getAllArtifacts());
    assertThat(runfiles)
        .containsAllIn(
            getHostConfiguredTarget(
                    androidInstrumentationTest
                        .getTarget()
                        .getAssociatedRule()
                        .getAttrDefaultValue("$test_entry_point")
                        .toString())
                .getProvider(RunfilesProvider.class)
                .getDefaultRunfiles()
                .getAllArtifacts());
    assertThat(runfiles)
        .containsAllOf(
            getDeviceFixtureScript(getConfiguredTarget("//javatests/com/app:device_fixture")),
            getInstrumentationApk(getConfiguredTarget("//javatests/com/app:instrumentation_app")),
            getTargetApk(getConfiguredTarget("//javatests/com/app:instrumentation_app")),
            Iterables.getOnlyElement(
                getConfiguredTarget("//javatests/com/app/ait:foo.txt")
                    .getProvider(FileProvider.class)
                    .getFilesToBuild()));
  }

  /**
   * The generated test stub script must reference the apks, fixtures, device
   * script and data deps by their expected paths.
   */
  @Test
  public void testTestExecutableContents() throws Exception {
    ConfiguredTarget androidInstrumentationTest = getConfiguredTarget("//javatests/com/app/ait");
    assertThat(androidInstrumentationTest).isNotNull();
    String testExecutableScript = getTestStubContents(androidInstrumentationTest);
    assertThat(testExecutableScript)
        .contains("instrumentation_apk=\"javatests/com/app/instrumentation_app.apk\"");
    assertThat(testExecutableScript).contains("target_apk=\"java/com/app/app.apk\"");
    assertThat(testExecutableScript).contains("support_apks=\"java/com/app/support.apk\"");
    assertThat(testExecutableScript)
        .contains(
            "declare -A device_script_fixtures=( "
                + "[javatests/com/app/cmd_device_fixtures/device_fixture/cmd.sh]=false,true )");
    assertThat(testExecutableScript).contains("host_service_fixture=\"java/com/server/server\"");
    assertThat(testExecutableScript).contains("host_service_fixture_services=\"foo,bar\"");
    assertThat(testExecutableScript)
        .contains("device_script=\"${WORKSPACE_DIR}/tools/android/emulated_device/nexus_6\"");
    assertThat(testExecutableScript).contains("data_deps=\"javatests/com/app/ait/foo.txt\"");
  }

  /** Two android_host_service_fixture fixtures on one test is an analysis error. */
  @Test
  public void testAtMostOneHostServiceFixture() throws Exception {
    checkError(
        "javatests/com/app/ait2",
        "ait",
        "android_instrumentation_test accepts at most one android_host_service_fixture",
        "android_host_service_fixture(",
        " name = 'host_fixture',",
        " executable = '//java/com/server',",
        " service_names = ['foo', 'bar'],",
        ")",
        "android_instrumentation_test(",
        " name = 'ait',",
        " test_app = '//javatests/com/app:instrumentation_app',",
        " target_device = '//tools/android/emulated_device:nexus_6',",
        " fixtures = [",
        " ':host_fixture',",
        " '//javatests/com/app:host_fixture',",
        " ],",
        ")");
  }

  /** test_app must be an android_binary with an 'instruments' attribute. */
  @Test
  public void testInstrumentationBinaryIsInstrumenting() throws Exception {
    checkError(
        "javatests/com/app/instr",
        "ait",
        "The android_binary target //javatests/com/app/instr:app "
            + "is missing an 'instruments' attribute",
        "android_binary(",
        " name = 'app',",
        " srcs = ['a.java'],",
        " manifest = 'AndroidManifest.xml',",
        ")",
        "android_instrumentation_test(",
        " name = 'ait',",
        " test_app = ':app',",
        " target_device = '//tools/android/emulated_device:nexus_6',",
        ")");
  }

  /**
   * A Skylark rule providing DeviceBrokerInfo can serve as target_device; the
   * broker type flows into the generated stub script.
   */
  @Test
  public void testAndroidInstrumentationTestWithSkylarkDevice() throws Exception {
    scratch.file(
        "javatests/com/app/skylarkdevice/local_adb_device.bzl",
        "def _impl(ctx):",
        " ctx.actions.write(output=ctx.outputs.executable, content='', is_executable=True)",
        " return [android_common.create_device_broker_info('LOCAL_ADB_SERVER')]",
        "local_adb_device = rule(implementation=_impl, executable=True)");
    scratch.file(
        "javatests/com/app/skylarkdevice/BUILD",
        "load(':local_adb_device.bzl', 'local_adb_device')",
        "local_adb_device(name = 'local_adb_device')",
        "android_instrumentation_test(",
        " name = 'ait',",
        " test_app = '//javatests/com/app:instrumentation_app',",
        " target_device = ':local_adb_device',",
        ")");
    String testExecutableScript =
        getTestStubContents(getConfiguredTarget("//javatests/com/app/skylarkdevice:ait"));
    assertThat(testExecutableScript).contains("device_broker_type=\"LOCAL_ADB_SERVER\"");
  }

  /** First .sh artifact produced by a device-script fixture target. */
  private static Artifact getDeviceFixtureScript(ConfiguredTarget deviceScriptFixture) {
    return getFirstArtifactEndingWith(
        deviceScriptFixture.getProvider(FileProvider.class).getFilesToBuild(), ".sh");
  }

  /** Instrumentation apk exposed through AndroidInstrumentationInfo. */
  private static Artifact getInstrumentationApk(ConfiguredTarget instrumentation) {
    return instrumentation.get(AndroidInstrumentationInfo.PROVIDER).getInstrumentationApk();
  }

  /** Apk under test exposed through AndroidInstrumentationInfo. */
  private static Artifact getTargetApk(ConfiguredTarget instrumentation) {
    return instrumentation.get(AndroidInstrumentationInfo.PROVIDER).getTargetApk();
  }

  /** Expands the template action behind the test's executable into its script text. */
  private String getTestStubContents(ConfiguredTarget androidInstrumentationTest)
      throws Exception {
    Action templateAction =
        getGeneratingAction(
            androidInstrumentationTest.getProvider(FilesToRunProvider.class).getExecutable());
    return ((TemplateExpansionAction) templateAction).getFileContents();
  }
}
/* * This file is part of SpongeAPI, licensed under the MIT License (MIT). * * Copyright (c) SpongePowered <https://www.spongepowered.org> * Copyright (c) contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
 */
package org.spongepowered.api.extra.skylands;

import com.flowpowered.math.vector.Vector3i;
import com.flowpowered.noise.module.Module;
import com.flowpowered.noise.module.source.Voronoi;
import org.spongepowered.api.block.BlockState;
import org.spongepowered.api.block.BlockType;
import org.spongepowered.api.block.BlockTypes;
import org.spongepowered.api.data.key.Keys;
import org.spongepowered.api.data.type.PlantType;
import org.spongepowered.api.data.type.PlantTypes;
import org.spongepowered.api.data.type.ShrubTypes;
import org.spongepowered.api.world.World;
import org.spongepowered.api.world.extent.ImmutableBiomeArea;
import org.spongepowered.api.world.extent.MutableBlockVolume;
import org.spongepowered.api.world.gen.GenerationPopulator;

/**
 * Places tall grass with groups of flowers.
 *
 * <p>Flower placement is driven by two seeded Voronoi modules: {@code flowerCells}
 * picks which {@link Flower} (or null = plain grass) a cell gets, and
 * {@code flowerDensities} (through {@link RarityCurve}) decides how likely flowers
 * are within the cell. A second pass with a derived seed overlaps a second layer
 * of flower cells.</p>
 */
public class SkylandsGrassPopulator implements GenerationPopulator {

    // per-column hash value must reach this before plain grass is placed
    private static final double GRASS_ODDS = 0.3;
    // rarer threshold intended for double-height grass (see TODO in populate)
    private static final double DOUBLE_GRASS_ODDS = 0.9;
    // threshold for sparse grass on covered (lower) surfaces
    private static final double COVERED_GRASS_ODDS = 0.8;
    private static final BlockState TALL_GRASS;
    // the type of flower cells, null means just grass, so it's not all flowers
    @SuppressWarnings("ConstantConditions")
    private static final Flower[] FLOWERS = {
            new Flower(BlockTypes.YELLOW_FLOWER),
            new Flower(PlantTypes.WHITE_TULIP),
            new Flower(PlantTypes.ORANGE_TULIP),
            new Flower(PlantTypes.BLUE_ORCHID),
            new Flower(PlantTypes.HOUSTONIA),
            new Flower(PlantTypes.POPPY),
            null, null, null, null, null, null
    };
    private final Voronoi flowerCells = new Voronoi();
    private final Voronoi flowerDensities = new Voronoi();
    private final RarityCurve flowerOdds = new RarityCurve();

    static {
        //noinspection ConstantConditions
        final BlockState defaultGrass = BlockTypes.TALLGRASS.getDefaultState();
        //noinspection ConstantConditions
        TALL_GRASS = defaultGrass.with(Keys.SHRUB_TYPE, ShrubTypes.TALL_GRASS).get();
    }

    /**
     * Constructs a new grass populator for the Skylands.
     */
    public SkylandsGrassPopulator() {
        this.flowerCells.setFrequency(0.1);
        // displacement maps each cell to an index into FLOWERS
        this.flowerCells.setDisplacement(FLOWERS.length - 1);
        this.flowerCells.setEnableDistance(false);
        this.flowerDensities.setFrequency(0.1);
        this.flowerDensities.setDisplacement(0);
        this.flowerDensities.setEnableDistance(true);
        this.flowerOdds.setSourceModule(0, this.flowerDensities);
        this.flowerOdds.setDegree(5);
    }

    /**
     * For each column in the buffer: decorates the topmost grass block with a
     * flower or (tall) grass according to the cell noise and a per-column hash,
     * then walks downward through covered surfaces placing only sparse grass.
     */
    @Override
    @SuppressWarnings("ConstantConditions")
    public void populate(World world, MutableBlockVolume buffer, ImmutableBiomeArea biomes) {
        final Vector3i max = buffer.getBlockMax();
        final Vector3i min = buffer.getBlockMin();
        final int yMax = max.getY() - 2;
        final int yMin = min.getY();
        // skip chunks entirely outside the terrain's vertical band
        if (yMax < SkylandsTerrainGenerator.MIN_HEIGHT || yMin > SkylandsTerrainGenerator.MAX_HEIGHT) {
            return;
        }
        final long seed = world.getProperties().getSeed();
        // fold the 64-bit world seed into an int for the noise modules
        final int intSeed = (int) (seed >> 32 ^ seed);
        // second, derived seed for the overlapping flower-cell layer
        final int intSeed2 = intSeed * 28703;
        final int yStart = Math.min(yMax, SkylandsTerrainGenerator.MAX_HEIGHT);
        final int yEnd = Math.max(yMin, SkylandsTerrainGenerator.MIN_HEIGHT);
        final int xMin = min.getX();
        final int zMin = min.getZ();
        final int xMax = max.getX();
        final int zMax = max.getZ();
        for (int zz = zMin; zz <= zMax; zz++) {
            for (int xx = xMin; xx <= xMax; xx++) {
                // get the y value of the topmost block
                int yy = SkylandsUtil.getNextSolid(buffer, xx, yStart, zz, yEnd);
                if (yy < yEnd) {
                    continue;
                }
                // only place tall grass and flowers on grass blocks
                if (buffer.getBlockType(xx, yy, zz) == BlockTypes.GRASS) {
                    // some random value to compare to odds
                    final float value = SkylandsUtil.hashToFloat(xx, zz, seed);
                    // get the flower for the current cell, may be null
                    // NOTE: seeds are (re)applied before each getValue call because the
                    // same module instances are reused with two different seeds below
                    this.flowerCells.setSeed(intSeed);
                    this.flowerDensities.setSeed(intSeed);
                    Flower flower = FLOWERS[(int) this.flowerCells.getValue(xx, 0, zz)];
                    // check if we have a flower based on odds for the cell
                    if (flower == null || value < this.flowerOdds.getValue(xx, 0, zz)) {
                        // try with a different seed to create a second layer of flower cells, giving us some overlap
                        this.flowerCells.setSeed(intSeed2);
                        this.flowerDensities.setSeed(intSeed2);
                        flower = FLOWERS[(int) this.flowerCells.getValue(xx, 0, zz)];
                        // try the check again if we have a flower
                        if (flower != null && value < this.flowerOdds.getValue(xx, 0, zz)) {
                            // check failed, no flowers
                            flower = null;
                        }
                    }
                    if (flower != null) {
                        buffer.setBlock(xx, yy + 1, zz, flower.getBlock());
                        if (flower.isDoubleHeight()) {
                            buffer.setBlock(xx, yy + 2, zz, flower.getUpperBlock());
                        }
                    }
                    else if (value >= GRASS_ODDS) {
                        // if no flower, check if the value is greater than the grass odds
                        if (value >= DOUBLE_GRASS_ODDS && yy + 1 < yMax) {
                            // tall grass is a bit more rare
                            //buffer.setBlockType(xx, yy + 1, zz, BlockTypes.MELON_BLOCK);
                            //buffer.setBlockType(xx, yy + 2, zz, BlockTypes.MELON_BLOCK);
                            // TODO: fix double plants
                            // NOTE(review): both branches currently place the same single-height
                            // TALL_GRASS; the double-height variant is pending the TODO above.
                            buffer.setBlock(xx, yy + 1, zz, TALL_GRASS);
                        }
                        else {
                            buffer.setBlock(xx, yy + 1, zz, TALL_GRASS);
                        }
                    }
                }
                // locations underneath this one will only get grass and less of it as they are covered
                yy = SkylandsUtil.getNextAir(buffer, xx, yy, zz, yEnd);
                yy = SkylandsUtil.getNextSolid(buffer, xx, yy, zz, yEnd);
                int layerNumber = 0;
                while (yy >= yEnd) {
                    // only place on grass blockss
                    if (buffer.getBlockType(xx, yy, zz) == BlockTypes.GRASS) {
                        // generate a new random value for the layer
                        final float value = SkylandsUtil.hashToFloat(xx, layerNumber, zz, seed);
                        if (value >= COVERED_GRASS_ODDS) {
                            buffer.setBlock(xx, yy + 1, zz, TALL_GRASS);
                        }
                    }
                    layerNumber++;
                    // descend to the next covered surface: skip the solid run, then find
                    // the next solid block below the following air gap
                    yy = SkylandsUtil.getNextAir(buffer, xx, yy, zz, yEnd);
                    yy = SkylandsUtil.getNextSolid(buffer, xx, yy, zz, yEnd);
                }
            }
        }
    }

    /**
     * Immutable description of a placeable flower block, optionally double height.
     */
    @SuppressWarnings("ConstantConditions")
    private static class Flower {
        private static final BlockState DEFAULT_FLOWER = BlockTypes.RED_FLOWER.getDefaultState();
        private final BlockState block;
        private final boolean doubleHeight;
        private final BlockState upperBlock;

        // flower given as a plant variant of the default RED_FLOWER state
        private Flower(PlantType type) {
            this(DEFAULT_FLOWER.with(Keys.PLANT_TYPE, type).get(), false);
        }

        // flower given as a whole block type (e.g. YELLOW_FLOWER)
        private Flower(BlockType block) {
            this(block.getDefaultState(), false);
        }

        private Flower(BlockState block, boolean doubleHeight) {
            this.block = block;
            this.doubleHeight = doubleHeight;
            // NOTE(review): upper block is currently the same state as the lower one —
            // presumably a placeholder until double plants are fixed; confirm.
            this.upperBlock = this.block;
        }

        private BlockState getBlock() {
            return this.block;
        }

        private boolean isDoubleHeight() {
            return this.doubleHeight;
        }

        private BlockState getUpperBlock() {
            return this.upperBlock;
        }
    }

    /**
     * Noise post-processor mapping its source value v to 1 - (1 - v)^degree,
     * which skews the distribution so higher degrees make large odds rarer.
     */
    private static class RarityCurve extends Module {
        private double degree;

        private RarityCurve() {
            super(1);
        }

        private void setDegree(double degree) {
            this.degree = degree;
        }

        @Override
        public int getSourceModuleCount() {
            return 1;
        }

        @Override
        public double getValue(double x, double y, double z) {
            final double value = this.sourceModule[0].getValue(x, y, z);
            return 1 - Math.pow(1 - value, this.degree);
        }
    }
}
package calculate;

import java.io.IOException;
import java.util.*;

import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import facebook4j.Facebook;
import facebook4j.FacebookException;
import facebook4j.Friend;
import facebook4j.Reading;
import facebook4j.ResponseList;
import facebook4j.User;
import facebook4j.internal.org.json.JSONArray;
import facebook4j.internal.org.json.JSONObject;
import facebook4j.internal.org.json.JSONException;

/**
 * Servlet that queries a logged-in user's friends via Facebook FQL and
 * computes the most common music, movie and TV interests among them.
 * <p>
 * The top {@link #TOP_N} entries of each category are stored as request
 * attributes ({@code "music"}, {@code "movies"}, {@code "tv"}), each an
 * {@code ArrayList<myObject>}, and the request is forwarded to
 * {@code profile.jsp}.
 */
@WebServlet("/calculate")
public class calculate extends HttpServlet {
	private static final long serialVersionUID = 1L;

	/** Number of top-ranked entries reported per category. */
	private static final int TOP_N = 15;

	/**
	 * @see HttpServlet#HttpServlet()
	 */
	public calculate() {
		super();
	}

	/**
	 * Handles GET: runs the FQL queries, tallies each interest category over
	 * the friend rows and forwards the ranked top lists to {@code profile.jsp}.
	 *
	 * @param request  expects a {@code "facebook"} session attribute holding an
	 *                 authenticated {@link Facebook} client
	 * @param response forwarded to {@code profile.jsp}
	 * @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response)
	 */
	protected void doGet(HttpServletRequest request, HttpServletResponse response)
			throws ServletException, IOException {
		Facebook facebook = (Facebook) request.getSession().getAttribute("facebook");

		/*********************************************************************************
		 * Queries
		 *********************************************************************************/
		// Multiple FQL: one query per interest category, each restricted to
		// (at most) 500 friends of the current user.
		Map<String, String> queries = new HashMap<String, String>();
		queries.put("music", "SELECT name,music FROM user WHERE uid IN (SELECT uid2 FROM friend WHERE uid1 = me() LIMIT 500) AND CONTAINS('musician/band')");
		queries.put("movies", "SELECT name,movies FROM user WHERE uid IN (SELECT uid2 FROM friend WHERE uid1 = me() LIMIT 500)");
		queries.put("tv", "SELECT name,tv FROM user WHERE uid IN (SELECT uid2 FROM friend WHERE uid1 = me() LIMIT 500)");

		Map<String, JSONArray> result = null;
		try {
			result = facebook.executeMultiFQL(queries);
		} catch (FacebookException e) {
			// Best-effort: fall through with a null result; the helpers below
			// treat a null row set as empty so the page still renders.
			e.printStackTrace();
		}

		// Tally and rank each category. Each category iterates its OWN row
		// array (the original code reused the music array's length for the
		// movies and tv loops, silently truncating or overrunning them).
		JSONArray musicRows = result == null ? null : result.get("music");
		JSONArray movieRows = result == null ? null : result.get("movies");
		JSONArray tvRows = result == null ? null : result.get("tv");

		/**************************************************************************
		 * Setting Data to Forward
		 **************************************************************************/
		request.setAttribute("music", topEntries(countTerms(musicRows, "music"), TOP_N));
		request.setAttribute("movies", topEntries(countTerms(movieRows, "movies"), TOP_N));
		request.setAttribute("tv", topEntries(countTerms(tvRows, "tv"), TOP_N));

		request.getRequestDispatcher("profile.jsp").forward(request, response);
	}

	/**
	 * Tallies the comma-separated values of {@code field} across all rows.
	 *
	 * @param rows  FQL result rows; {@code null} is treated as empty
	 * @param field name of the JSON field holding the comma-separated list
	 * @return map from trimmed term to its occurrence count
	 */
	private HashMap<String, Integer> countTerms(JSONArray rows, String field) {
		HashMap<String, Integer> counts = new HashMap<String, Integer>();
		if (rows == null) {
			return counts;
		}
		for (int i = 0; i < rows.length(); i++) {
			try {
				JSONObject row = rows.getJSONObject(i);
				String list = (String) row.get(field);
				if (list == null) {
					continue;
				}
				String[] terms = list.split("[,]+");
				// Skip friends with an empty interest list (split of "" yields
				// a single empty element).
				if (terms.length == 0 || terms[0] == null || terms[0].length() == 0) {
					continue;
				}
				for (int j = 0; j < terms.length; j++) {
					String term = terms[j].trim();
					Integer previous = counts.get(term);
					counts.put(term, previous == null ? 1 : previous + 1);
				}
			} catch (JSONException e) {
				// A malformed row should not abort the whole tally.
				e.printStackTrace();
			}
		}
		return counts;
	}

	/**
	 * Ranks the tallied terms by descending count and returns the top
	 * {@code n} as {@code myObject}s. Safely returns fewer than {@code n}
	 * entries when the map is smaller (the original code threw
	 * {@code IndexOutOfBoundsException} in that case).
	 *
	 * @param counts term-to-count map
	 * @param n      maximum number of entries to return
	 * @return ranked list, size {@code min(n, counts.size())}
	 */
	private ArrayList<myObject> topEntries(HashMap<String, Integer> counts, int n) {
		List<Map.Entry<String, Integer>> entries =
				new ArrayList<Map.Entry<String, Integer>>(counts.entrySet());
		Collections.sort(entries, new Comparator<Map.Entry<String, Integer>>() {
			public int compare(Map.Entry<String, Integer> e1, Map.Entry<String, Integer> e2) {
				// Descending by count.
				return e2.getValue().compareTo(e1.getValue());
			}
		});
		ArrayList<myObject> top = new ArrayList<myObject>();
		int limit = Math.min(n, entries.size());
		for (int k = 0; k < limit; k++) {
			top.add(new myObject(entries.get(k).getKey(), entries.get(k).getValue()));
		}
		return top;
	}

	/**
	 * @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse response)
	 */
	protected void doPost(HttpServletRequest request, HttpServletResponse response)
			throws ServletException, IOException {
		// TODO Auto-generated method stub
	}

}
package io.reneses.tela.core.sessions.repositories;

import com.tinkerpop.blueprints.impls.orient.OrientBaseGraph;
import com.tinkerpop.blueprints.impls.orient.OrientVertex;
import io.reneses.tela.TestUtils;
import io.reneses.tela.core.databases.orientdb.OrientGraphWrapperFactory;
import io.reneses.tela.core.databases.orientdb.OrientGraphWrapper;
import io.reneses.tela.core.sessions.databases.extensions.SessionOrientDatabaseExtension;
import io.reneses.tela.core.sessions.models.Session;
import org.junit.*;

import java.util.List;

import static org.junit.Assert.*;

/**
 * Integration tests for {@link OrientSessionManagerRepository}: verifies that
 * sessions and their per-module tokens are persisted to, looked up in, and
 * removed from the OrientDB graph.
 */
public class OrientSessionManagerRepositoryTest {

    private OrientSessionManagerRepository repository;
    private OrientGraphWrapper telaGraph;
    private OrientBaseGraph graph;

    @Before
    public void setUp() throws Exception {
        TestUtils.configureSessionManager();
        repository = new OrientSessionManagerRepository();
        telaGraph = OrientGraphWrapperFactory.get();
        // Reuse the wrapper obtained above instead of asking the factory twice.
        graph = telaGraph.getNoTxGraph();
    }

    @After
    public void tearDown() throws Exception {
        // Drop all data so each test starts from an empty graph.
        TestUtils.destroyDatabase();
    }

    /** A session without module tokens yields one session vertex and no token vertices. */
    @Test
    public void createWithoutModules() throws Exception {
        Session session = new Session();
        repository.create(session);
        List<OrientVertex> vertices = telaGraph.getVertices(graph, SessionOrientDatabaseExtension.SESSION_CLASS);
        assertEquals(1, vertices.size());
        assertEquals(session.getId(), vertices.get(0).getProperty(SessionOrientDatabaseExtension.SESSION_ID));
        assertEquals(session.getAccessToken(), vertices.get(0).getProperty(SessionOrientDatabaseExtension.SESSION_ACCESS_TOKEN));
        vertices = telaGraph.getVertices(graph, SessionOrientDatabaseExtension.TOKEN_CLASS);
        assertTrue(vertices.isEmpty());
    }

    /** Each added module token is persisted as its own token vertex. */
    @Test
    public void createWithModules() throws Exception {
        Session session = new Session();
        repository.create(session);
        repository.addModuleToken(session, "t1", "1");
        repository.addModuleToken(session, "t2", "2");
        List<OrientVertex> vertices = telaGraph.getVertices(graph, SessionOrientDatabaseExtension.SESSION_CLASS);
        assertEquals(1, vertices.size());
        assertEquals(session.getId(), vertices.get(0).getProperty(SessionOrientDatabaseExtension.SESSION_ID));
        assertEquals(session.getAccessToken(), vertices.get(0).getProperty(SessionOrientDatabaseExtension.SESSION_ACCESS_TOKEN));
        vertices = telaGraph.getVertices(graph, SessionOrientDatabaseExtension.TOKEN_CLASS);
        assertEquals(2, vertices.size());
    }

    /** Deleting a session removes its vertex. */
    @Test
    public void delete() throws Exception {
        Session session = new Session();
        repository.create(session);
        assertTrue(repository.delete(session));
        List<OrientVertex> vertices = telaGraph.getVertices(graph, SessionOrientDatabaseExtension.SESSION_CLASS);
        assertTrue(vertices.isEmpty());
        vertices = telaGraph.getVertices(graph, SessionOrientDatabaseExtension.TOKEN_CLASS);
        assertTrue(vertices.isEmpty());
    }

    /** Deleting a session also cascades to its module-token vertices. */
    @Test
    public void deleteWithTokens() throws Exception {
        Session session = new Session();
        repository.create(session);
        repository.addModuleToken(session, "t1", "1");
        repository.addModuleToken(session, "t2", "2");
        assertTrue(repository.delete(session));
        List<OrientVertex> vertices = telaGraph.getVertices(graph, SessionOrientDatabaseExtension.SESSION_CLASS);
        assertTrue(vertices.isEmpty());
        vertices = telaGraph.getVertices(graph, SessionOrientDatabaseExtension.TOKEN_CLASS);
        assertTrue(vertices.isEmpty());
    }

    /** Deleting a never-persisted session reports failure. */
    @Test
    public void deleteNotExisting() throws Exception {
        Session session = new Session();
        assertFalse(repository.delete(session));
    }

    @Test
    public void findByAccessToken() throws Exception {
        Session session = new Session();
        repository.create(session);
        Session retrieved = repository.findByAccessToken(session.getAccessToken());
        assertNotNull(retrieved);
        assertEquals(session.getId(), retrieved.getId());
    }

    /** Lookup by access token also restores the session's module tokens. */
    @Test
    public void findByAccessTokenWithTokens() throws Exception {
        Session session = new Session();
        repository.create(session);
        repository.addModuleToken(session, "t", "1");
        Session retrieved = repository.findByAccessToken(session.getAccessToken());
        assertNotNull(retrieved);
        assertEquals(session.getId(), retrieved.getId());
        assertEquals("1", retrieved.getToken("t"));
    }

    @Test
    public void findByAccessTokenNotExisting() throws Exception {
        Session session = new Session();
        assertNull(repository.findByAccessToken(session.getAccessToken()));
    }

    @Test
    public void existsByAccessToken() throws Exception {
        Session session = new Session();
        repository.create(session);
        assertTrue(repository.existsByAccessToken(session.getAccessToken()));
    }

    @Test
    public void existsByAccessTokenNotExisting() throws Exception {
        Session session = new Session();
        assertFalse(repository.existsByAccessToken(session.getAccessToken()));
    }

    @Test
    public void addModuleToken() throws Exception {
        Session session = new Session();
        repository.create(session);
        assertTrue(repository.addModuleToken(session, "t", "1"));
        assertNotNull(repository.findByModuleToken("t", "1"));
    }

    /** Multiple module tokens on one session are independently retrievable. */
    @Test
    public void addModuleTokenMultipleTokens() throws Exception {
        Session session = new Session();
        repository.create(session);
        assertTrue(repository.addModuleToken(session, "t", "1"));
        assertTrue(repository.addModuleToken(session, "s", "2"));
        assertNotNull(repository.findByModuleToken("t", "1"));
        assertNotNull(repository.findByModuleToken("s", "2"));
    }

    /** Adding a token to an unknown session reports failure. */
    @Test
    public void addModuleTokenToNotExistingSession() throws Exception {
        Session session = new Session();
        assertFalse(repository.addModuleToken(session, "t1", "1"));
    }

    @Test
    public void findByModuleToken() throws Exception {
        Session session = new Session();
        repository.create(session);
        repository.addModuleToken(session, "t", "1");
        Session retrieved = repository.findByModuleToken("t", "1");
        assertNotNull(retrieved);
        assertEquals(session.getId(), retrieved.getId());
    }

    @Test
    public void findByModuleTokenNotExisting() throws Exception {
        Session retrieved = repository.findByModuleToken("t", "1");
        assertNull(retrieved);
    }

    /** Removing a module token keeps the session itself alive. */
    @Test
    public void deleteModuleToken() throws Exception {
        Session session = new Session();
        repository.create(session);
        repository.addModuleToken(session, "t", "1");
        assertTrue(repository.deleteModuleToken(session, "t"));
        assertTrue(repository.existsByAccessToken(session.getAccessToken()));
    }

    @Test
    public void deleteModuleTokenNotExistingSession() throws Exception {
        Session session = new Session();
        assertFalse(repository.deleteModuleToken(session, "t"));
    }

    @Test
    public void deleteModuleTokenNotExistingToken() throws Exception {
        Session session = new Session();
        repository.create(session);
        assertFalse(repository.deleteModuleToken(session, "t"));
    }

}
/*
 * Copyright 2005 Joe Walker
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.directwebremoting.proxy;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

import org.directwebremoting.ScriptBuffer;
import org.directwebremoting.ScriptSession;

/**
 * Class to help people send scripts to collections of browsers.
 * ScriptProxy also is the base class for the Java implementations of GI, Util
 * and Script.aculo.us.Effect.
 * @author Joe Walker [joe at getahead dot ltd dot uk]
 * @deprecated Use org.directwebremoting.ui.ScriptProxy
 * @see org.directwebremoting.ui.ScriptProxy
 */
@Deprecated
public class ScriptProxy
{
    /**
     * Http thread constructor
     */
    public ScriptProxy()
    {
    }

    /**
     * Http thread constructor
     * @param scriptSession The browser to alter
     */
    public ScriptProxy(ScriptSession scriptSession)
    {
        scriptSessions.add(scriptSession);
    }

    /**
     * Non-http thread constructor
     * @param scriptSessions The browsers to alter
     */
    public ScriptProxy(Collection<ScriptSession> scriptSessions)
    {
        this.scriptSessions.addAll(scriptSessions);
    }

    /**
     * @param scriptSession The script session to add to the list
     */
    public void addScriptSession(ScriptSession scriptSession)
    {
        scriptSessions.add(scriptSession);
    }

    /**
     * @param addScriptSessions The script sessions to add to the list
     */
    public void addScriptSessions(Collection<ScriptSession> addScriptSessions)
    {
        scriptSessions.addAll(addScriptSessions);
    }

    /**
     * Call a named function with no parameters.
     * @param funcName The name of the function to call
     */
    public void addFunctionCall(String funcName)
    {
        appendFunctionCall(funcName);
    }

    /**
     * Call a named function with one parameter.
     * @param funcName The name of the function to call
     * @param param1 The first parameter to the above function
     */
    public void addFunctionCall(String funcName, Object param1)
    {
        appendFunctionCall(funcName, param1);
    }

    /**
     * Call a named function with two parameters.
     * @param funcName The name of the function to call
     * @param param1 The first parameter to the above function
     * @param param2 The second parameter to the above function
     */
    public void addFunctionCall(String funcName, Object param1, Object param2)
    {
        appendFunctionCall(funcName, param1, param2);
    }

    /**
     * Call a named function with three parameters.
     * @param funcName The name of the function to call
     * @param param1 The first parameter to the above function
     * @param param2 The second parameter to the above function
     * @param param3 The third parameter to the above function
     */
    public void addFunctionCall(String funcName, Object param1, Object param2, Object param3)
    {
        appendFunctionCall(funcName, param1, param2, param3);
    }

    /**
     * Call a named function with four parameters.
     * @param funcName The name of the function to call
     * @param param1 The first parameter to the above function
     * @param param2 The second parameter to the above function
     * @param param3 The third parameter to the above function
     * @param param4 The fourth parameter to the above function
     */
    public void addFunctionCall(String funcName, Object param1, Object param2, Object param3, Object param4)
    {
        appendFunctionCall(funcName, param1, param2, param3, param4);
    }

    /**
     * Call a named function with five parameters.
     * @param funcName The name of the function to call
     * @param param1 The first parameter to the above function
     * @param param2 The second parameter to the above function
     * @param param3 The third parameter to the above function
     * @param param4 The fourth parameter to the above function
     * @param param5 The fifth parameter to the above function
     */
    public void addFunctionCall(String funcName, Object param1, Object param2, Object param3, Object param4, Object param5)
    {
        appendFunctionCall(funcName, param1, param2, param3, param4, param5);
    }

    /**
     * Call a named function with six parameters.
     * @param funcName The name of the function to call
     * @param param1 The first parameter to the above function
     * @param param2 The second parameter to the above function
     * @param param3 The third parameter to the above function
     * @param param4 The fourth parameter to the above function
     * @param param5 The fifth parameter to the above function
     * @param param6 The sixth parameter to the above function
     */
    public void addFunctionCall(String funcName, Object param1, Object param2, Object param3, Object param4, Object param5, Object param6)
    {
        appendFunctionCall(funcName, param1, param2, param3, param4, param5, param6);
    }

    /**
     * Call a named function with seven parameters.
     * @param funcName The name of the function to call
     * @param param1 The first parameter to the above function
     * @param param2 The second parameter to the above function
     * @param param3 The third parameter to the above function
     * @param param4 The fourth parameter to the above function
     * @param param5 The fifth parameter to the above function
     * @param param6 The sixth parameter to the above function
     * @param param7 The seventh parameter to the above function
     */
    public void addFunctionCall(String funcName, Object param1, Object param2, Object param3, Object param4, Object param5, Object param6, Object param7)
    {
        appendFunctionCall(funcName, param1, param2, param3, param4, param5, param6, param7);
    }

    /**
     * Shared implementation behind all the fixed-arity overloads above:
     * builds <code>funcName(param1,param2,...);</code> with each parameter
     * serialized via {@link ScriptBuffer#appendData(Object)} and sends it
     * to every known browser.
     * @param funcName The name of the function to call
     * @param params The parameters to the above function, in order
     */
    private void appendFunctionCall(String funcName, Object... params)
    {
        ScriptBuffer script = new ScriptBuffer();
        script.appendScript(funcName)
              .appendScript("(");
        for (int i = 0; i < params.length; i++)
        {
            if (i > 0)
            {
                script.appendScript(",");
            }
            script.appendData(params[i]);
        }
        script.appendScript(");");
        addScript(script);
    }

    /**
     * Utility to add the given script to all known browsers.
     * @param script The Javascript to send to the browsers
     */
    public void addScript(ScriptBuffer script)
    {
        for (ScriptSession scriptSession : scriptSessions)
        {
            scriptSession.addScript(script);
        }
    }

    /**
     * The browsers that we affect.
     */
    private final List<ScriptSession> scriptSessions = new ArrayList<ScriptSession>();
}
package com.github.truemped.heritrix;

import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.conn.scheme.Scheme;
import org.apache.http.conn.ssl.SSLSocketFactory;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.protocol.HTTP;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;

import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import java.io.*;
import java.security.*;
import java.security.cert.CertificateException;
import java.util.Arrays;
import java.util.List;

/**
 * Interface implementation using Apache HTTP Client 4.0.1
 *
 * Talks to the Heritrix crawler's HTTPS REST API ("engine") using basic-auth
 * credentials and a caller-supplied SSL trust store. Most operations POST an
 * "action" form parameter to a job URL and parse the XML response.
 */
public class HeritrixSessionImpl implements HeritrixSession {

    /**
     * The default http client.
     */
    private final DefaultHttpClient client;

    /**
     * Heritrix' base url.
     */
    private final String baseUrl;

    /**
     * Shared XML document builder used to parse all REST responses.
     * NOTE(review): DocumentBuilder is not thread-safe; confirm this class is
     * only used from a single thread.
     */
    private final DocumentBuilder documentBuilder;

    /**
     * My logger.
     */
    private final static Logger LOG = LoggerFactory.getLogger(HeritrixSessionImpl.class);

    /**
     * C'tor initializing the Heritrix session.
     *
     * @param keystoreFile The {@link java.io.File} containing the SSL certificates.
     * @param keyStorePassword A password for the keystore file.
     * @param hostname The hostname where Heritrix runs.
     * @param port The port on which Heritrix listens.
     * @param userName The Heritrix Web GUI Username.
     * @param password The Heritrix Web GUI password.
     *
     * @throws HeritrixSessionInitializationException Thrown if there have been problems initializing the session.
     */
    public HeritrixSessionImpl(final File keystoreFile, final String keyStorePassword, final String hostname,
            final int port, final String userName, final String password)
            throws HeritrixSessionInitializationException {
        KeyStore trust;
        SSLSocketFactory socketFactory;
        Scheme sch;
        try {
            trust = KeyStore.getInstance(KeyStore.getDefaultType());
            InputStream instream;
            try {
                instream = new FileInputStream(keystoreFile);
                try {
                    trust.load(instream, keyStorePassword.toCharArray());
                } catch (NoSuchAlgorithmException e) {
                    // Load failures are logged but not rethrown: the session
                    // continues with an empty trust store (best-effort).
                    LOG.error("Cannot load the trustfile!", e);
                } catch (CertificateException e) {
                    LOG.error("Cannot load the trustfile!", e);
                } catch (IOException e) {
                    LOG.error("Cannot load the trustfile!", e);
                } finally {
                    instream.close();
                }
            } catch (FileNotFoundException e) {
                LOG.error("Cannot load the trustfile!", e);
            } catch (IOException e) {
                LOG.error("Cannot load the trustfile!", e);
            }
            // Register an HTTPS scheme backed by the trust store and attach
            // basic-auth credentials scoped to the Heritrix host/port.
            socketFactory = new SSLSocketFactory(trust);
            sch = new Scheme("https", socketFactory, port);
            this.client = new DefaultHttpClient();
            this.client.getConnectionManager().getSchemeRegistry().register(sch);
            this.client.getCredentialsProvider().setCredentials(new AuthScope(hostname, port),
                    new UsernamePasswordCredentials(userName, password));
            this.baseUrl = "https://" + hostname + ":" + Integer.toString(port) + "/engine/";
        } catch (KeyStoreException e) {
            String msg = "Error with the keystore!";
            LOG.error(msg, e);
            throw new HeritrixSessionInitializationException(msg, e);
        } catch (KeyManagementException e) {
            String msg = "Error with the keystore!";
            LOG.error(msg, e);
            throw new HeritrixSessionInitializationException(msg, e);
        } catch (UnrecoverableKeyException e) {
            String msg = "Error with the keystore!";
            LOG.error(msg, e);
            throw new HeritrixSessionInitializationException(msg, e);
        } catch (NoSuchAlgorithmException e) {
            String msg = "Error with the keystore!";
            LOG.error(msg, e);
            throw new HeritrixSessionInitializationException(msg, e);
        }
        try {
            this.documentBuilder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
        } catch (ParserConfigurationException e) {
            String msg = "Error getting a XML document builder!";
            LOG.error(msg, e);
            throw new HeritrixSessionInitializationException(msg, e);
        }
    }

    /**
     * Execute a HTTP request.
     *
     * @param request The request to execute.
     * @return The parsed XML document.
     * @throws IOException When the communication broke.
     * @throws ClientProtocolException Upon HTTP communication errors.
     */
    private HttpResponse execute(final HttpRequestBase request) throws ClientProtocolException, IOException {
        return this.client.execute(request);
    }

    /**
     * Execute a HTTP request and parse the returning XML document.
     * Errors are logged and swallowed; callers must handle a null return.
     *
     * @param request The request to execute.
     * @return The parsed XML document, or null on any communication/parse error.
     */
    private Document executeXml(final HttpRequestBase request) {
        try {
            final HttpResponse response = execute(request);
            final HttpEntity entity = response.getEntity();
            return this.documentBuilder.parse(entity.getContent());
        } catch (ClientProtocolException e) {
            LOG.error("Error connecting to the server", e);
        } catch (IOException e) {
            LOG.error("IO error communicating with the server", e);
        } catch (IllegalStateException e) {
            LOG.error("Cannot parse returning XML. Url was: " + request.getRequestLine(), e);
        } catch (SAXException e) {
            LOG.error("Cannot parse returning XML. Url was: " + request.getRequestLine(), e);
        }
        return null;
    }

    /**
     * GET a XML document from the REST API.
     *
     * @param url The URL to GET.
     * @return THe parsed XML document.
     */
    private HttpResponse get(final String url) {
        final HttpGet getMethod = new HttpGet(url);
        try {
            return execute(getMethod);
        } catch (ClientProtocolException e) {
            LOG.error("Error connecting to the server", e);
        } catch (IOException e) {
            LOG.error("Error connecting to the server", e);
        }
        // Null on any failure; callers must check.
        return null;
    }

    /**
     * GET a XML document from the REST API and parse the returned XML.
     *
     * @param url The URL to GET.
     * @return THe parsed XML document.
     */
    private Document getXml(final String url) {
        final HttpGet getMethod = new HttpGet(url);
        // Ask Heritrix for the XML (rather than HTML) representation.
        getMethod.addHeader("accept", "application/xml");
        return executeXml(getMethod);
    }

    /**
     * Send a POST request to the REST API and read the returning XML.
     *
     * @param url The URL to POST to.
     * @param nameValuePairs A list of {@link org.apache.http.NameValuePair}s.
     * @return The parsed XML Document.
     */
    private Document postXml(final String url, final NameValuePair... nameValuePairs) {
        final HttpPost postMethod = new HttpPost(url);
        postMethod.addHeader("Accept", "application/xml");
        if (nameValuePairs.length > 0) {
            // Encode the pairs as a standard UTF-8 HTML form body.
            final List<NameValuePair> params = Arrays.asList(nameValuePairs);
            try {
                postMethod.setEntity(new UrlEncodedFormEntity(params, HTTP.UTF_8));
            } catch (UnsupportedEncodingException e) {
                LOG.error("Enconding not supported!?", e);
            }
        }
        return executeXml(postMethod);
    }

    /**
     * Send a PUT request to the REST API.
     *
     * @param url The exact URL to PUT to.
     * @param data The data that should be sent.
     * @return The HTTP Response.
     */
    private HttpResponse put(final String url, final String data) {
        final HttpPut putMethod = new HttpPut(url);
        try {
            // NOTE(review): data.getBytes() uses the platform default charset;
            // presumably UTF-8 is intended — confirm against the Heritrix API.
            putMethod.setEntity(new ByteArrayEntity(data.getBytes()));
            return execute(putMethod);
        } catch (ClientProtocolException e) {
            LOG.error("Error connecting to the server", e);
        } catch (IOException e) {
            LOG.error("Error connecting to the server", e);
        }
        return null;
    }

    /**
     * @see com.github.truemped.heritrix.HeritrixSession#getJobStatus(String)
     */
    @Override
    public Document getJobStatus(final String jobName) {
        return getXml(this.baseUrl + "job/" + jobName);
    }

    /**
     * @see com.github.truemped.heritrix.HeritrixSession#isJobRunning(String)
     */
    @Override
    public boolean isJobRunning(final String jobName) {
        // NOTE(review): getJobStatus() can return null on communication
        // errors; xPath.evaluate would then NPE — confirm intended behavior.
        final Document d = getJobStatus(jobName);
        final XPath xPath = XPathFactory.newInstance().newXPath();
        boolean isRunning = false;
        try {
            final String res = xPath.evaluate("//job/statusDescription", d);
            if (res.equals("Active: RUNNING")) {
                isRunning = true;
            }
        } catch (XPathExpressionException e) {
            LOG.error("could not read status from jobdescription", e);
        }
        return isRunning;
    }

    /**
     * @see com.github.truemped.heritrix.HeritrixSession#isPaused(String)
     */
    @Override
    public boolean isPaused(final String jobName) {
        final Document d = getJobStatus(jobName);
        final XPath xPath = XPathFactory.newInstance().newXPath();
        boolean isRunning = false;
        try {
            final String res = xPath.evaluate("//job/statusDescription", d);
            if (res.equals("Active: PAUSED")) {
                isRunning = true;
            }
        } catch (XPathExpressionException e) {
            LOG.error("could not read status from jobdescription", e);
        }
        return isRunning;
    }

    /**
     * @see com.github.truemped.heritrix.HeritrixSession#createJob(String)
     */
    @Override
    public void createJob(final String jobName) {
        final NameValuePair addPath = new BasicNameValuePair("addpath", jobName);
        final NameValuePair action = new BasicNameValuePair("action", "create");
        postXml(this.baseUrl, addPath, action);
    }

    /**
     * @see HeritrixSession#rescanJobDirectory()
     */
    @Override
    public Document rescanJobDirectory() {
        return postXml(this.baseUrl, new BasicNameValuePair("action", "rescan"));
    }

    /**
     * @see com.github.truemped.heritrix.HeritrixSession#pauseJob(String)
     */
    @Override
    public void pauseJob(final String jobName) {
        // Only running jobs can be paused; otherwise just log.
        if (isJobRunning(jobName)) {
            postXml(this.baseUrl + "job/" + jobName, new BasicNameValuePair("action", "pause"));
        } else {
            LOG.info("job is not running, could not be paused");
        }
    }

    /**
     * @see com.github.truemped.heritrix.HeritrixSession#unpauseJob(String)
     */
    @Override
    public void unpauseJob(final String jobName) {
        if (!isJobRunning(jobName)) {
            postXml(this.baseUrl + "job/" + jobName, new BasicNameValuePair("action", "unpause"));
        } else {
            LOG.info("job is running, could not unpause");
        }
    }

    /**
     * @see com.github.truemped.heritrix.HeritrixSession#buildJob(String)
     */
    @Override
    public boolean buildJob(final String jobName) {
        final Document doc = postXml(this.baseUrl + "job/" + jobName, new BasicNameValuePair("action", "build"));
        final XPath xpath = XPathFactory.newInstance().newXPath();
        NodeList jobs;
        try {
            jobs = (NodeList) xpath.evaluate("job/statusDescription", doc, XPathConstants.NODESET);
            for (int i = 0; i < jobs.getLength(); i++) {
                // Still "Unbuilt" after the build request => build failed.
                if (jobs.item(i).getFirstChild().getTextContent().equals("Unbuilt")) {
                    return false;
                }
            }
        } catch (XPathExpressionException e) {
            LOG.error("could not read the existing jobs", e);
        }
        return true;
    }

    /**
     * @see com.github.truemped.heritrix.HeritrixSession#launchJob(String)
     */
    @Override
    public boolean launchJob(final String jobName) {
        final Document doc = postXml(this.baseUrl + "job/" + jobName, new BasicNameValuePair("action", "launch"));
        final XPath xpath = XPathFactory.newInstance().newXPath();
        NodeList jobs;
        try {
            jobs = (NodeList) xpath.evaluate("job/statusDescription", doc, XPathConstants.NODESET);
            for (int i = 0; i < jobs.getLength(); i++) {
                // "Ready" after launch => success.
                if (jobs.item(i).getFirstChild().getTextContent().equals("Ready")) {
                    return true;
                }
            }
        } catch (XPathExpressionException e) {
            LOG.error("could not read the existing jobs", e);
        }
        return false;
    }

    /**
     * @see com.github.truemped.heritrix.HeritrixSession#terminateJob(String)
     */
    @Override
    public void terminateJob(final String jobName) {
        if (isJobRunning(jobName)) {
            postXml(this.baseUrl + "job/" + jobName, new BasicNameValuePair("action", "terminate"));
        } else {
            LOG.info("job is not running");
        }
    }

    /**
     * @see com.github.truemped.heritrix.HeritrixSession#tearDownJob(String)
     */
    @Override
    public void tearDownJob(final String jobName) {
        postXml(this.baseUrl + "job/" + jobName, new BasicNameValuePair("action", "teardown"));
    }

    /**
     * @see com.github.truemped.heritrix.HeritrixSession#checkpointJob(String)
     */
    @Override
    public void checkpointJob(final String jobName) {
        postXml(this.baseUrl + "job/" + jobName, new BasicNameValuePair("action", "checkpoint"));
    }

    /**
     * @see com.github.truemped.heritrix.HeritrixSession#copyJob(String, String, boolean)
     */
    @Override
    public void copyJob(final String original, final String jobName, final boolean asProfile) {
        // NOTE(review): the asProfile parameter is ignored — the request never
        // sends an "asProfile" flag, so the copy is presumably never created
        // as a profile. Confirm against the Heritrix REST API and fix.
        final String url = this.baseUrl + "job/" + original;
        postXml(url, new BasicNameValuePair("copyTo", jobName));
    }

    /**
     * @see com.github.truemped.heritrix.HeritrixSession#updateConfig(String, String)
     */
    @Override
    public void updateConfig(final String jobName, final String cXml) {
        final String url = this.baseUrl + "job/" + jobName + "/jobdir/crawler-beans.cxml";
        final HttpResponse resp = put(url, cXml);
        // NOTE(review): put() returns null on communication errors; this would
        // then NPE instead of logging — confirm intended behavior.
        try {
            // Drain the response body so the connection can be reused.
            resp.getEntity().consumeContent();
        } catch (IOException e) {
            LOG.error("Could not consume the content", e);
        }
    }

    /**
     * @see com.github.truemped.heritrix.HeritrixSession#getCrawlLog(String)
     */
    @Override
    public HttpResponse getCrawlLog(String jobName) {
        return get(this.baseUrl + "job/" + jobName + "/jobdir/logs/crawl.log");
    }

    /**
     * @see com.github.truemped.heritrix.HeritrixSession#jobExists(String)
     */
    @Override
    public boolean jobExists(String jobName) {
        // Rescan first so jobs added on disk since the last scan are visible.
        final Document rescanDoc = rescanJobDirectory();
        final XPath xpath = XPathFactory.newInstance().newXPath();
        NodeList jobs;
        try {
            jobs = (NodeList) xpath.evaluate("engine/jobs/value/shortName", rescanDoc, XPathConstants.NODESET);
            for (int i = 0; i < jobs.getLength(); i++) {
                if (jobs.item(i).getFirstChild().getTextContent().equals(jobName)) {
                    return true;
                }
            }
        } catch (XPathExpressionException e) {
            LOG.error("could not read the existing jobs", e);
        }
        return false;
    }

}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.

package com.azure.resourcemanager.relay.implementation;

import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.Response;
import com.azure.core.http.rest.SimpleResponse;
import com.azure.core.util.Context;
import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.relay.RelayManager;
import com.azure.resourcemanager.relay.fluent.HybridConnectionsClient;
import com.azure.resourcemanager.relay.fluent.models.AccessKeysInner;
import com.azure.resourcemanager.relay.fluent.models.AuthorizationRuleInner;
import com.azure.resourcemanager.relay.fluent.models.HybridConnectionInner;
import com.azure.resourcemanager.relay.models.AccessKeys;
import com.azure.resourcemanager.relay.models.AuthorizationRule;
import com.azure.resourcemanager.relay.models.HybridConnection;
import com.azure.resourcemanager.relay.models.HybridConnections;
import com.azure.resourcemanager.relay.models.RegenerateAccessKeyParameters;
import com.fasterxml.jackson.annotation.JsonIgnore;

/**
 * Resource-collection implementation of {@link HybridConnections}: wraps the
 * auto-generated {@link HybridConnectionsClient} and converts fluent "inner"
 * model types into their public-facing Impl wrappers.
 *
 * NOTE(review): this file is AutoRest code-generated — do not hand-edit logic;
 * changes belong in the generator configuration or they will be lost on
 * regeneration.
 */
public final class HybridConnectionsImpl implements HybridConnections {
    @JsonIgnore private final ClientLogger logger = new ClientLogger(HybridConnectionsImpl.class);

    // Generated low-level REST client that performs the actual service calls.
    private final HybridConnectionsClient innerClient;

    // Entry-point manager; passed into every wrapper so it can reach siblings.
    private final RelayManager serviceManager;

    public HybridConnectionsImpl(HybridConnectionsClient innerClient, RelayManager serviceManager) {
        this.innerClient = innerClient;
        this.serviceManager = serviceManager;
    }

    // ---- Hybrid connection CRUD -------------------------------------------

    /** Lists hybrid connections in a namespace, lazily mapping pages to wrappers. */
    public PagedIterable<HybridConnection> listByNamespace(String resourceGroupName, String namespaceName) {
        PagedIterable<HybridConnectionInner> inner =
            this.serviceClient().listByNamespace(resourceGroupName, namespaceName);
        return inner.mapPage(inner1 -> new HybridConnectionImpl(inner1, this.manager()));
    }

    /** Same as above with an explicit {@link Context} for per-call policies. */
    public PagedIterable<HybridConnection> listByNamespace(
        String resourceGroupName, String namespaceName, Context context) {
        PagedIterable<HybridConnectionInner> inner =
            this.serviceClient().listByNamespace(resourceGroupName, namespaceName, context);
        return inner.mapPage(inner1 -> new HybridConnectionImpl(inner1, this.manager()));
    }

    public void delete(String resourceGroupName, String namespaceName, String hybridConnectionName) {
        this.serviceClient().delete(resourceGroupName, namespaceName, hybridConnectionName);
    }

    public Response<Void> deleteWithResponse(
        String resourceGroupName, String namespaceName, String hybridConnectionName, Context context) {
        return this.serviceClient().deleteWithResponse(resourceGroupName, namespaceName, hybridConnectionName, context);
    }

    /** Gets one hybrid connection; returns null when the service returns no body. */
    public HybridConnection get(String resourceGroupName, String namespaceName, String hybridConnectionName) {
        HybridConnectionInner inner =
            this.serviceClient().get(resourceGroupName, namespaceName, hybridConnectionName);
        if (inner != null) {
            return new HybridConnectionImpl(inner, this.manager());
        } else {
            return null;
        }
    }

    /** Response-returning variant; rewraps the inner payload, preserving status/headers. */
    public Response<HybridConnection> getWithResponse(
        String resourceGroupName, String namespaceName, String hybridConnectionName, Context context) {
        Response<HybridConnectionInner> inner =
            this.serviceClient().getWithResponse(resourceGroupName, namespaceName, hybridConnectionName, context);
        if (inner != null) {
            return new SimpleResponse<>(
                inner.getRequest(),
                inner.getStatusCode(),
                inner.getHeaders(),
                new HybridConnectionImpl(inner.getValue(), this.manager()));
        } else {
            return null;
        }
    }

    // ---- Authorization rules ----------------------------------------------

    public PagedIterable<AuthorizationRule> listAuthorizationRules(
        String resourceGroupName, String namespaceName, String hybridConnectionName) {
        PagedIterable<AuthorizationRuleInner> inner =
            this.serviceClient().listAuthorizationRules(resourceGroupName, namespaceName, hybridConnectionName);
        return inner.mapPage(inner1 -> new AuthorizationRuleImpl(inner1, this.manager()));
    }

    public PagedIterable<AuthorizationRule> listAuthorizationRules(
        String resourceGroupName, String namespaceName, String hybridConnectionName, Context context) {
        PagedIterable<AuthorizationRuleInner> inner =
            this
                .serviceClient()
                .listAuthorizationRules(resourceGroupName, namespaceName, hybridConnectionName, context);
        return inner.mapPage(inner1 -> new AuthorizationRuleImpl(inner1, this.manager()));
    }

    public AuthorizationRule createOrUpdateAuthorizationRule(
        String resourceGroupName,
        String namespaceName,
        String hybridConnectionName,
        String authorizationRuleName,
        AuthorizationRuleInner parameters) {
        AuthorizationRuleInner inner =
            this
                .serviceClient()
                .createOrUpdateAuthorizationRule(
                    resourceGroupName, namespaceName, hybridConnectionName, authorizationRuleName, parameters);
        if (inner != null) {
            return new AuthorizationRuleImpl(inner, this.manager());
        } else {
            return null;
        }
    }

    public Response<AuthorizationRule> createOrUpdateAuthorizationRuleWithResponse(
        String resourceGroupName,
        String namespaceName,
        String hybridConnectionName,
        String authorizationRuleName,
        AuthorizationRuleInner parameters,
        Context context) {
        Response<AuthorizationRuleInner> inner =
            this
                .serviceClient()
                .createOrUpdateAuthorizationRuleWithResponse(
                    resourceGroupName, namespaceName, hybridConnectionName, authorizationRuleName, parameters, context);
        if (inner != null) {
            return new SimpleResponse<>(
                inner.getRequest(),
                inner.getStatusCode(),
                inner.getHeaders(),
                new AuthorizationRuleImpl(inner.getValue(), this.manager()));
        } else {
            return null;
        }
    }

    public void deleteAuthorizationRule(
        String resourceGroupName, String namespaceName, String hybridConnectionName, String authorizationRuleName) {
        this
            .serviceClient()
            .deleteAuthorizationRule(resourceGroupName, namespaceName, hybridConnectionName, authorizationRuleName);
    }

    public Response<Void> deleteAuthorizationRuleWithResponse(
        String resourceGroupName,
        String namespaceName,
        String hybridConnectionName,
        String authorizationRuleName,
        Context context) {
        return this
            .serviceClient()
            .deleteAuthorizationRuleWithResponse(
                resourceGroupName, namespaceName, hybridConnectionName, authorizationRuleName, context);
    }

    public AuthorizationRule getAuthorizationRule(
        String resourceGroupName, String namespaceName, String hybridConnectionName, String authorizationRuleName) {
        AuthorizationRuleInner inner =
            this
                .serviceClient()
                .getAuthorizationRule(resourceGroupName, namespaceName, hybridConnectionName, authorizationRuleName);
        if (inner != null) {
            return new AuthorizationRuleImpl(inner, this.manager());
        } else {
            return null;
        }
    }

    public Response<AuthorizationRule> getAuthorizationRuleWithResponse(
        String resourceGroupName,
        String namespaceName,
        String hybridConnectionName,
        String authorizationRuleName,
        Context context) {
        Response<AuthorizationRuleInner> inner =
            this
                .serviceClient()
                .getAuthorizationRuleWithResponse(
                    resourceGroupName, namespaceName, hybridConnectionName, authorizationRuleName, context);
        if (inner != null) {
            return new SimpleResponse<>(
                inner.getRequest(),
                inner.getStatusCode(),
                inner.getHeaders(),
                new AuthorizationRuleImpl(inner.getValue(), this.manager()));
        } else {
            return null;
        }
    }

    // ---- Access keys --------------------------------------------------------

    public AccessKeys listKeys(
        String resourceGroupName, String namespaceName, String hybridConnectionName, String authorizationRuleName) {
        AccessKeysInner inner =
            this
                .serviceClient()
                .listKeys(resourceGroupName, namespaceName, hybridConnectionName, authorizationRuleName);
        if (inner != null) {
            return new AccessKeysImpl(inner, this.manager());
        } else {
            return null;
        }
    }

    public Response<AccessKeys> listKeysWithResponse(
        String resourceGroupName,
        String namespaceName,
        String hybridConnectionName,
        String authorizationRuleName,
        Context context) {
        Response<AccessKeysInner> inner =
            this
                .serviceClient()
                .listKeysWithResponse(
                    resourceGroupName, namespaceName, hybridConnectionName, authorizationRuleName, context);
        if (inner != null) {
            return new SimpleResponse<>(
                inner.getRequest(),
                inner.getStatusCode(),
                inner.getHeaders(),
                new AccessKeysImpl(inner.getValue(), this.manager()));
        } else {
            return null;
        }
    }

    public AccessKeys regenerateKeys(
        String resourceGroupName,
        String namespaceName,
        String hybridConnectionName,
        String authorizationRuleName,
        RegenerateAccessKeyParameters parameters) {
        AccessKeysInner inner =
            this
                .serviceClient()
                .regenerateKeys(
                    resourceGroupName, namespaceName, hybridConnectionName, authorizationRuleName, parameters);
        if (inner != null) {
            return new AccessKeysImpl(inner, this.manager());
        } else {
            return null;
        }
    }

    public Response<AccessKeys> regenerateKeysWithResponse(
        String resourceGroupName,
        String namespaceName,
        String hybridConnectionName,
        String authorizationRuleName,
        RegenerateAccessKeyParameters parameters,
        Context context) {
        Response<AccessKeysInner> inner =
            this
                .serviceClient()
                .regenerateKeysWithResponse(
                    resourceGroupName, namespaceName, hybridConnectionName, authorizationRuleName, parameters, context);
        if (inner != null) {
            return new SimpleResponse<>(
                inner.getRequest(),
                inner.getStatusCode(),
                inner.getHeaders(),
                new AccessKeysImpl(inner.getValue(), this.manager()));
        } else {
            return null;
        }
    }

    // ---- By-ID helpers: parse ARM resource IDs and delegate -----------------

    /**
     * Resolves a full ARM resource ID into its path segments and fetches the
     * hybrid connection; throws IllegalArgumentException (via the logger) when
     * a required segment is missing from the ID.
     */
    public HybridConnection getById(String id) {
        String resourceGroupName = Utils.getValueFromIdByName(id, "resourceGroups");
        if (resourceGroupName == null) {
            throw logger
                .logExceptionAsError(
                    new IllegalArgumentException(
                        String
                            .format("The resource ID '%s' is not valid. Missing path segment 'resourceGroups'.", id)));
        }
        String namespaceName = Utils.getValueFromIdByName(id, "namespaces");
        if (namespaceName == null) {
            throw logger
                .logExceptionAsError(
                    new IllegalArgumentException(
                        String.format("The resource ID '%s' is not valid. Missing path segment 'namespaces'.", id)));
        }
        String hybridConnectionName = Utils.getValueFromIdByName(id, "hybridConnections");
        if (hybridConnectionName == null) {
            throw logger
                .logExceptionAsError(
                    new IllegalArgumentException(
                        String
                            .format(
                                "The resource ID '%s' is not valid. Missing path segment 'hybridConnections'.", id)));
        }
        return this.getWithResponse(resourceGroupName, namespaceName, hybridConnectionName, Context.NONE).getValue();
    }

    public Response<HybridConnection> getByIdWithResponse(String id, Context context) {
        String resourceGroupName = Utils.getValueFromIdByName(id, "resourceGroups");
        if (resourceGroupName == null) {
            throw logger
                .logExceptionAsError(
                    new IllegalArgumentException(
                        String
                            .format("The resource ID '%s' is not valid. Missing path segment 'resourceGroups'.", id)));
        }
        String namespaceName = Utils.getValueFromIdByName(id, "namespaces");
        if (namespaceName == null) {
            throw logger
                .logExceptionAsError(
                    new IllegalArgumentException(
                        String.format("The resource ID '%s' is not valid. Missing path segment 'namespaces'.", id)));
        }
        String hybridConnectionName = Utils.getValueFromIdByName(id, "hybridConnections");
        if (hybridConnectionName == null) {
            throw logger
                .logExceptionAsError(
                    new IllegalArgumentException(
                        String
                            .format(
                                "The resource ID '%s' is not valid. Missing path segment 'hybridConnections'.", id)));
        }
        return this.getWithResponse(resourceGroupName, namespaceName, hybridConnectionName, context);
    }

    public void deleteById(String id) {
        String resourceGroupName = Utils.getValueFromIdByName(id, "resourceGroups");
        if (resourceGroupName == null) {
            throw logger
                .logExceptionAsError(
                    new IllegalArgumentException(
                        String
                            .format("The resource ID '%s' is not valid. Missing path segment 'resourceGroups'.", id)));
        }
        String namespaceName = Utils.getValueFromIdByName(id, "namespaces");
        if (namespaceName == null) {
            throw logger
                .logExceptionAsError(
                    new IllegalArgumentException(
                        String.format("The resource ID '%s' is not valid. Missing path segment 'namespaces'.", id)));
        }
        String hybridConnectionName = Utils.getValueFromIdByName(id, "hybridConnections");
        if (hybridConnectionName == null) {
            throw logger
                .logExceptionAsError(
                    new IllegalArgumentException(
                        String
                            .format(
                                "The resource ID '%s' is not valid. Missing path segment 'hybridConnections'.", id)));
        }
        // Response<Void> — the generated code discards the (empty) return value.
        this.deleteWithResponse(resourceGroupName, namespaceName, hybridConnectionName, Context.NONE).getValue();
    }

    public Response<Void> deleteByIdWithResponse(String id, Context context) {
        String resourceGroupName = Utils.getValueFromIdByName(id, "resourceGroups");
        if (resourceGroupName == null) {
            throw logger
                .logExceptionAsError(
                    new IllegalArgumentException(
                        String
                            .format("The resource ID '%s' is not valid. Missing path segment 'resourceGroups'.", id)));
        }
        String namespaceName = Utils.getValueFromIdByName(id, "namespaces");
        if (namespaceName == null) {
            throw logger
                .logExceptionAsError(
                    new IllegalArgumentException(
                        String.format("The resource ID '%s' is not valid. Missing path segment 'namespaces'.", id)));
        }
        String hybridConnectionName = Utils.getValueFromIdByName(id, "hybridConnections");
        if (hybridConnectionName == null) {
            throw logger
                .logExceptionAsError(
                    new IllegalArgumentException(
                        String
                            .format(
                                "The resource ID '%s' is not valid. Missing path segment 'hybridConnections'.", id)));
        }
        return this.deleteWithResponse(resourceGroupName, namespaceName, hybridConnectionName, context);
    }

    private HybridConnectionsClient serviceClient() {
        return this.innerClient;
    }

    private RelayManager manager() {
        return this.serviceManager;
    }

    /** Begins a fluent definition of a new hybrid connection with the given name. */
    public HybridConnectionImpl define(String name) {
        return new HybridConnectionImpl(name, this.manager());
    }
}
/* * Copyright (C) 2013 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.tools.idea.designer; import com.intellij.android.designer.designSurface.graphics.DirectionResizePoint; import com.intellij.android.designer.model.RadViewComponent; import com.intellij.designer.palette.PaletteItem; import com.intellij.designer.utils.Position; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; /** * A {@link ResizePolicy} records state for whether a widget is resizable, and if so, in * which directions * <p/> * <b>NOTE: This is not a public or final API; if you rely on this be prepared * to adjust your code for the next tools release.</b> */ public class ResizePolicy { private static final int NONE = 0; private static final int LEFT_EDGE = 1; private static final int RIGHT_EDGE = 2; private static final int TOP_EDGE = 4; private static final int BOTTOM_EDGE = 8; private static final int PRESERVE_RATIO = 16; // Aliases private static final int HORIZONTAL = LEFT_EDGE | RIGHT_EDGE; private static final int VERTICAL = TOP_EDGE | BOTTOM_EDGE; private static final int ANY = HORIZONTAL | VERTICAL; // Shared objects for common policies private static final ResizePolicy ourAny = new ResizePolicy(ANY); private static final ResizePolicy ourNone = new ResizePolicy(NONE); private static final ResizePolicy ourHorizontal = new ResizePolicy(HORIZONTAL); private static final 
ResizePolicy ourVertical = new ResizePolicy(VERTICAL); private static final ResizePolicy ourScaled = new ResizePolicy(ANY | PRESERVE_RATIO); private final int myFlags; // Use factory methods to construct private ResizePolicy(int flags) { myFlags = flags; } /** * Returns true if this policy allows resizing in at least one direction * * @return true if this policy allows resizing in at least one direction */ public boolean isResizable() { return (myFlags & ANY) != 0; } /** * Returns true if this policy allows resizing the top edge * * @return true if this policy allows resizing the top edge */ public boolean topAllowed() { return (myFlags & TOP_EDGE) != 0; } /** * Returns true if this policy allows resizing the right edge * * @return true if this policy allows resizing the right edge */ public boolean rightAllowed() { return (myFlags & RIGHT_EDGE) != 0; } /** * Returns true if this policy allows resizing the bottom edge * * @return true if this policy allows resizing the bottom edge */ public boolean bottomAllowed() { return (myFlags & BOTTOM_EDGE) != 0; } /** * Returns true if this policy allows resizing the left edge * * @return true if this policy allows resizing the left edge */ public boolean leftAllowed() { return (myFlags & LEFT_EDGE) != 0; } /** * Returns true if this policy requires resizing in an aspect-ratio preserving manner * * @return true if this policy requires resizing in an aspect-ratio preserving manner */ public boolean isAspectPreserving() { return (myFlags & PRESERVE_RATIO) != 0; } /** * Returns a resize policy allowing resizing in any direction * * @return a resize policy allowing resizing in any direction */ @NotNull public static ResizePolicy full() { return ourAny; } /** * Returns a resize policy not allowing any resizing * * @return a policy which does not allow any resizing */ @NotNull public static ResizePolicy none() { return ourNone; } /** * Returns a resize policy allowing horizontal resizing only * * @return a policy which allows 
horizontal resizing only */ @NotNull public static ResizePolicy horizontal() { return ourHorizontal; } /** * Returns a resize policy allowing vertical resizing only * * @return a policy which allows vertical resizing only */ @NotNull public static ResizePolicy vertical() { return ourVertical; } /** * Returns a resize policy allowing scaled / aspect-ratio preserving resizing only * * @return a resize policy allowing scaled / aspect-ratio preserving resizing only */ @NotNull public static ResizePolicy scaled() { return ourScaled; } /** * Returns a resize policy with the specified resizability along the edges and the * given aspect ratio behavior * * @param top whether the top edge is resizable * @param right whether the right edge is resizable * @param bottom whether the bottom edge is resizable * @param left whether the left edge is resizable * @param preserve whether the policy requires the aspect ratio to be preserved * @return a resize policy recording the constraints required by the parameters */ @NotNull public static ResizePolicy create(boolean top, boolean right, boolean bottom, boolean left, boolean preserve) { int mask = NONE; if (top) mask |= TOP_EDGE; if (right) mask |= RIGHT_EDGE; if (bottom) mask |= BOTTOM_EDGE; if (left) mask |= LEFT_EDGE; if (preserve) mask |= PRESERVE_RATIO; return new ResizePolicy(mask); } /** * Returns the {@link ResizePolicy} for the given policy description. 
* * @param resize the string describing the resize policy; one of "full", "none", * "horizontal", "vertical", or "scaled" * @return the {@link ResizePolicy} for the widget, which will never be null (but may * be the default of {@link ResizePolicy#full()} if no metadata is found for * the given widget) */ @Nullable public static ResizePolicy get(@Nullable @NonNls String resize) { if (resize != null && resize.length() > 0) { if ("full".equals(resize)) { return full(); } else if ("none".equals(resize)) { return none(); } else if ("horizontal".equals(resize)) { return horizontal(); } else if ("vertical".equals(resize)) { return vertical(); } else if ("scaled".equals(resize)) { return scaled(); } else { assert false : resize; } } return null; } /** * Returns the {@link ResizePolicy} for the given component * * @param component the component to look up a resize policy for * @return a suitable {@linkplain ResizePolicy} */ @NotNull public static ResizePolicy getResizePolicy(@NotNull RadViewComponent component) { PaletteItem paletteItem = component.getInitialPaletteItem(); if (paletteItem instanceof AndroidVariationPaletteItem) { AndroidVariationPaletteItem item = (AndroidVariationPaletteItem)paletteItem; ResizePolicy policy = item.getResizePolicy(); if (policy != null) { return policy; } } return component.getMetaModel().getResizePolicy(); } public boolean applies(@NotNull DirectionResizePoint point) { return applies(point.getDirection()); } protected boolean applies(int direction) { switch (direction) { case Position.EAST: return rightAllowed(); case Position.WEST: return leftAllowed(); case Position.NORTH: return topAllowed(); case Position.SOUTH: return bottomAllowed(); case Position.NORTH_EAST: return rightAllowed() && topAllowed(); case Position.SOUTH_EAST: return rightAllowed() && bottomAllowed(); case Position.SOUTH_WEST: return rightAllowed() && bottomAllowed(); case Position.NORTH_WEST: return rightAllowed() && topAllowed(); default: return false; } } }
/*
 * Licensed to ElasticSearch and Shay Banon under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. ElasticSearch licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.mapper.string;

import org.apache.lucene.index.FieldInfo;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.MapperTestUtils;
import org.junit.Test;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.*;

/**
 * Unit tests for the string field mapper: the {@code ignore_above} length
 * cutoff, the Lucene field-type defaults for analyzed vs. not_analyzed
 * fields, and the mapping of {@code term_vector} option strings onto the
 * individual Lucene term-vector flags.
 */
public class SimpleStringMappingTests {

    /**
     * ignore_above=5 must index values of length <= 5 and silently drop
     * longer values (the field is absent from the resulting document).
     */
    @Test
    public void testLimit() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("field").field("type", "string").field("ignore_above", 5).endObject().endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = MapperTestUtils.newParser().parse(mapping);

        // length 4 — under the limit, field is indexed
        ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .field("field", "1234")
                .endObject()
                .bytes());

        assertThat(doc.rootDoc().getField("field"), notNullValue());

        // length 5 — exactly at the limit, still indexed (limit is inclusive)
        doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .field("field", "12345")
                .endObject()
                .bytes());

        assertThat(doc.rootDoc().getField("field"), notNullValue());

        // length 6 — over the limit, value dropped
        doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .field("field", "123456")
                .endObject()
                .bytes());

        assertThat(doc.rootDoc().getField("field"), nullValue());
    }

    /**
     * An analyzed string field defaults to norms enabled, full positional
     * index options, and no term vectors.
     */
    @Test
    public void testDefaultsForAnalyzed() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("field").field("type", "string").endObject().endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = MapperTestUtils.newParser().parse(mapping);

        ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .field("field", "1234")
                .endObject()
                .bytes());

        assertThat(doc.rootDoc().getField("field").fieldType().omitNorms(), equalTo(false));
        assertThat(doc.rootDoc().getField("field").fieldType().indexOptions(), equalTo(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS));
        assertThat(doc.rootDoc().getField("field").fieldType().storeTermVectors(), equalTo(false));
        assertThat(doc.rootDoc().getField("field").fieldType().storeTermVectorOffsets(), equalTo(false));
        assertThat(doc.rootDoc().getField("field").fieldType().storeTermVectorPositions(), equalTo(false));
        assertThat(doc.rootDoc().getField("field").fieldType().storeTermVectorPayloads(), equalTo(false));
    }

    /**
     * A not_analyzed string field defaults to norms omitted and docs-only
     * index options — and both defaults must be overridable explicitly.
     */
    @Test
    public void testDefaultsForNotAnalyzed() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("field").field("type", "string").field("index", "not_analyzed").endObject().endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = MapperTestUtils.newParser().parse(mapping);

        ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .field("field", "1234")
                .endObject()
                .bytes());

        assertThat(doc.rootDoc().getField("field").fieldType().omitNorms(), equalTo(true));
        assertThat(doc.rootDoc().getField("field").fieldType().indexOptions(), equalTo(FieldInfo.IndexOptions.DOCS_ONLY));
        assertThat(doc.rootDoc().getField("field").fieldType().storeTermVectors(), equalTo(false));
        assertThat(doc.rootDoc().getField("field").fieldType().storeTermVectorOffsets(), equalTo(false));
        assertThat(doc.rootDoc().getField("field").fieldType().storeTermVectorPositions(), equalTo(false));
        assertThat(doc.rootDoc().getField("field").fieldType().storeTermVectorPayloads(), equalTo(false));

        // now test it explicitly set
        mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("field").field("type", "string").field("index", "not_analyzed").field("omit_norms", false).field("index_options", "freqs").endObject().endObject()
                .endObject().endObject().string();

        defaultMapper = MapperTestUtils.newParser().parse(mapping);

        doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .field("field", "1234")
                .endObject()
                .bytes());

        assertThat(doc.rootDoc().getField("field").fieldType().omitNorms(), equalTo(false));
        assertThat(doc.rootDoc().getField("field").fieldType().indexOptions(), equalTo(FieldInfo.IndexOptions.DOCS_AND_FREQS));
        assertThat(doc.rootDoc().getField("field").fieldType().storeTermVectors(), equalTo(false));
        assertThat(doc.rootDoc().getField("field").fieldType().storeTermVectorOffsets(), equalTo(false));
        assertThat(doc.rootDoc().getField("field").fieldType().storeTermVectorPositions(), equalTo(false));
        assertThat(doc.rootDoc().getField("field").fieldType().storeTermVectorPayloads(), equalTo(false));
    }

    /**
     * Each term_vector mapping string ("no", "yes", "with_offsets",
     * "with_positions", "with_positions_offsets",
     * "with_positions_offsets_payloads") must set exactly the matching
     * combination of Lucene term-vector flags.
     */
    @Test
    public void testTermVectors() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties")
                .startObject("field1")
                    .field("type", "string")
                    .field("term_vector", "no")
                .endObject()
                .startObject("field2")
                    .field("type", "string")
                    .field("term_vector", "yes")
                .endObject()
                .startObject("field3")
                    .field("type", "string")
                    .field("term_vector", "with_offsets")
                .endObject()
                .startObject("field4")
                    .field("type", "string")
                    .field("term_vector", "with_positions")
                .endObject()
                .startObject("field5")
                    .field("type", "string")
                    .field("term_vector", "with_positions_offsets")
                .endObject()
                .startObject("field6")
                    .field("type", "string")
                    .field("term_vector", "with_positions_offsets_payloads")
                .endObject()
                .endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = MapperTestUtils.newParser().parse(mapping);

        ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .field("field1", "1234")
                .field("field2", "1234")
                .field("field3", "1234")
                .field("field4", "1234")
                .field("field5", "1234")
                .field("field6", "1234")
                .endObject()
                .bytes());

        // field1: "no" — nothing stored
        assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectors(), equalTo(false));
        assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectorOffsets(), equalTo(false));
        assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectorPositions(), equalTo(false));
        assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectorPayloads(), equalTo(false));

        // field2: "yes" — vectors only
        assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectors(), equalTo(true));
        assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectorOffsets(), equalTo(false));
        assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectorPositions(), equalTo(false));
        assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectorPayloads(), equalTo(false));

        // field3: "with_offsets" — vectors + offsets
        assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectors(), equalTo(true));
        assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectorOffsets(), equalTo(true));
        assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectorPositions(), equalTo(false));
        assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectorPayloads(), equalTo(false));

        // field4: "with_positions" — vectors + positions
        assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectors(), equalTo(true));
        assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectorOffsets(), equalTo(false));
        assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectorPositions(), equalTo(true));
        assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectorPayloads(), equalTo(false));

        // field5: "with_positions_offsets" — vectors + offsets + positions
        assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectors(), equalTo(true));
        assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectorOffsets(), equalTo(true));
        assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectorPositions(), equalTo(true));
        assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectorPayloads(), equalTo(false));

        // field6: "with_positions_offsets_payloads" — everything on
        assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectors(), equalTo(true));
        assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorOffsets(), equalTo(true));
        assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorPositions(), equalTo(true));
        assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorPayloads(), equalTo(true));
    }
}
package net.drewke.tdme.engine;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.Iterator;

import net.drewke.tdme.engine.fileio.textures.PNG;
import net.drewke.tdme.engine.model.Color4;
import net.drewke.tdme.engine.physics.CollisionDetection;
import net.drewke.tdme.engine.primitives.ConvexMesh;
import net.drewke.tdme.engine.primitives.LineSegment;
import net.drewke.tdme.engine.primitives.PrimitiveModel;
import net.drewke.tdme.engine.primitives.Triangle;
import net.drewke.tdme.engine.subsystems.lighting.LightingShader;
import net.drewke.tdme.engine.subsystems.manager.MeshManager;
import net.drewke.tdme.engine.subsystems.manager.TextureManager;
import net.drewke.tdme.engine.subsystems.manager.VBOManager;
import net.drewke.tdme.engine.subsystems.object.Object3DVBORenderer;
import net.drewke.tdme.engine.subsystems.particlesystem.ParticleSystemEntity;
import net.drewke.tdme.engine.subsystems.particlesystem.ParticlesShader;
import net.drewke.tdme.engine.subsystems.renderer.GL2Renderer;
import net.drewke.tdme.engine.subsystems.renderer.GL3Renderer;
import net.drewke.tdme.engine.subsystems.renderer.GLES2Renderer;
import net.drewke.tdme.engine.subsystems.renderer.GLRenderer;
import net.drewke.tdme.engine.subsystems.shadowmapping.ShadowMapping;
import net.drewke.tdme.engine.subsystems.shadowmapping.ShadowMappingShaderPre;
import net.drewke.tdme.engine.subsystems.shadowmapping.ShadowMappingShaderRender;
import net.drewke.tdme.gui.GUI;
import net.drewke.tdme.gui.renderer.GUIRenderer;
import net.drewke.tdme.gui.renderer.GUIShader;
import net.drewke.tdme.math.Matrix4x4;
import net.drewke.tdme.math.Vector2;
import net.drewke.tdme.math.Vector3;
import net.drewke.tdme.math.Vector4;
import net.drewke.tdme.os.FileSystem;
import net.drewke.tdme.utils.ArrayList;
import net.drewke.tdme.utils.Console;
import net.drewke.tdme.utils.HashMap;

import com.jogamp.opengl.DebugGL2;
import com.jogamp.opengl.DebugGL3;
import com.jogamp.opengl.DebugGLES2;
import com.jogamp.opengl.GL;
import com.jogamp.opengl.GL2;
import com.jogamp.opengl.GL3;
import com.jogamp.opengl.GLAutoDrawable;
import com.jogamp.opengl.GLContext;
import com.jogamp.opengl.GLES2;
import com.jogamp.opengl.GLProfile;

/**
 * Engine main class.
 *
 * Owns the renderer, entity registry, partitioning, shaders, GUI and the
 * per-frame rendering pipeline (initRendering -> computeTransformations -> display).
 * Renderer and shader instances are static and shared between the main engine
 * singleton and any offscreen engine instances.
 *
 * NOTE(review): this class is not thread safe — all methods are expected to be
 * called from the GL thread (not asserted anywhere in this code; confirm with callers).
 *
 * @author Andreas Drewke
 * @version $Id$
 */
public final class Engine {

	// singleton instance, created lazily by getInstance()
	protected static Engine instance = null;

	// renderer backend (GL3, GL2 or GLES2), chosen in initialize()
	protected static GLRenderer renderer;

	// shared resource managers, created once in initialize()
	private static TextureManager textureManager = null;
	private static VBOManager vboManager = null;
	private static MeshManager meshManager = null;
	private static GUIRenderer guiRenderer = null;

	// where animations are processed; only CPU targets exist in this build
	public enum AnimationProcessingTarget {CPU, CPU_NORENDERING};
	public static AnimationProcessingTarget animationProcessingTarget = AnimationProcessingTarget.CPU;

	// shared shader programs, created once in initialize()
	protected static ShadowMappingShaderPre shadowMappingShaderPre = null;
	protected static ShadowMappingShaderRender shadowMappingShaderRender = null;
	protected static LightingShader lightingShader = null;
	protected static ParticlesShader particlesShader = null;
	protected static GUIShader guiShader = null;

	// viewport dimensions, updated by reshape()
	private int width;
	private int height;

	private GUI gui;
	private Timing timing;
	private Camera camera;
	protected Partition partition;

	// fixed-size light array; getLightAt() asserts 0 <= idx < 8
	private Light[] lights;
	private Color4 sceneColor;

	// offscreen render target; null for the main (on-screen) engine
	private FrameBuffer frameBuffer;
	private ShadowMapping shadowMapping;

	// entity registry and per-frame working lists
	private HashMap<String,Entity> entitiesById;
	protected ArrayList<Object3D> objects;
	private ArrayList<Object3D> visibleObjects;
	private ArrayList<ObjectParticleSystemEntity> visibleOpses;
	protected ArrayList<PointsParticleSystemEntity> ppses;
	private ArrayList<PointsParticleSystemEntity> visiblePpses;
	protected Object3DVBORenderer object3DVBORenderer;
	protected HashMap<String,ParticleSystemEntity> particleSystemEntitiesById;

	private boolean shadowMappingEnabled;

	// per-frame state flags, reset at the end of display()
	private boolean renderingInitiated;
	private boolean renderingComputedTransformations;

	// matrices stored at end of display() for picking / coordinate conversion
	private Matrix4x4 modelViewMatrix;
	private Matrix4x4 projectionMatrix;

	// reusable temporaries to avoid per-frame allocation
	private Matrix4x4 tmpMatrix4x4;
	private Vector3 tmpVector3a;
	private Vector3 tmpVector3b;
	private Vector3 tmpVector3c;
	private Vector3 tmpVector3d;
	private Vector3 tmpVector3f;
	private Vector3 tmpVector3e;
	private Vector4 tmpVector4a;
	private Vector4 tmpVector4b;
	private LineSegment lineSegment;

	protected boolean initialized;

	/**
	 * Returns engine instance, creating the singleton lazily on first call
	 * @return engine singleton
	 */
	public static Engine getInstance() {
		if (instance == null) {
			instance = new Engine();
		}
		return instance;
	}

	/**
	 * Creates an offscreen rendering instance
	 * Note:
	 * 	- the root engine must have been initialized before
	 * 	- the created offscreen engine must not be initialized
	 *
	 * Shares the static renderer, shaders and GUI renderer with the root engine;
	 * renders into its own frame buffer.
	 *
	 * @param drawable drawable used to run the initial reshape
	 * @param width frame buffer width
	 * @param height frame buffer height
	 * @return off screen engine or null if the root engine is not ready
	 */
	public static Engine createOffScreenInstance(GLAutoDrawable drawable, int width, int height) {
		if (instance == null || instance.initialized == false) {
			Console.println("Engine::createOffScreenInstance(): Engine not created or not initialized.");
			return null;
		}
		// create off screen engine
		Engine offScreenEngine = new Engine();
		offScreenEngine.initialized = true;
		// create GUI
		offScreenEngine.gui = new GUI(offScreenEngine, guiRenderer);
		// create object 3d vbo renderer
		offScreenEngine.object3DVBORenderer = new Object3DVBORenderer(offScreenEngine, renderer);
		offScreenEngine.object3DVBORenderer.initialize();
		offScreenEngine.frameBuffer = new FrameBuffer(
			width, height,
			FrameBuffer.FRAMEBUFFER_DEPTHBUFFER | FrameBuffer.FRAMEBUFFER_COLORBUFFER
		);
		offScreenEngine.frameBuffer.initialize();
		// create camera, frustum partition
		offScreenEngine.camera = new Camera(renderer);
		offScreenEngine.partition = new PartitionOctTree();
		// create lights
		for (int i = 0; i < offScreenEngine.lights.length; i++) offScreenEngine.lights[i] = new Light(renderer, i);
		// create shadow mapping, mirroring the root engine's setting
		if (instance.shadowMappingEnabled == true) {
			offScreenEngine.shadowMapping = new ShadowMapping(offScreenEngine, renderer, offScreenEngine.object3DVBORenderer);
		}
		offScreenEngine.reshape(drawable, 0, 0, width, height);
		return offScreenEngine;
	}

	/**
	 * Proposes the best available GL profile in order GL3 > GL2 > GLES2.
	 * @return supported GL profile or null if none available
	 */
	public static GLProfile getProfile() {
		GLProfile glp = null;
		if (GLProfile.isAvailable(GLProfile.GL3)) {
			Console.println("TDME::Proposing GL3");
			glp = GLProfile.get(GLProfile.GL3);
		} else
		if (GLProfile.isAvailable(GLProfile.GL2)) {
			Console.println("TDME::Proposing GL2");
			glp = GLProfile.get(GLProfile.GL2);
		} else
		if (GLProfile.isAvailable(GLProfile.GLES2)) {
			Console.println("TDME::Proposing GLES2");
			glp = GLProfile.get(GLProfile.GLES2);
		} else {
			Console.println("TDME::No suiting OpenGL profile available!");
			return null;
		}
		Console.println("TDME::Proposing " + glp + ", GL2 = " + glp.isGL2() + ", GLES2 = " + glp.isGLES2() + ", GL3 = " + glp.isGL3());
		return glp;
	}

	/**
	 * Updates the renderer with given drawable's current GL context
	 * @param drawable
	 */
	private void updateRenderer(GLAutoDrawable drawable) {
		if (drawable.getGL().isGL3()) {
			GL3 gl = (GL3)drawable.getGL().getGL3();
			// notify gl context to renderer
			renderer.setGL(gl);
		} else
		if (drawable.getGL().isGL2()) {
			GL2 gl = (GL2)drawable.getGL().getGL2();
			// notify gl context to renderer
			renderer.setGL(gl);
		} else
		if (drawable.getGL().isGLES2()) {
			GLES2 gl = (GLES2)drawable.getGL().getGLES2();
			// notify gl context to renderer
			renderer.setGL(gl);
		} else {
			Console.println("Engine::updateRenderer(): unsupported GL!");
		}
	}

	/**
	 * Default constructor; private — use getInstance() or createOffScreenInstance().
	 * Does not touch GL; GL-dependent setup happens in initialize().
	 */
	private Engine() {
		width = 0;
		height = 0;
		timing = new Timing();
		camera = null;
		lights = new Light[8];
		sceneColor = new Color4(0.0f, 0.0f, 0.0f, 1.0f);
		frameBuffer = null;
		entitiesById = new HashMap<String,Entity>();
		objects = new ArrayList<Object3D>();
		visibleObjects = new ArrayList<Object3D>();
		visibleOpses = new ArrayList<ObjectParticleSystemEntity>();
		ppses = new ArrayList<PointsParticleSystemEntity>();
		visiblePpses = new ArrayList<PointsParticleSystemEntity>();
		particleSystemEntitiesById = new HashMap<String, ParticleSystemEntity>();
		// shadow mapping
		shadowMappingEnabled = false;
		shadowMapping = null;
		// render process state
		renderingInitiated = false;
		renderingComputedTransformations = false;
		// matrices
		modelViewMatrix = new Matrix4x4();
		projectionMatrix = new Matrix4x4();
		// tmp 3d entities
		tmpMatrix4x4 = new Matrix4x4();
		tmpVector3a = new Vector3();
		tmpVector3b = new Vector3();
		tmpVector3c = new Vector3();
		tmpVector3d = new Vector3();
		tmpVector3e = new Vector3();
		tmpVector3f = new Vector3();
		tmpVector4a = new Vector4();
		tmpVector4b = new Vector4();
		// line segment
		lineSegment = new LineSegment();
		//
		initialized = false;
	}

	/**
	 * @return if initialized and ready to be used
	 */
	public boolean isInitialized() {
		return initialized;
	}

	/**
	 * @return width
	 */
	public int getWidth() {
		return width;
	}

	/**
	 * @return height
	 */
	public int getHeight() {
		return height;
	}

	/**
	 * @return shadow mapping or null if disabled
	 */
	public ShadowMapping getShadowMapping() {
		return shadowMapping;
	}

	/**
	 * @return GUI
	 */
	public GUI getGUI() {
		return gui;
	}

	/**
	 * @return Timing
	 */
	public Timing getTiming() {
		return timing;
	}

	/**
	 * @return Camera
	 */
	public Camera getCamera() {
		return camera;
	}

	/**
	 * @return partition
	 */
	public Partition getPartition() {
		return partition;
	}

	/**
	 * Set partition
	 * @param partition
	 */
	public void setPartition(Partition partition) {
		this.partition = partition;
	}

	/**
	 * @return lights
	 */
	public Light[] getLights() {
		return lights;
	}

	/**
	 * @return frame buffer or null
	 */
	public FrameBuffer getFrameBuffer() {
		return frameBuffer;
	}

	/**
	 * Returns light at idx (0 &lt;= idx &lt; 8)
	 * @param idx
	 * @return Light
	 */
	public Light getLightAt(int idx) {
		// only checked when assertions are enabled (-ea); out-of-range idx
		// otherwise throws ArrayIndexOutOfBoundsException
		assert(idx >= 0 && idx < 8);
		return lights[idx];
	}

	/**
	 * @return texture manager
	 */
	public TextureManager getTextureManager() {
		return textureManager;
	}

	/**
	 * @return vertex buffer object manager
	 */
	public VBOManager getVBOManager() {
		return vboManager;
	}

	/**
	 * @return mesh manager
	 */
	public MeshManager getMeshManager() {
		return meshManager;
	}

	/**
	 * @return shadow mapping pre-render shader
	 */
	public static ShadowMappingShaderPre getShadowMappingShaderPre() {
		return shadowMappingShaderPre;
	}

	/**
	 * @return shadow mapping render shader
	 */
	public static ShadowMappingShaderRender getShadowMappingShaderRender() {
		return shadowMappingShaderRender;
	}

	/**
	 * @return lighting shader
	 */
	public static LightingShader getLightingShader() {
		return lightingShader;
	}

	/**
	 * @return particles shader
	 */
	public static ParticlesShader getParticlesShader() {
		return particlesShader;
	}

	/**
	 * @return GUI shader
	 */
	public static GUIShader getGUIShader() {
		return guiShader;
	}

	/**
	 * @return object 3d vbo renderer
	 */
	public Object3DVBORenderer getObject3DVBORenderer() {
		return object3DVBORenderer;
	}

	/**
	 * @return scene / background color
	 */
	public Color4 getSceneColor() {
		return sceneColor;
	}

	/**
	 * @return entity count
	 */
	public int getEntityCount() {
		return entitiesById.size();
	}

	/**
	 * Returns a entity by given id
	 * @param id
	 * @return entity or null
	 */
	public Entity getEntity(String id) {
		return entitiesById.get(id);
	}

	/**
	 * Adds an entity by id; an existing entity with the same id is disposed
	 * and removed from the partition first.
	 * @param entity entity to add
	 */
	public void addEntity(Entity entity) {
		// init object
		entity.setEngine(this);
		entity.setRenderer(renderer);
		entity.initialize();
		// dispose old object if any did exist in engine with same id
		Entity oldEntity = entitiesById.put(entity.getId(), entity);
		// unload old object
		if (oldEntity != null) {
			oldEntity.dispose();
			if (oldEntity.isEnabled() == true) partition.removeEntity(oldEntity);
		}
		// add to partition if enabled
		if (entity.isEnabled() == true) partition.addEntity(entity);
	}

	/**
	 * Removes an entity; disposes it and detaches engine/renderer references.
	 * No-op if no entity with given id exists.
	 * @param id
	 */
	public void removeEntity(String id) {
		Entity entity = entitiesById.remove(id);
		if (entity != null) {
			if (entity.isEnabled() == true) partition.removeEntity(entity);
			entity.dispose();
			entity.setEngine(null);
			entity.setRenderer(null);
		}
	}

	/**
	 * Removes all entities and caches (partition, VBO renderer, collision detection)
	 */
	public void reset() {
		// collect keys first to avoid mutating the map while iterating it
		Iterator<String> entityKeys = entitiesById.getKeysIterator();
		ArrayList<String> entitiesToRemove = new ArrayList<String>();
		while(entityKeys.hasNext()) {
			String entityKey = entityKeys.next();
			entitiesToRemove.add(entityKey);
		}
		for (int i = 0; i< entitiesToRemove.size(); i++) {
			removeEntity(entitiesToRemove.get(i));
		}
		partition.reset();
		object3DVBORenderer.reset();
		CollisionDetection.reset();
	}

	/**
	 * Initialize render engine
	 * @param drawable
	 */
	public void initialize(GLAutoDrawable drawable) {
		initialize(drawable, false);
	}

	/**
	 * Initialize render engine: selects the GL3/GL2/GLES2 renderer backend,
	 * creates managers, shaders, camera, partition and GUI. Idempotent —
	 * returns immediately if already initialized (e.g. offscreen instances).
	 * @param drawable
	 * @param debug wrap the GL pipeline into a Debug* pipeline
	 */
	public void initialize(GLAutoDrawable drawable, boolean debug) {
		// exit if already initialized like a offscreen engine instance
		if (initialized == true) return;

		//
		GLContext glContext = drawable.getGL().getContext();
		if (drawable.getGL().isGL3()) {
			GL3 gl = (GL3)drawable.getGL().getGL3();
			if (debug == true) {
				drawable.setGL(new DebugGL3(gl));
			}
			// use gl3 renderer; callbacks push renderer state changes into the shaders
			renderer = new GL3Renderer() {
				final public void onUpdateProjectionMatrix() {
					if (lightingShader != null) lightingShader.updateMatrices(this);
					if (particlesShader != null) particlesShader.updateMatrices(this);
					if (shadowMapping != null) shadowMapping.updateMVPMatrices(this);
				}
				final public void onUpdateCameraMatrix() {
					if (lightingShader != null) lightingShader.updateMatrices(this);
					if (particlesShader != null) particlesShader.updateMatrices(this);
					if (shadowMapping != null) shadowMapping.updateMVPMatrices(this);
				}
				final public void onUpdateModelViewMatrix() {
					if (lightingShader != null) lightingShader.updateMatrices(this);
					if (particlesShader != null) particlesShader.updateMatrices(this);
					if (shadowMapping != null) shadowMapping.updateMVPMatrices(this);
				}
				final public void onBindTexture(int textureId) {
					if (lightingShader != null) lightingShader.bindTexture(this, textureId);
					if (guiShader != null) guiShader.bindTexture(this, textureId);
				}
				final public void onUpdateTextureMatrix() {
					// no op
				}
				final public void onUpdateEffect() {
					if (lightingShader != null) lightingShader.updateEffect(this);
					if (particlesShader != null) particlesShader.updateEffect(this);
					if (guiShader != null) guiShader.updateEffect(this);
				}
				final public void onUpdateLight(int lightId) {
					if (lightingShader != null) lightingShader.updateLight(this, lightId);
				}
				final public void onUpdateMaterial() {
					if (lightingShader != null) lightingShader.updateMaterial(this);
				}
			};
			// notify gl context to renderer
			renderer.setGL(gl);
			// print gl version, extensions
			Console.println("TDME::Using GL3");
			Console.println("TDME::Extensions: " + gl.glGetString(GL.GL_EXTENSIONS));
			// engine defaults
			shadowMappingEnabled = true;
			animationProcessingTarget = AnimationProcessingTarget.CPU;
			ShadowMapping.setShadowMapSize(2048, 2048);
		} else
		if (drawable.getGL().isGL2()) {
			GL2 gl = (GL2)drawable.getGL().getGL2();
			if (debug == true) {
				drawable.setGL(new DebugGL2(gl));
			}
			// use gl2 renderer; same shader-notification callbacks as GL3
			renderer = new GL2Renderer() {
				final public void onUpdateProjectionMatrix() {
					if (lightingShader != null) lightingShader.updateMatrices(this);
					if (particlesShader != null) particlesShader.updateMatrices(this);
					if (shadowMapping != null) shadowMapping.updateMVPMatrices(this);
				}
				final public void onUpdateCameraMatrix() {
					if (lightingShader != null) lightingShader.updateMatrices(this);
					if (particlesShader != null) particlesShader.updateMatrices(this);
					if (shadowMapping != null) shadowMapping.updateMVPMatrices(this);
				}
				final public void onUpdateModelViewMatrix() {
					if (lightingShader != null) lightingShader.updateMatrices(this);
					if (particlesShader != null) particlesShader.updateMatrices(this);
					if (shadowMapping != null) shadowMapping.updateMVPMatrices(this);
				}
				final public void onBindTexture(int textureId) {
					if (lightingShader != null) lightingShader.bindTexture(this, textureId);
					if (guiShader != null) guiShader.bindTexture(this, textureId);
				}
				final public void onUpdateTextureMatrix() {
					// no op
				}
				final public void onUpdateEffect() {
					if (lightingShader != null) lightingShader.updateEffect(this);
					if (particlesShader != null) particlesShader.updateEffect(this);
					if (guiShader != null) guiShader.updateEffect(this);
				}
				final public void onUpdateLight(int lightId) {
					if (lightingShader != null) lightingShader.updateLight(this, lightId);
				}
				final public void onUpdateMaterial() {
					if (lightingShader != null) lightingShader.updateMaterial(this);
				}
			};
			// notify gl context to renderer
			renderer.setGL(gl);
			// print gl version, extensions
			Console.println("TDME::Using GL2");
			Console.println("TDME::Extensions: " + gl.glGetString(GL.GL_EXTENSIONS));
			// engine defaults
			shadowMappingEnabled = true;
			animationProcessingTarget = AnimationProcessingTarget.CPU;
			ShadowMapping.setShadowMapSize(2048, 2048);
		} else
		if (drawable.getGL().isGLES2()) {
			GLES2 gl = (GLES2)drawable.getGL().getGLES2();
			if (debug == true) {
				drawable.setGL(new DebugGLES2(gl));
			}
			// use gl es 2 renderer; same shader-notification callbacks as GL3/GL2
			renderer = new GLES2Renderer() {
				final public void onUpdateProjectionMatrix() {
					if (lightingShader != null) lightingShader.updateMatrices(this);
					if (particlesShader != null) particlesShader.updateMatrices(this);
					if (shadowMapping != null) shadowMapping.updateMVPMatrices(this);
				}
				final public void onUpdateCameraMatrix() {
					if (lightingShader != null) lightingShader.updateMatrices(this);
					if (particlesShader != null) particlesShader.updateMatrices(this);
					if (shadowMapping != null) shadowMapping.updateMVPMatrices(this);
				}
				final public void onUpdateModelViewMatrix() {
					if (lightingShader != null) lightingShader.updateMatrices(this);
					if (particlesShader != null) particlesShader.updateMatrices(this);
					if (shadowMapping != null) shadowMapping.updateMVPMatrices(this);
				}
				final public void onBindTexture(int textureId) {
					if (lightingShader != null) lightingShader.bindTexture(this, textureId);
					if (guiShader != null) guiShader.bindTexture(this, textureId);
				}
				final public void onUpdateTextureMatrix() {
					// no op
				}
				final public void onUpdateEffect() {
					if (lightingShader != null) lightingShader.updateEffect(this);
					if (particlesShader != null) particlesShader.updateEffect(this);
					if (guiShader != null) guiShader.updateEffect(this);
				}
				final public void onUpdateLight(int lightId) {
					if (lightingShader != null) lightingShader.updateLight(this, lightId);
				}
				final public void onUpdateMaterial() {
					if (lightingShader != null) lightingShader.updateMaterial(this);
				}
			};
			// notify gl context to renderer
			renderer.setGL(gl);
			// print gl version, extensions
			Console.println("TDME::Using GLES2");
			Console.println("TDME::Extensions: " + gl.glGetString(GL.GL_EXTENSIONS));
			// engine defaults
			// is shadow mapping available?
			if (renderer.isBufferObjectsAvailable() == true &&
				renderer.isDepthTextureAvailable() == true) {
				// yep, nice — smaller shadow map than desktop GL
				shadowMappingEnabled = true;
				animationProcessingTarget = AnimationProcessingTarget.CPU;
				ShadowMapping.setShadowMapSize(512, 512);
			} else {
				// nope, renderer skinning on GPU to speed up things and do not shadow mapping
				shadowMappingEnabled = false;
				animationProcessingTarget = AnimationProcessingTarget.CPU;
			}
		} else {
			Console.println("Engine::initialize(): unsupported GL!");
			return;
		}

		// init
		initialized = true;
		renderer.initialize();
		renderer.renderingTexturingClientState = false;

		// create manager
		textureManager = new TextureManager(renderer);
		vboManager = new VBOManager(renderer);
		meshManager = new MeshManager();

		// create object 3d vbo renderer
		object3DVBORenderer = new Object3DVBORenderer(this, renderer);
		object3DVBORenderer.initialize();

		// create GUI
		guiRenderer = new GUIRenderer(renderer);
		guiRenderer.initialize();
		gui = new GUI(this, guiRenderer);
		gui.initialize();

		// create camera
		camera = new Camera(renderer);
		partition = new PartitionOctTree();

		// create lights
		for (int i = 0; i < lights.length; i++) lights[i] = new Light(renderer, i);

		// create lighting shader
		lightingShader = new LightingShader(renderer);
		lightingShader.initialize();

		// create particles shader
		particlesShader = new ParticlesShader(this, renderer);
		particlesShader.initialize();

		// create GUI shader
		guiShader = new GUIShader(renderer);
		guiShader.initialize();

		// check if VBOs are available
		if (renderer.isBufferObjectsAvailable()) {
			Console.println("TDME::VBOs are available.");
		} else {
			Console.println("TDME::VBOs are not available! Engine will not work!");
			initialized = false;
		}

		// check FBO support
		if (glContext.hasBasicFBOSupport() == false) {
			Console.println("TDME::Basic FBOs are not available!");
			shadowMappingEnabled = false;
		} else {
			Console.println("TDME::Basic FBOs are available.");
		}

		// initialize shadow mapping
		if (shadowMappingEnabled == true) {
			Console.println("TDME::Using shadow mapping");
			shadowMappingShaderPre = new ShadowMappingShaderPre(renderer);
			shadowMappingShaderPre.initialize();
			shadowMappingShaderRender = new ShadowMappingShaderRender(renderer);
			shadowMappingShaderRender.initialize();
			shadowMapping = new ShadowMapping(this, renderer, object3DVBORenderer);
		} else {
			Console.println("TDME::Not using shadow mapping");
		}

		// print out animation processing target
		Console.println("TDME: animation processing target: " + animationProcessingTarget);

		// determine initialized from sub systems
		initialized&= shadowMappingShaderPre == null?true:shadowMappingShaderPre.isInitialized();
		initialized&= shadowMappingShaderRender == null?true:shadowMappingShaderRender.isInitialized();
		initialized&= lightingShader.isInitialized();
		initialized&= particlesShader.isInitialized();
		initialized&= guiShader.isInitialized();

		//
		Console.println("TDME::initialized & ready: " + initialized);
	}

	/**
	 * Reshape: updates viewport size, frame buffer, shadow mapping and GUI
	 * @param drawable
	 * @param x
	 * @param y
	 * @param width
	 * @param height
	 */
	public void reshape(GLAutoDrawable drawable, int x, int y, int width, int height) {
		// update our width and height
		this.width = width;
		this.height = height;
		// update renderer
		updateRenderer(drawable);
		// update frame buffer if we have one
		if (frameBuffer != null) {
			frameBuffer.reshape(width, height);
		}
		// update shadow mapping
		if (shadowMapping != null) {
			shadowMapping.reshape(width, height);
		}
		// update GUI system
		gui.reshape(width, height);
	}

	/**
	 * Initiates the rendering process
	 * updates timing, updates camera, clears the per-frame working lists
	 * @param drawable
	 */
	private void initRendering(GLAutoDrawable drawable) {
		// update renderer
		updateRenderer(drawable);
		// update timing
		timing.updateTiming();
		// update camera
		camera.update(width, height);
		// clear lists of known objects
		objects.clear();
		ppses.clear();
		// clear lists of visible objects
		visibleObjects.clear();
		visibleOpses.clear();
		visiblePpses.clear();
		//
		renderingInitiated = true;
	}

	/**
	 * Computes visibility and transformations: runs particle auto-emit, then
	 * collects visible entities from the partition into the per-type lists.
	 * @param drawable
	 */
	public void computeTransformations(GLAutoDrawable drawable) {
		// init rendering if not yet done
		if (renderingInitiated == false) initRendering(drawable);

		// do particle systems auto emit
		for (Entity entity: entitiesById.getValuesIterator()) {
			// skip on disabled entities
			if (entity.isEnabled() == false) continue;

			// object particle system entity
			if (entity instanceof ParticleSystemEntity) {
				ParticleSystemEntity pse = (ParticleSystemEntity)entity;
				// do auto emit
				if (pse.isAutoEmit() == true) {
					pse.emitParticles();
					pse.updateParticles();
				}
			}
		}

		// add visible entities to related lists
		for(Entity entity: partition.getVisibleEntities(camera.getFrustum())) {
			// object 3d
			if (entity instanceof Object3D) {
				Object3D object = (Object3D)entity;
				// compute transformations
				object.computeTransformations();
				// add to visible objects
				visibleObjects.add(object);
			} else
			// object particle system entity
			if (entity instanceof ObjectParticleSystemEntity) {
				ObjectParticleSystemEntity opse = (ObjectParticleSystemEntity)entity;
				visibleObjects.addAll(opse.getEnabledObjects());
				visibleOpses.add(opse);
			} else
			// points particle system entity
			if (entity instanceof PointsParticleSystemEntity) {
				PointsParticleSystemEntity ppse = (PointsParticleSystemEntity)entity;
				visiblePpses.add(ppse);
			}
		}

		//
		renderingComputedTransformations = true;
	}

	/**
	 * Renders the scene: shadow maps, lit objects, shadows, point particles;
	 * resets the per-frame flags and stores the matrices for later picking.
	 * @param drawable
	 */
	public void display(GLAutoDrawable drawable) {
		// do pre rendering steps
		if (renderingInitiated == false) initRendering(drawable);
		if (renderingComputedTransformations == false) computeTransformations(drawable);

		// init frame
		Engine.renderer.initializeFrame();

		// enable vertex and normal arrays, we always have them
		Engine.renderer.enableClientState(Engine.renderer.CLIENTSTATE_VERTEX_ARRAY);
		Engine.renderer.enableClientState(Engine.renderer.CLIENTSTATE_NORMAL_ARRAY);

		// set up camera
		camera.update(width, height);

		// render shadow maps
		if (shadowMapping != null) shadowMapping.createShadowMaps(objects);

		// switch back to framebuffer if we have one
		if (frameBuffer != null) {
			frameBuffer.enableFrameBuffer();
		} else {
			FrameBuffer.disableFrameBuffer();
		}

		// restore camera from shadow map rendering
		camera.update(width, height);

		// set up clear color
		Engine.renderer.setClearColor(
			sceneColor.getRed(),
			sceneColor.getGreen(),
			sceneColor.getBlue(),
			sceneColor.getAlpha()
		);

		// clear previous frame values
		renderer.clear(renderer.CLEAR_DEPTH_BUFFER_BIT | renderer.CLEAR_COLOR_BUFFER_BIT);

		// enable materials
		renderer.setMaterialEnabled();

		// use lighting shader
		if (lightingShader != null) {
			lightingShader.useProgram();
		}

		// update lights
		for (int j = 0; j < lights.length; j++) {
			lights[j].update();
		}

		// render objects
		object3DVBORenderer.render(visibleObjects, true);

		// setup up gl3 stuff
		if (lightingShader != null) {
			lightingShader.unUseProgram();
		}

		// render shadows if required
		if (shadowMapping != null) shadowMapping.renderShadowMaps(visibleObjects);

		// disable materials
		renderer.setMaterialDisabled();

		// use particle shader
		if (particlesShader != null) {
			particlesShader.useProgram();
		}

		// render points based particle systems
		object3DVBORenderer.render(visiblePpses);

		// unuse particle shader
		if (particlesShader != null) {
			particlesShader.unUseProgram();
		}

		// disable vertex and normal arrays
		Engine.renderer.disableClientState(Engine.renderer.CLIENTSTATE_VERTEX_ARRAY);
		Engine.renderer.disableClientState(Engine.renderer.CLIENTSTATE_NORMAL_ARRAY);
		Engine.renderer.disableClientState(Engine.renderer.CLIENTSTATE_TEXTURECOORD_ARRAY);

		// clear pre render states
		renderingInitiated = false;
		renderingComputedTransformations = false;
		renderer.renderingTexturingClientState = false;

		// store matrices
		modelViewMatrix.set(renderer.getModelViewMatrix());
		projectionMatrix.set(renderer.getProjectionMatrix());

		// unuse framebuffer if we have one
		if (frameBuffer != null) FrameBuffer.disableFrameBuffer();
	}

	/**
	 * Compute world coordinate from mouse position and z value
	 * @param mouseX mouse x
	 * @param mouseY mouse y
	 * @param z depth value in [0..1]
	 * @param worldCoordinate result vector
	 */
	public void computeWorldCoordinateByMousePosition(int mouseX, int mouseY, float z, Vector3 worldCoordinate) {
		// unproject: see http://stackoverflow.com/questions/7692988/opengl-math-projecting-screen-space-to-world-space-coords-solved
		tmpMatrix4x4.set(modelViewMatrix).multiply(projectionMatrix).invert();
		tmpMatrix4x4.multiply(
			tmpVector4a.set(
				(2.0f * mouseX / width) - 1.0f,
				1.0f - (2.0f * mouseY / height),
				2.0f * z - 1.0f,
				1.0f
			),
			tmpVector4b
		);
		tmpVector4b.scale(1.0f / tmpVector4b.getW());
		worldCoordinate.set(tmpVector4b.getArray());
	}

	/**
	 * Compute world coordinate from mouse position
	 * TODO:
	 * 	this does not work with GLES2
	 * @param mouseX mouse x
	 * @param mouseY mouse y
	 * @param worldCoordinate result vector
	 */
	public void computeWorldCoordinateByMousePosition(int mouseX, int mouseY, Vector3 worldCoordinate) {
		// use framebuffer if we have one
		if (frameBuffer != null) frameBuffer.enableFrameBuffer();
		// read depth at the mouse position (y flipped to GL coordinates)
		// see http://stackoverflow.com/questions/7692988/opengl-math-projecting-screen-space-to-world-space-coords-solved
		float z = renderer.readPixelDepth(mouseX, height - mouseY);
		// unuse framebuffer if we have one
		if (frameBuffer != null) FrameBuffer.disableFrameBuffer();
		//
		computeWorldCoordinateByMousePosition(mouseX, mouseY, z, worldCoordinate);
	}

	/**
	 * Retrieves object by mouse position
	 * @param mouseX mouse x
	 * @param mouseY mouse y
	 * @return entity or null
	 */
	public Entity getObjectByMousePosition(int mouseX, int mouseY) {
		return getObjectByMousePosition(mouseX, mouseY, null);
	}

	/**
	 * Retrieves object by mouse position: casts a ray from near to far plane
	 * and returns the closest pickable entity hit by it.
	 * @param mouseX mouse x
	 * @param mouseY mouse y
	 * @param filter optional picking filter, may be null
	 * @return entity or null
	 */
	public Entity getObjectByMousePosition(int mouseX, int mouseY, EntityPickingFilter filter) {
		// ray end points on near (z=0) and far (z=1) planes
		computeWorldCoordinateByMousePosition(mouseX, mouseY, 0f, tmpVector3a);
		computeWorldCoordinateByMousePosition(mouseX, mouseY, 1f, tmpVector3b);

		// selected entity
		float selectedEntityDistance = Float.MAX_VALUE;
		Entity selectedEntity = null;

		// iterate visible objects; exact triangle test after bounding box hit
		for (int i = 0; i < visibleObjects.size(); i++) {
			Object3D entity = visibleObjects.get(i);
			if (entity.isPickable() == false) continue;
			if (filter != null && filter.filterEntity(entity) == false) continue;
			if (lineSegment.doesBoundingBoxCollideWithLineSegment(entity.getBoundingBoxTransformed(), tmpVector3a, tmpVector3b, tmpVector3c, tmpVector3d) == true) {
				for (Vector3[] vertices: entity.getTransformedFacesIterator()) {
					if (lineSegment.doesLineSegmentCollideWithTriangle(
						vertices[0],
						vertices[1],
						vertices[2],
						tmpVector3a,
						tmpVector3b,
						tmpVector3e
					) == true) {
						float entityDistance = tmpVector3e.sub(tmpVector3a).computeLength();
						// check if not yet selected entity or its distance smaller than previous match
						if (selectedEntity == null || entityDistance < selectedEntityDistance) {
							selectedEntity = entity;
							selectedEntityDistance = entityDistance;
						}
					}
				}
			}
		}

		// iterate visible object particle system entities; bounding box center distance only
		for (int i = 0; i < visibleOpses.size(); i++) {
			ObjectParticleSystemEntity entity = visibleOpses.get(i);
			if (entity.isPickable() == false) continue;
			if (filter != null && filter.filterEntity(entity) == false) continue;
			if (lineSegment.doesBoundingBoxCollideWithLineSegment(entity.getBoundingBoxTransformed(), tmpVector3a, tmpVector3b, tmpVector3c, tmpVector3d) == true) {
				float entityDistance = tmpVector3e.set(entity.getBoundingBoxTransformed().getCenter()).sub(tmpVector3a).computeLength();
				// check if not yet selected entity or its distance smaller than previous match
				if (selectedEntity == null || entityDistance < selectedEntityDistance) {
					selectedEntity = entity;
					selectedEntityDistance = entityDistance;
				}
			}
		}

		// iterate visible point particle system entities; bounding box center distance only
		for (int i = 0; i < visiblePpses.size(); i++) {
			PointsParticleSystemEntity entity = visiblePpses.get(i);
			if (entity.isPickable() == false) continue;
			if (filter != null && filter.filterEntity(entity) == false) continue;
			if (lineSegment.doesBoundingBoxCollideWithLineSegment(entity.getBoundingBoxTransformed(), tmpVector3a, tmpVector3b, tmpVector3c, tmpVector3d) == true) {
				float entityDistance = tmpVector3e.set(entity.getBoundingBoxTransformed().getCenter()).sub(tmpVector3a).computeLength();
				// check if not yet selected entity or its distance smaller than previous match
				if (selectedEntity == null || entityDistance < selectedEntityDistance) {
					selectedEntity = entity;
					selectedEntityDistance = entityDistance;
				}
			}
		}

		//
		return selectedEntity;
	}

	/**
	 * Convert screen coordinate by world coordinate
	 * @param worldCoordinate world coordinate
	 * @param screenCoordinate result screen coordinate
	 */
	public void computeScreenCoordinateByWorldCoordinate(Vector3 worldCoordinate, Vector2 screenCoordinate) {
		// convert to normalized device coordinates
		tmpMatrix4x4.
			set(modelViewMatrix).
			multiply(
				projectionMatrix
			);
		tmpMatrix4x4.multiply(new Vector4(worldCoordinate, 1.0f), tmpVector4a);
		tmpVector4a.scale(1.0f / tmpVector4a.getW());
		float[] screenCoordinateXYZW = tmpVector4a.getArray();
		// convert to screen coordinate (y flipped back to window coordinates)
		screenCoordinate.setX((screenCoordinateXYZW[0] + 1.0f) * width / 2f);
		screenCoordinate.setY(height - ((screenCoordinateXYZW[1] + 1.0f) * height / 2f));
	}

	/**
	 * Shutdown the engine: disposes all entities, shadow mapping, frame buffer
	 * and GUI; the shared GUI renderer is disposed only by the main instance.
	 * @param drawable
	 */
	public void dispose(GLAutoDrawable drawable) {
		// update renderer
		updateRenderer(drawable);

		// dispose entities; collect keys first to avoid concurrent modification
		Iterator<String> entityKeys = entitiesById.getKeysIterator();
		ArrayList<String> entitiesToRemove = new ArrayList<String>();
		while(entityKeys.hasNext()) {
			String entityKey = entityKeys.next();
			entitiesToRemove.add(entityKey);
		}
		for (int i = 0; i< entitiesToRemove.size(); i++) {
			removeEntity(entitiesToRemove.get(i));
		}

		// dispose shadow mapping
		if (shadowMapping != null) {
			shadowMapping.dispose();
			shadowMapping = null;
		}

		// dispose frame buffer
		if (frameBuffer != null) {
			frameBuffer.dispose();
			frameBuffer = null;
		}

		// dispose GUI
		gui.dispose();
		// if disposing main engine
		if (this == Engine.instance) {
			guiRenderer.dispose();
		}
	}

	/**
	 * Set up GUI mode rendering
	 */
	public void initGUIMode() {
		// use framebuffer if we have one
		if (frameBuffer != null) frameBuffer.enableFrameBuffer();
		//
		renderer.initGuiMode();
	}

	/**
	 * Finish GUI mode rendering
	 */
	public void doneGUIMode() {
		renderer.doneGuiMode();
		// unuse framebuffer if we have one
		if (frameBuffer != null) FrameBuffer.disableFrameBuffer();
	}

	/**
	 * Creates a PNG file from current screen
	 * TODO:
	 * 	this does not seem to work with GLES2 and offscreen engines
	 * @param pathName path name
	 * @param fileName file name
	 * @return success
	 */
	public boolean makeScreenshot(String pathName, String fileName) {
		// use framebuffer if we have one
		if (frameBuffer != null) frameBuffer.enableFrameBuffer();

		// fetch pixel
		ByteBuffer pixels = renderer.readPixels(0, 0, width, height);
		if (pixels == null) return false;

		// write PNG; stream is closed in any case
		FileOutputStream fos = null;
		try {
			fos = new FileOutputStream(pathName + File.separator + fileName);
			PNG.save(width, height, pixels, fos);
		} catch (IOException ioe) {
			Console.println("Engine::makeScreenshot(): failed: " + ioe.getMessage());
			return false;
		} finally {
			if (fos != null) try { fos.close(); } catch (IOException ioe2) {}
		}

		// unuse framebuffer if we have one
		if (frameBuffer != null) FrameBuffer.disableFrameBuffer();

		//
		return true;
	}

	/**
	 * Retrieves an input stream for a tdme.jar packaged file or from filesystem;
	 * the file system is checked first, then the class path.
	 * @param pathName path name
	 * @param fileName file name
	 * @return input stream
	 * @throws IOException if neither location yields the file
	 */
	public InputStream getInputStream(String pathName, String fileName) throws IOException {
		// check file system first
		try {
			return FileSystem.getInstance().getInputStream(pathName, fileName);
		} catch (IOException ioe) {
			// no op — fall through to class path lookup
		}
		// check tdme jar next
		InputStream is = this.getClass().getClassLoader().getResourceAsStream(pathName + "/" + fileName);
		if (is == null) throw new FileNotFoundException(pathName + "/" + fileName);
		return is;
	}

}
/**
 * Copyright 2005-2014 Red Hat, Inc.
 *
 * Red Hat licenses this file to you under the Apache License, version
 * 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package io.fabric8.tooling.archetype.builder;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.io.StringReader;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;

import io.fabric8.tooling.archetype.ArchetypeUtils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.dom.Text;
import org.xml.sax.InputSource;

/**
 * This class is a replacement for <code>mvn archetype:create-from-project</code> without dependencies to
 * maven-archetype related libraries.
 */
public class ArchetypeBuilder {

    public static Logger LOG = LoggerFactory.getLogger(ArchetypeBuilder.class);

    // File extensions treated as "source" files: their content gets Velocity-escaped
    // and package names replaced when copied into the archetype (see copyFile()).
    private static final Set<String> sourceFileExtensions = Collections.unmodifiableSet(new HashSet<String>(Arrays.asList(
        "bpmn",
        "csv",
        "drl",
        "html",
        "groovy",
        "jade",
        "java",
        "jbpm",
        "js",
        "json",
        "jsp",
        "kotlin",
        "ks",
        "md",
        "properties",
        "scala",
        "ssp",
        "ts",
        "txt",
        "xml"
    )));

    private ArchetypeUtils archetypeUtils = new ArchetypeUtils();

    private File bomFile;
    private File catalogXmlFile;
    private PrintWriter printWriter;
    // Property defaults read from the BOM's <properties>; used as fallback values
    // for ${...} placeholders found in project poms (see createArchetypeDescriptors()).
    private final Map<String, String> versionProperties = new HashMap<>();

    private int indentSize = 2;
    private String indent = "  ";

    public ArchetypeBuilder(File catalogXmlFile) {
        this.catalogXmlFile = catalogXmlFile;
    }

    /**
     * Sets the indent used when emitting XML; clamped to the range [0, 8].
     */
    public void setIndentSize(int indentSize) {
        this.indentSize = Math.min(indentSize <= 0 ? 0 : indentSize, 8);
        indent = "";
        for (int c = 0; c < this.indentSize; c++) {
            indent += " ";
        }
    }

    public void setBomFile(File bomFile) {
        this.bomFile = bomFile;
    }

    /**
     * Starts generation of Archetype Catalog (see: http://maven.apache.org/xsd/archetype-catalog-1.0.0.xsd)
     * and loads default property values from the BOM (if one was configured via {@link #setBomFile(File)}).
     * The catalog writer opened here is closed by {@link #close()}.
     *
     * @throws IOException
     */
    public void configure() throws IOException {
        catalogXmlFile.getParentFile().mkdirs();
        LOG.info("Writing catalog: " + catalogXmlFile);
        printWriter = new PrintWriter(new OutputStreamWriter(new FileOutputStream(catalogXmlFile), "UTF-8"));

        printWriter.println("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
            "<archetype-catalog xmlns=\"http://maven.apache.org/plugins/maven-archetype-plugin/archetype-catalog/1.0.0\"\n" +
            indent + indent + "xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
            indent + indent + "xsi:schemaLocation=\"http://maven.apache.org/plugins/maven-archetype-plugin/archetype-catalog/1.0.0 http://maven.apache.org/xsd/archetype-catalog-1.0.0.xsd\">\n" +
            indent + "<archetypes>");

        if (bomFile != null && bomFile.exists()) {
            // read all properties of the bom, so we have default values for ${ } placeholders
            String text = FileUtils.readFileToString(bomFile);
            Document doc = archetypeUtils.parseXml(new InputSource(new StringReader(text)));
            Element root = doc.getDocumentElement();

            // lets load all the properties defined in the <properties> element in the bom pom.
            NodeList propertyElements = root.getElementsByTagName("properties");
            if (propertyElements.getLength() > 0) {
                Element propertyElement = (Element) propertyElements.item(0);
                NodeList children = propertyElement.getChildNodes();
                for (int cn = 0; cn < children.getLength(); cn++) {
                    Node e = children.item(cn);
                    if (e instanceof Element) {
                        versionProperties.put(e.getNodeName(), e.getTextContent());
                    }
                }
            }
            if (LOG.isDebugEnabled()) {
                for (Map.Entry<String, String> entry : versionProperties.entrySet()) {
                    LOG.debug("bom property: {}={}", entry.getKey(), entry.getValue());
                }
            }
        }
    }

    /**
     * Completes generation of Archetype Catalog.
     */
    public void close() {
        printWriter.println(indent + "</archetypes>\n" +
            "</archetype-catalog>");
        printWriter.close();
    }

    /**
     * Iterates through all nested directories and generates archetypes for all found, non-pom Maven projects.
     *
     * @param containerType prefix for the generated archetype directory name (e.g. container flavour)
     * @param baseDir a directory to look for projects which may be converted to Maven Archetypes
     * @param outputDir target directory where Maven Archetype projects will be generated
     * @param clean regenerate the archetypes (clean the archetype target dir)?
     * @param dirs collects the names of generated archetype directories
     * @param karafProfileDir currently unused; kept for signature compatibility
     * @throws IOException
     */
    public void generateArchetypes(String containerType, File baseDir, File outputDir, boolean clean, List<String> dirs, File karafProfileDir) throws IOException {
        LOG.debug("Generating archetypes from {} to {}", baseDir.getCanonicalPath(), outputDir.getCanonicalPath());
        File[] files = baseDir.listFiles();
        if (files != null) {
            for (File file : files) {
                if (file.isDirectory()) {
                    File projectDir = file;
                    File projectPom = new File(projectDir, "pom.xml");
                    if (projectPom.exists() && !skipImport(projectDir) && archetypeUtils.isValidProjectPom(projectPom)) {
                        String fileName = file.getName();
                        // "foo-example" -> "foo-archetype"; projects without "example"
                        // in the name get an "-archetype" suffix instead
                        String archetypeDirName = fileName.replace("example", "archetype");
                        if (fileName.equals(archetypeDirName)) {
                            archetypeDirName += "-archetype";
                        }
                        archetypeDirName = containerType + "-" + archetypeDirName;

                        File archetypeDir = new File(outputDir, archetypeDirName);
                        generateArchetype(projectDir, projectPom, archetypeDir, clean, dirs);

                        File archetypePom = new File(archetypeDir, "pom.xml");
                        if (archetypePom.exists()) {
                            addArchetypeMetaData(archetypePom, archetypeDirName);
                        }
                    }
                }
            }
        }
    }

    /**
     * We should skip importing some quickstarts and if so, we should also not create an archetype for it.
     * A project opts out by containing a marker file named <code>.skipimport</code>.
     */
    private static boolean skipImport(File dir) {
        String[] files = dir.list();
        if (files != null) {
            for (String name : files) {
                if (".skipimport".equals(name)) {
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * Generates Maven archetype from existing project. This is lightweight version of <code>mvn archetype:create-from-project</code>.
     *
     * @param projectDir directory of source project which will be converted to Maven Archetype
     * @param projectPom pom file of source project
     * @param archetypeDir output directory where Maven Archetype project will be created
     * @param clean remove the archetypeDir entirely?
     * @param dirs collects the names of generated archetype directories
     * @throws IOException
     */
    private void generateArchetype(File projectDir, File projectPom, File archetypeDir, boolean clean, List<String> dirs) throws IOException {
        LOG.debug("Generating archetype from {} to {}", projectDir.getName(), archetypeDir.getCanonicalPath());

        // add to dirs
        dirs.add(archetypeDir.getName());

        File srcDir = new File(projectDir, "src/main");
        File testDir = new File(projectDir, "src/test");
        File outputSrcDir = new File(archetypeDir, "src");
        File outputGitIgnoreFile = new File(archetypeDir, ".gitignore");

        if (clean) {
            LOG.debug("Removing generated archetype dir {}", archetypeDir);
            FileUtils.deleteDirectory(archetypeDir);
        } else if (outputSrcDir.exists() && outputGitIgnoreFile.exists() && fileIncludesLine(outputGitIgnoreFile, "src")) {
            // src was generated (it is gitignored), so it is safe to regenerate it
            LOG.debug("Removing generated src dir {}", outputSrcDir);
            FileUtils.deleteDirectory(outputSrcDir);
            if (outputSrcDir.exists()) {
                throw new RuntimeException("The projectDir " + outputSrcDir + " should not exist!");
            }
        }

        // Main dir for archetype resources - copied from original maven project. Sources will have
        // package names replaced with variable placeholders - to make them parameterizable during
        // mvn archetype:generate
        File archetypeOutputDir = new File(archetypeDir, "src/main/resources/archetype-resources");

        // target archetype-metadata.xml file. it'll end in resources-filtered, so most of variables will be replaced
        // during the build of archetype project
        File metadataXmlOutFile = new File(archetypeDir, "src/main/resources-filtered/META-INF/maven/archetype-metadata.xml");

        Replacement replaceFunction = new IdentityReplacement();

        File mainSrcDir = null;
        for (String it : ArchetypeUtils.sourceCodeDirNames) {
            File dir = new File(srcDir, it);
            if (dir.exists()) {
                mainSrcDir = dir;
                break;
            }
        }

        if (mainSrcDir != null) {
            // lets find the first projectDir which contains more than one child
            // to find the root-most package
            File rootPackage = archetypeUtils.findRootPackage(mainSrcDir);

            if (rootPackage != null) {
                String packagePath = archetypeUtils.relativePath(mainSrcDir, rootPackage);
                String packageName = packagePath.replace(File.separatorChar, '.');
                LOG.debug("Found root package in {}: {}", mainSrcDir, packageName);
                final String regex = packageName.replace(".", "\\.");

                replaceFunction = new Replacement() {
                    @Override
                    public String replace(String token) {
                        return token.replaceAll(regex, "\\${package}");
                    }
                };

                // lets recursively copy files replacing the package names
                File outputMainSrc = new File(archetypeOutputDir, archetypeUtils.relativePath(projectDir, mainSrcDir));
                copyCodeFiles(rootPackage, outputMainSrc, replaceFunction);

                // tests copied only if there's something in "src/main"
                File testSrcDir = null;
                for (String it : ArchetypeUtils.sourceCodeDirNames) {
                    File dir = new File(testDir, it);
                    if (dir.exists()) {
                        testSrcDir = dir;
                        break;
                    }
                }

                if (testSrcDir != null) {
                    File rootTestDir = new File(testSrcDir, packagePath);
                    File outputTestSrc = new File(archetypeOutputDir, archetypeUtils.relativePath(projectDir, testSrcDir));
                    if (rootTestDir.exists()) {
                        copyCodeFiles(rootTestDir, outputTestSrc, replaceFunction);
                    } else {
                        copyCodeFiles(testSrcDir, outputTestSrc, replaceFunction);
                    }
                }
            }
        }

        // now copy pom.xml
        createArchetypeDescriptors(projectPom, archetypeDir, new File(archetypeOutputDir, "pom.xml"), metadataXmlOutFile, replaceFunction);

        // now lets copy all non-ignored files across
        copyOtherFiles(projectDir, projectDir, archetypeOutputDir, replaceFunction);

        // add missing .gitignore if missing
        if (!outputGitIgnoreFile.exists()) {
            ArchetypeUtils.writeGitIgnore(outputGitIgnoreFile);
        }
    }

    /**
     * This method:<ul>
     *     <li>Copies POM from original project to archetype-resources</li>
     *     <li>Generates <code>archetype-descriptor.xml</code></li>
     *     <li>Generates Archetype's <code>pom.xml</code> if not present in target directory.</li>
     * </ul>
     *
     * @param projectPom POM file of original project
     * @param archetypeDir target directory of created Maven Archetype project
     * @param archetypePom created POM file for Maven Archetype project
     * @param metadataXmlOutFile generated archetype-metadata.xml file
     * @param replaceFn replace function
     * @throws IOException
     */
    private void createArchetypeDescriptors(File projectPom, File archetypeDir, File archetypePom, File metadataXmlOutFile, Replacement replaceFn) throws IOException {
        LOG.debug("Parsing " + projectPom);
        String text = replaceFn.replace(FileUtils.readFileToString(projectPom));

        // lets update the XML
        Document doc = archetypeUtils.parseXml(new InputSource(new StringReader(text)));
        Element root = doc.getDocumentElement();

        // let's get some values from the original project
        String originalArtifactId, originalName, originalDescription;
        Element artifactIdEl = (Element) findChild(root, "artifactId");
        Element nameEl = (Element) findChild(root, "name");
        Element descriptionEl = (Element) findChild(root, "description");
        if (artifactIdEl != null && artifactIdEl.getTextContent() != null && artifactIdEl.getTextContent().trim().length() > 0) {
            originalArtifactId = artifactIdEl.getTextContent().trim();
        } else {
            originalArtifactId = archetypeDir.getName();
        }
        if (nameEl != null && nameEl.getTextContent() != null && nameEl.getTextContent().trim().length() > 0) {
            originalName = nameEl.getTextContent().trim();
        } else {
            originalName = originalArtifactId;
        }
        if (descriptionEl != null && descriptionEl.getTextContent() != null && descriptionEl.getTextContent().trim().length() > 0) {
            originalDescription = descriptionEl.getTextContent().trim();
        } else {
            originalDescription = originalName;
        }

        // Required properties discovered in the pom, mapped to their default value (or null).
        Map<String, String> propertyNameSet = new LinkedHashMap<>();

        if (root != null) {
            // remove the parent element and the following text Node
            NodeList parents = root.getElementsByTagName("parent");
            if (parents.getLength() > 0) {
                Node parentNode = parents.item(0);
                // FIX(review): getNextSibling() may be null when <parent> is the last
                // child; guard before dereferencing to avoid an NPE
                Node afterParent = parentNode.getNextSibling();
                if (afterParent != null && afterParent.getNodeType() == Node.TEXT_NODE) {
                    root.removeChild(afterParent);
                }
                root.removeChild(parentNode);
            }

            // lets load all the properties defined in the <properties> element in the pom.
            Map<String, String> pomProperties = new LinkedHashMap<>();
            NodeList propertyElements = root.getElementsByTagName("properties");
            if (propertyElements.getLength() > 0)  {
                Element propertyElement = (Element) propertyElements.item(0);
                NodeList children = propertyElement.getChildNodes();
                for (int cn = 0; cn < children.getLength(); cn++) {
                    Node e = children.item(cn);
                    if (e instanceof Element) {
                        pomProperties.put(e.getNodeName(), e.getTextContent());
                    }
                }
            }
            if (LOG.isDebugEnabled()) {
                for (Map.Entry<String, String> entry : pomProperties.entrySet()) {
                    LOG.debug("pom property: {}={}", entry.getKey(), entry.getValue());
                }
            }

            // lets find all the property names
            NodeList children = root.getElementsByTagName("*");
            for (int cn = 0; cn < children.getLength(); cn++) {
                Node e = children.item(cn);
                if (e instanceof Element) {
                    String cText = e.getTextContent();
                    String prefix = "${";
                    if (cText.startsWith(prefix)) {
                        int offset = prefix.length();
                        int idx = cText.indexOf("}", offset + 1);
                        if (idx > 0) {
                            String name = cText.substring(offset, idx);
                            if (!pomProperties.containsKey(name) && isValidRequiredPropertyName(name)) {
                                // use default value if we have one, but favor value from this pom over the bom pom
                                String value = pomProperties.get(name);
                                if (value == null) {
                                    value = versionProperties.get(name);
                                }
                                propertyNameSet.put(name, value);
                            }
                        }
                    }
                }
            }

            String profile = replaceNodeValue(doc, root, "fabric8.profile", "${fabric8-profile}");
            if (profile != null) {
                // we do not want a default name for the profile as the end user should be able to set that value
                // and use fabric8-profile as key as there is a problem when using fabric8.profile
                propertyNameSet.put("fabric8-profile", null);
            }

            // now lets replace the contents of some elements (adding new elements if they are not present)
            List<String> beforeNames = Arrays.asList("artifactId", "version", "packaging", "name", "properties", "fabric8-profile");
            replaceOrAddElementText(doc, root, "version", "${version}", beforeNames);
            replaceOrAddElementText(doc, root, "artifactId", "${artifactId}", beforeNames);
            replaceOrAddElementText(doc, root, "groupId", "${groupId}", beforeNames);
        }
        archetypePom.getParentFile().mkdirs();

        // remove copyright header which is the first comment, as we do not want that in the archetypes
        removeCommentNodes(doc);

        archetypeUtils.writeXmlDocument(doc, archetypePom);

        // lets update the archetype-metadata.xml file
        String archetypeXmlText = defaultArchetypeXmlText();

        Document archDoc = archetypeUtils.parseXml(new InputSource(new StringReader(archetypeXmlText)));
        Element archRoot = archDoc.getDocumentElement();

        // replace @name attribute on root element
        archRoot.setAttribute("name", archetypeDir.getName());

        LOG.debug(("Found property names: {}"), propertyNameSet);
        // lets add all the properties
        Element requiredProperties = replaceOrAddElement(archDoc, archRoot, "requiredProperties", Arrays.asList("fileSets"));

        // lets add the various properties in
        for (Map.Entry<String, String> entry : propertyNameSet.entrySet()) {
            requiredProperties.appendChild(archDoc.createTextNode("\n" + indent + indent));
            Element requiredProperty = archDoc.createElement("requiredProperty");
            requiredProperties.appendChild(requiredProperty);
            requiredProperty.setAttribute("key", entry.getKey());
            if (entry.getValue() != null) {
                requiredProperty.appendChild(archDoc.createTextNode("\n" + indent + indent + indent));
                Element defaultValue = archDoc.createElement("defaultValue");
                requiredProperty.appendChild(defaultValue);
                defaultValue.appendChild(archDoc.createTextNode(entry.getValue()));
            }
            requiredProperty.appendChild(archDoc.createTextNode("\n" + indent + indent));
        }
        requiredProperties.appendChild(archDoc.createTextNode("\n" + indent));

        metadataXmlOutFile.getParentFile().mkdirs();
        archetypeUtils.writeXmlDocument(archDoc, metadataXmlOutFile);

        File archetypeProjectPom = new File(archetypeDir, "pom.xml");
        // now generate Archetype's pom
        if (!archetypeProjectPom.exists()) {
            StringWriter sw = new StringWriter();
            IOUtils.copy(getClass().getResourceAsStream("default-archetype-pom.xml"), sw, "UTF-8");
            Document pomDocument = archetypeUtils.parseXml(new InputSource(new StringReader(sw.toString())));

            List<String> emptyList = Collections.emptyList();

            // artifactId = original artifactId with "-archetype"
            Element artifactId = replaceOrAddElement(pomDocument, pomDocument.getDocumentElement(), "artifactId", emptyList);
            artifactId.setTextContent(archetypeDir.getName());

            // name = "Fabric8 :: Qickstarts :: xxx" -> "Fabric8 :: Archetypes :: xxx"
            Element name = replaceOrAddElement(pomDocument, pomDocument.getDocumentElement(), "name", emptyList);
            if (originalName.contains(" :: ")) {
                String[] originalNameTab = originalName.split(" :: ");
                if (originalNameTab.length > 2) {
                    StringBuilder sb = new StringBuilder();
                    sb.append("Fabric8 :: Archetypes");
                    for (int idx = 2; idx < originalNameTab.length; idx++) {
                        sb.append(" :: ").append(originalNameTab[idx]);
                    }
                    name.setTextContent(sb.toString());
                } else {
                    name.setTextContent("Fabric8 :: Archetypes :: " + originalNameTab[1]);
                }
            } else {
                name.setTextContent("Fabric8 :: Archetypes :: " + originalName);
            }

            // description = "Creates a new " + originalDescription
            Element description = replaceOrAddElement(pomDocument, pomDocument.getDocumentElement(), "description", emptyList);
            description.setTextContent("Creates a new " + originalDescription);

            archetypeUtils.writeXmlDocument(pomDocument, archetypeProjectPom);
        }
    }

    /**
     * Remove any comment nodes from the doc (only top level).
     * <p/>
     * This is used to remove copyright headers embedded as comment in the pom.xml files etc.
     */
    private void removeCommentNodes(Document doc) {
        // collect first, then remove, to avoid mutating the live NodeList while iterating
        List<Node> toRemove = new ArrayList<>();
        NodeList children = doc.getChildNodes();
        for (int cn = 0; cn < children.getLength(); cn++) {
            Node child = children.item(cn);
            if (Node.COMMENT_NODE == child.getNodeType()) {
                toRemove.add(child);
            }
        }
        for (Node child : toRemove) {
            doc.removeChild(child);
        }
    }

    /**
     * Recursively searches <code>parent</code> for the first element named <code>name</code>,
     * replaces its text content with <code>newValue</code> and returns the previous value,
     * or null when no such element exists.
     */
    private String replaceNodeValue(Document doc, Element parent, String name, String newValue) {
        NodeList children = parent.getChildNodes();
        for (int cn = 0; cn < children.getLength(); cn++) {
            Node child = children.item(cn);
            if (child instanceof Element && child.getNodeName().equals(name)) {
                Element e = (Element) children.item(cn);
                String answer = e.getTextContent();
                e.setTextContent(newValue);
                return answer;
            } else if (child instanceof Element && child.hasChildNodes()) {
                String answer = replaceNodeValue(doc, (Element) child, name, newValue);
                if (answer != null) {
                    return answer;
                }
            }
        }
        return null;
    }

    /**
     * Creates new element as child of <code>parent</code> and sets its text content
     */
    protected Element replaceOrAddElementText(Document doc, Element parent, String name, String content, List<String> beforeNames) {
        Element element = replaceOrAddElement(doc, parent, name, beforeNames);
        element.setTextContent(content);
        return element;
    }

    /**
     * Returns new or existing Element from <code>parent</code>. A new element is inserted
     * before the first existing child whose name appears in <code>beforeNames</code>
     * (or before the first child when none match).
     */
    private Element replaceOrAddElement(Document doc, Element parent, String name, List<String> beforeNames) {
        NodeList children = parent.getChildNodes();
        List<Element> elements = new LinkedList<Element>();
        for (int cn = 0; cn < children.getLength(); cn++) {
            if (children.item(cn) instanceof Element && children.item(cn).getNodeName().equals(name)) {
                elements.add((Element) children.item(cn));
            }
        }
        Element element = null;
        if (elements.isEmpty()) {
            Element newElement = doc.createElement(name);
            Node first = null;
            for (String n : beforeNames) {
                first = findChild(parent, n);
                if (first != null) {
                    break;
                }
            }
            Node node = null;
            if (first != null) {
                node = first;
            } else {
                node = parent.getFirstChild();
            }
            Text text = doc.createTextNode("\n" + indent);
            parent.insertBefore(text, node);
            parent.insertBefore(newElement, text);
            element = newElement;
        } else {
            element = elements.get(0);
        }
        return element;
    }

    /**
     * Appends an &lt;archetype&gt; entry for the given archetype pom to the catalog
     * being written by {@link #configure()}.
     */
    protected void addArchetypeMetaData(File pom, String outputName) throws FileNotFoundException {
        Document doc = archetypeUtils.parseXml(new InputSource(new FileReader(pom)));
        Element root = doc.getDocumentElement();

        String groupId = "io.fabric8.archetypes";
        String artifactId = archetypeUtils.firstElementText(root, "artifactId", outputName);
        String name = archetypeUtils.firstElementText(root, "name", "");
        String description = archetypeUtils.firstElementText(root, "description", "");
        String version = "";

        // prefer the parent's version, falling back to the project's own version
        NodeList parents = root.getElementsByTagName("parent");
        if (parents.getLength() > 0) {
            version = archetypeUtils.firstElementText((Element) parents.item(0), "version", "");
        }
        if (version.length() == 0) {
            version = archetypeUtils.firstElementText(root, "version", "");
        }

        String repo = "https://repo.fusesource.com/nexus/content/groups/public";

        printWriter.println(String.format(indent + indent + "<archetype>\n" +
            indent + indent + indent + "<groupId>%s</groupId>\n" +
            indent + indent + indent + "<artifactId>%s</artifactId>\n" +
            indent + indent + indent + "<version>%s</version>\n" +
            indent + indent + indent + "<repository>%s</repository>\n" +
            indent + indent + indent + "<name>%s</name>\n" +
            indent + indent + indent + "<description>%s</description>\n" +
            indent + indent + "</archetype>", groupId, artifactId, version, repo, name, description));
    }

    /**
     * Checks whether the file contains specific line. Partial matches do not count.
     */
    private boolean fileIncludesLine(File file, String matches) throws IOException {
        for (String line : FileUtils.readLines(file)) {
            String trimmed = line.trim();
            if (trimmed.equals(matches)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Copies all java/groovy/kotlin/scala code recursively. <code>replaceFn</code> is used to modify the content of files.
     */
    private void copyCodeFiles(File rootPackage, File outDir, Replacement replaceFn) throws IOException {
        if (rootPackage.isFile()) {
            copyFile(rootPackage, outDir, replaceFn);
        } else {
            outDir.mkdirs();
            String[] names = rootPackage.list();
            if (names != null) {
                for (String name : names) {
                    copyCodeFiles(new File(rootPackage, name), new File(outDir, name), replaceFn);
                }
            }
        }
    }

    /**
     * Copies single file from <code>src</code> to <code>dest</code>.
     * If the file is source file, variable references will be escaped, so they'll survive Velocity template merging.
     */
    private void copyFile(File src, File dest, Replacement replaceFn) throws IOException {
        if (replaceFn != null && isSourceFile(src)) {
            String original = FileUtils.readFileToString(src);
            String escapeDollarSquiggly = original;
            if (original.contains("${")) {
                String replaced = original.replaceAll(Pattern.quote("${"), "\\${D}{");
                // add Velocity expression at the beginning of the result file.
                // Velocity is used by mvn archetype:generate
                escapeDollarSquiggly = "#set( $D = '$' )\n" + replaced;
            }
            // do additional replacement
            String text = replaceFn.replace(escapeDollarSquiggly);
            FileUtils.write(dest, text);
        } else {
            // FIX(review): was LOG.warn(...) inside an isDebugEnabled() guard with a
            // message saying "dir" — use debug level consistently and say "file"
            if (LOG.isDebugEnabled()) {
                LOG.debug("Not a source file as the extension is {}", FilenameUtils.getExtension(src.getName()));
            }
            FileUtils.copyFile(src, dest);
        }
    }

    /**
     * Copies all other source files which are not excluded
     */
    private void copyOtherFiles(File projectDir, File srcDir, File outDir, Replacement replaceFn) throws IOException {
        if (archetypeUtils.isValidFileToCopy(projectDir, srcDir)) {
            if (srcDir.isFile()) {
                copyFile(srcDir, outDir, replaceFn);
            } else {
                outDir.mkdirs();
                String[] names = srcDir.list();
                if (names != null) {
                    for (String name : names) {
                        copyOtherFiles(projectDir, new File(srcDir, name), new File(outDir, name), replaceFn);
                    }
                }
            }
        }
    }

    /**
     * Copies data files recursively, without the exclusion filtering applied by
     * {@link #copyOtherFiles(File, File, File, Replacement)}.
     */
    private void copyDataFiles(File projectDir, File srcDir, File outDir, Replacement replaceFn) throws IOException {
        if (srcDir.isFile()) {
            copyFile(srcDir, outDir, replaceFn);
        } else {
            outDir.mkdirs();
            String[] names = srcDir.list();
            if (names != null) {
                for (String name : names) {
                    copyDataFiles(projectDir, new File(srcDir, name), new File(outDir, name), replaceFn);
                }
            }
        }
    }

    /**
     * Returns true if this file is a valid source file name
     */
    private boolean isSourceFile(File file) {
        String name = FilenameUtils.getExtension(file.getName()).toLowerCase();
        return sourceFileExtensions.contains(name);
    }

    /**
     * Returns true if this is a valid archetype property name, so excluding basedir and maven "project." names
     */
    protected boolean isValidRequiredPropertyName(String name) {
        return !name.equals("basedir") && !name.startsWith("project.") && !name.startsWith("pom.") && !name.equals("package");
    }

    /**
     * Returns the first direct child of <code>parent</code> named <code>n</code>, or null.
     */
    protected Node findChild(Element parent, String n) {
        NodeList children = parent.getChildNodes();
        for (int cn = 0; cn < children.getLength(); cn++) {
            if (n.equals(children.item(cn).getNodeName())) {
                return children.item(cn);
            }
        }
        return null;
    }

    private String defaultArchetypeXmlText() throws IOException {
        StringWriter sw = new StringWriter();
        IOUtils.copy(getClass().getResourceAsStream("default-archetype-descriptor.xml"), sw, "UTF-8");
        return sw.toString();
    }

    /**
     * Interface for (String) => (String) functions
     */
    private interface Replacement {
        String replace(String token);
    }

    /**
     * Identity Replacement.
     */
    private static class IdentityReplacement implements Replacement {
        public String replace(String token) {
            return token;
        }
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p/>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p/>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.artemis.tests.extras.byteman;

import org.apache.activemq.artemis.api.core.ActiveMQBuffer;
import org.apache.activemq.artemis.api.core.client.ServerLocator;
import org.apache.activemq.artemis.core.protocol.core.impl.PacketImpl;
import org.apache.activemq.artemis.jms.client.ActiveMQConnectionFactory;
import org.apache.activemq.artemis.tests.util.JMSTestBase;
import org.jboss.byteman.contrib.bmunit.BMRule;
import org.jboss.byteman.contrib.bmunit.BMRules;
import org.jboss.byteman.contrib.bmunit.BMUnitRunner;
import org.junit.After;
import org.junit.Test;
import org.junit.runner.RunWith;

import javax.jms.Connection;
import javax.jms.ExceptionListener;
import javax.jms.JMSException;
import javax.jms.Message;
import javax.jms.MessageConsumer;
import javax.jms.MessageProducer;
import javax.jms.Queue;
import javax.jms.Session;
import javax.jms.TextMessage;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

/**
 * Verifies that a critical packet-decoding failure causes the connection to be
 * disconnected and the JMS {@link ExceptionListener} to be notified, on both the
 * server side and the client side. Byteman rules inject an
 * {@link IllegalArgumentException} into the packet decoder at the point named by
 * each rule; the static {@code doThrow} helpers below are the injected actions.
 */
@RunWith(BMUnitRunner.class)
public class DisconnectOnCriticalFailureTest extends JMSTestBase {

    // Armed by each test just before the operation that should fail; the injected
    // doThrow() actions only fire while this is true and disarm themselves after.
    private static AtomicBoolean corruptPacket = new AtomicBoolean(false);

    @After
    @Override
    public void tearDown() throws Exception {
        // always disarm so a failed test cannot corrupt packets of later tests
        corruptPacket.set(false);
        super.tearDown();
    }

    /**
     * Injects a decode failure on the server-side PacketDecoder and asserts the
     * producer connection's ExceptionListener is invoked (i.e. the server
     * disconnects the client on a critical failure).
     */
    @Test
    @BMRules(
        rules = {@BMRule(
            name = "Corrupt Decoding",
            targetClass = "org.apache.activemq.artemis.core.protocol.core.impl.PacketDecoder",
            targetMethod = "decode(byte)",
            targetLocation = "ENTRY",
            action = "org.apache.activemq.artemis.tests.extras.byteman.DisconnectOnCriticalFailureTest.doThrow();")})
    public void testSendDisconnect() throws Exception {
        createQueue("queue1");
        final Connection producerConnection = nettyCf.createConnection();
        final CountDownLatch latch = new CountDownLatch(1);

        try {
            producerConnection.setExceptionListener(new ExceptionListener() {
                @Override
                public void onException(JMSException e) {
                    latch.countDown();
                }
            });

            corruptPacket.set(true);
            // creating the session sends a packet whose decode will now throw
            producerConnection.createSession(false, Session.AUTO_ACKNOWLEDGE);

            assertTrue(latch.await(5, TimeUnit.SECONDS));
        } finally {
            corruptPacket.set(false);
            if (producerConnection != null) {
                producerConnection.close();
            }
        }
    }

    /**
     * Injects a decode failure on the client-side packet decoder while receiving
     * a regular (non-large) message and asserts the ExceptionListener fires.
     */
    @Test
    @BMRules(
        rules = {@BMRule(
            name = "Corrupt Decoding",
            targetClass = "org.apache.activemq.artemis.core.protocol.ClientPacketDecoder",
            targetMethod = "decode(org.apache.activemq.artemis.api.core.ActiveMQBuffer)",
            targetLocation = "ENTRY",
            action = "org.apache.activemq.artemis.tests.extras.byteman.DisconnectOnCriticalFailureTest.doThrow($1);")})
    public void testClientDisconnect() throws Exception {
        Queue q1 = createQueue("queue1");
        final Connection connection = nettyCf.createConnection();
        final CountDownLatch latch = new CountDownLatch(1);

        try {
            connection.setExceptionListener(new ExceptionListener() {
                @Override
                public void onException(JMSException e) {
                    latch.countDown();
                }
            });

            Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
            MessageProducer producer = session.createProducer(q1);
            TextMessage m = session.createTextMessage("hello");
            producer.send(m);

            connection.start();
            corruptPacket.set(true);
            MessageConsumer consumer = session.createConsumer(q1);
            // receiving the message triggers the corrupted SESS_RECEIVE_MSG decode
            consumer.receive(2000);

            assertTrue(latch.await(5, TimeUnit.SECONDS));
        } finally {
            corruptPacket.set(false);
            if (connection != null) {
                connection.close();
            }
        }
    }

    /**
     * Same as {@link #testClientDisconnect()} but with a large message: the
     * message body is sized to at least the locator's minimum large-message size
     * so it is delivered as SESS_RECEIVE_LARGE_MSG, which is not corrupted; the
     * failure is only triggered by the subsequent "forced delivery" packet.
     */
    @Test(timeout = 60000)
    @BMRules(
        rules = {@BMRule(
            name = "Corrupt Decoding",
            targetClass = "org.apache.activemq.artemis.core.protocol.ClientPacketDecoder",
            targetMethod = "decode(org.apache.activemq.artemis.api.core.ActiveMQBuffer)",
            targetLocation = "ENTRY",
            action = "org.apache.activemq.artemis.tests.extras.byteman.DisconnectOnCriticalFailureTest.doThrow($1);")})
    public void testClientDisconnectLarge() throws Exception {
        Queue q1 = createQueue("queue1");
        final Connection connection = nettyCf.createConnection();
        final CountDownLatch latch = new CountDownLatch(1);

        ServerLocator locator = ((ActiveMQConnectionFactory)nettyCf).getServerLocator();
        int minSize = locator.getMinLargeMessageSize();
        // build a payload just big enough to be handled as a large message
        StringBuilder builder = new StringBuilder();
        for (int i = 0; i < minSize; i++) {
            builder.append("a");
        }

        try {
            connection.setExceptionListener(new ExceptionListener() {
                @Override
                public void onException(JMSException e) {
                    latch.countDown();
                }
            });

            Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
            MessageProducer producer = session.createProducer(q1);
            TextMessage m = session.createTextMessage(builder.toString());
            producer.send(m);

            connection.start();
            corruptPacket.set(true);
            MessageConsumer consumer = session.createConsumer(q1);
            Message lm = consumer.receive(2000);
            //first receive won't crash because the packet
            //is SESS_RECEIVE_LARGE_MSG
            assertNotNull(lm);

            //second receive will force server to send a
            //"forced delivery" message, and will cause
            //the exception to be thrown.
            lm = consumer.receive(5000);
            assertNull(lm);

            assertTrue(latch.await(5, TimeUnit.SECONDS));
        } finally {
            corruptPacket.set(false);
            if (connection != null) {
                connection.close();
            }
        }
    }

    /**
     * Byteman-injected action for client-side decoding: throws only when armed
     * AND the buffer holds a SESS_RECEIVE_MSG packet; disarms itself so the
     * failure happens exactly once.
     */
    public static void doThrow(ActiveMQBuffer buff) {
        byte type = buff.getByte(buff.readerIndex());
        if (corruptPacket.get() && type == PacketImpl.SESS_RECEIVE_MSG) {
            corruptPacket.set(false);
            throw new IllegalArgumentException("Invalid type: -84");
        }
    }

    /**
     * Byteman-injected action for server-side decoding: throws once when armed.
     */
    public static void doThrow() {
        if (corruptPacket.get()) {
            corruptPacket.set(false);
            throw new IllegalArgumentException("Invalid type: -84");
        }
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.

package com.azure.resourcemanager.mariadb.models;

import com.azure.core.annotation.Fluent;
import com.azure.core.annotation.JsonFlatten;
import com.azure.core.util.logging.ClientLogger;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Map;

/** Parameters allowed to update for a server. */
// NOTE: AutoRest-generated model — prefer regenerating from the service spec
// over hand-editing. @JsonFlatten expands the "properties.xxx" JsonProperty
// paths into a nested "properties" object on the wire.
@JsonFlatten
@Fluent
public class ServerUpdateParameters {
    // Excluded from serialization; used only by validate() error reporting
    // conventions in azure-core models.
    @JsonIgnore private final ClientLogger logger = new ClientLogger(ServerUpdateParameters.class);

    /*
     * The SKU (pricing tier) of the server.
     */
    @JsonProperty(value = "sku")
    private Sku sku;

    /*
     * Application-specific metadata in the form of key-value pairs.
     */
    @JsonProperty(value = "tags")
    private Map<String, String> tags;

    /*
     * Storage profile of a server.
     */
    @JsonProperty(value = "properties.storageProfile")
    private StorageProfile storageProfile;

    /*
     * The password of the administrator login.
     */
    @JsonProperty(value = "properties.administratorLoginPassword")
    private String administratorLoginPassword;

    /*
     * The version of a server.
     */
    @JsonProperty(value = "properties.version")
    private ServerVersion version;

    /*
     * Enable ssl enforcement or not when connect to server.
     */
    @JsonProperty(value = "properties.sslEnforcement")
    private SslEnforcementEnum sslEnforcement;

    /*
     * Enforce a minimal Tls version for the server.
     */
    @JsonProperty(value = "properties.minimalTlsVersion")
    private MinimalTlsVersionEnum minimalTlsVersion;

    /*
     * Whether or not public network access is allowed for this server. Value
     * is optional but if passed in, must be 'Enabled' or 'Disabled'
     */
    @JsonProperty(value = "properties.publicNetworkAccess")
    private PublicNetworkAccessEnum publicNetworkAccess;

    /*
     * The replication role of the server.
     */
    @JsonProperty(value = "properties.replicationRole")
    private String replicationRole;

    /**
     * Get the sku property: The SKU (pricing tier) of the server.
     *
     * @return the sku value.
     */
    public Sku sku() {
        return this.sku;
    }

    /**
     * Set the sku property: The SKU (pricing tier) of the server.
     *
     * @param sku the sku value to set.
     * @return the ServerUpdateParameters object itself.
     */
    public ServerUpdateParameters withSku(Sku sku) {
        this.sku = sku;
        return this;
    }

    /**
     * Get the tags property: Application-specific metadata in the form of key-value pairs.
     *
     * @return the tags value.
     */
    public Map<String, String> tags() {
        return this.tags;
    }

    /**
     * Set the tags property: Application-specific metadata in the form of key-value pairs.
     *
     * @param tags the tags value to set.
     * @return the ServerUpdateParameters object itself.
     */
    public ServerUpdateParameters withTags(Map<String, String> tags) {
        this.tags = tags;
        return this;
    }

    /**
     * Get the storageProfile property: Storage profile of a server.
     *
     * @return the storageProfile value.
     */
    public StorageProfile storageProfile() {
        return this.storageProfile;
    }

    /**
     * Set the storageProfile property: Storage profile of a server.
     *
     * @param storageProfile the storageProfile value to set.
     * @return the ServerUpdateParameters object itself.
     */
    public ServerUpdateParameters withStorageProfile(StorageProfile storageProfile) {
        this.storageProfile = storageProfile;
        return this;
    }

    /**
     * Get the administratorLoginPassword property: The password of the administrator login.
     *
     * @return the administratorLoginPassword value.
     */
    public String administratorLoginPassword() {
        return this.administratorLoginPassword;
    }

    /**
     * Set the administratorLoginPassword property: The password of the administrator login.
     *
     * @param administratorLoginPassword the administratorLoginPassword value to set.
     * @return the ServerUpdateParameters object itself.
     */
    public ServerUpdateParameters withAdministratorLoginPassword(String administratorLoginPassword) {
        this.administratorLoginPassword = administratorLoginPassword;
        return this;
    }

    /**
     * Get the version property: The version of a server.
     *
     * @return the version value.
     */
    public ServerVersion version() {
        return this.version;
    }

    /**
     * Set the version property: The version of a server.
     *
     * @param version the version value to set.
     * @return the ServerUpdateParameters object itself.
     */
    public ServerUpdateParameters withVersion(ServerVersion version) {
        this.version = version;
        return this;
    }

    /**
     * Get the sslEnforcement property: Enable ssl enforcement or not when connect to server.
     *
     * @return the sslEnforcement value.
     */
    public SslEnforcementEnum sslEnforcement() {
        return this.sslEnforcement;
    }

    /**
     * Set the sslEnforcement property: Enable ssl enforcement or not when connect to server.
     *
     * @param sslEnforcement the sslEnforcement value to set.
     * @return the ServerUpdateParameters object itself.
     */
    public ServerUpdateParameters withSslEnforcement(SslEnforcementEnum sslEnforcement) {
        this.sslEnforcement = sslEnforcement;
        return this;
    }

    /**
     * Get the minimalTlsVersion property: Enforce a minimal Tls version for the server.
     *
     * @return the minimalTlsVersion value.
     */
    public MinimalTlsVersionEnum minimalTlsVersion() {
        return this.minimalTlsVersion;
    }

    /**
     * Set the minimalTlsVersion property: Enforce a minimal Tls version for the server.
     *
     * @param minimalTlsVersion the minimalTlsVersion value to set.
     * @return the ServerUpdateParameters object itself.
     */
    public ServerUpdateParameters withMinimalTlsVersion(MinimalTlsVersionEnum minimalTlsVersion) {
        this.minimalTlsVersion = minimalTlsVersion;
        return this;
    }

    /**
     * Get the publicNetworkAccess property: Whether or not public network access is allowed for this server. Value is
     * optional but if passed in, must be 'Enabled' or 'Disabled'.
     *
     * @return the publicNetworkAccess value.
     */
    public PublicNetworkAccessEnum publicNetworkAccess() {
        return this.publicNetworkAccess;
    }

    /**
     * Set the publicNetworkAccess property: Whether or not public network access is allowed for this server. Value is
     * optional but if passed in, must be 'Enabled' or 'Disabled'.
     *
     * @param publicNetworkAccess the publicNetworkAccess value to set.
     * @return the ServerUpdateParameters object itself.
     */
    public ServerUpdateParameters withPublicNetworkAccess(PublicNetworkAccessEnum publicNetworkAccess) {
        this.publicNetworkAccess = publicNetworkAccess;
        return this;
    }

    /**
     * Get the replicationRole property: The replication role of the server.
     *
     * @return the replicationRole value.
     */
    public String replicationRole() {
        return this.replicationRole;
    }

    /**
     * Set the replicationRole property: The replication role of the server.
     *
     * @param replicationRole the replicationRole value to set.
     * @return the ServerUpdateParameters object itself.
     */
    public ServerUpdateParameters withReplicationRole(String replicationRole) {
        this.replicationRole = replicationRole;
        return this;
    }

    /**
     * Validates the instance.
     *
     * @throws IllegalArgumentException thrown if the instance is not valid.
     */
    public void validate() {
        // Cascade validation into the nested complex-type properties; all
        // scalar properties here are optional and need no checks.
        if (sku() != null) {
            sku().validate();
        }
        if (storageProfile() != null) {
            storageProfile().validate();
        }
    }
}
/*
 * Copyright 2017 James F. Bowring and CIRDLES.org.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.cirdles.squid.shrimp;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.SortedSet;

import static org.cirdles.ludwig.squid25.SquidConstants.SQUID_UPPER_LIMIT_1_SIGMA_PERCENT;

/**
 * Holds one isotopic ratio of interest (numerator/denominator species) plus
 * its equation-time series and summary value/uncertainty.  Identity (equals,
 * hashCode, compareTo) is defined solely by {@code reportingOrderIndex}.
 *
 * @author James F. Bowring
 */
public class SquidRatiosModel implements Serializable, Comparable<SquidRatiosModel> {

    private static final long serialVersionUID = -2944080263487487243L;

    private String ratioName;
    private SquidSpeciesModel numerator;
    private SquidSpeciesModel denominator;
    private int reportingOrderIndex;
    private List<Double> ratEqTime;
    private List<Double> ratEqVal;
    // one sigma absolute uncertainties for ratEqVal
    private List<Double> ratEqErr;
    private double ratioVal;
    // one sigma absolute uncertainty for ratioVal
    private double ratioFractErr;

    // July 2019 we add two fields for the ratio and uncertainty
    // that can be overwritten by the user as in Squid2 "swapped"
    // and that will be the values supplied to the expression
    // evaluator for this ratio of interest. These fields will default
    // to the original ratioVal and ratioFractErr fields.
    private double ratioValUsed;
    // one sigma absolute uncertainty for ratioVal
    private double ratioFractErrUsed;

    private int minIndex;
    private boolean active;

    /**
     * Creates an inactive model named "numerator/denominator" with empty
     * time/value/error series and zeroed summary statistics.
     *
     * @param numerator numerator species
     * @param denominator denominator species
     * @param reportingOrderIndex position in reports; defines model identity
     */
    public SquidRatiosModel(SquidSpeciesModel numerator, SquidSpeciesModel denominator, int reportingOrderIndex) {
        this.numerator = numerator;
        this.denominator = denominator;
        this.reportingOrderIndex = reportingOrderIndex;
        this.ratioName = numerator.getIsotopeName() + "/" + denominator.getIsotopeName();
        this.ratEqTime = new ArrayList<>();
        this.ratEqVal = new ArrayList<>();
        this.ratEqErr = new ArrayList<>();
        this.ratioVal = 0.0;
        this.ratioFractErr = 0.0;
        this.ratioValUsed = 0.0;
        this.ratioFractErrUsed = 0.0;
        this.minIndex = -2;
        this.active = false;
    }

    @Override
    public int compareTo(SquidRatiosModel squidRatiosModel) {
        return Integer.compare(reportingOrderIndex, squidRatiosModel.getReportingOrderIndex());
    }

    @Override
    public boolean equals(Object squidRatiosModel) {
        // Identity is the reporting order index only (consistent with
        // compareTo and hashCode).
        boolean retVal = false;
        if (squidRatiosModel instanceof SquidRatiosModel) {
            retVal = reportingOrderIndex == ((SquidRatiosModel) squidRatiosModel).getReportingOrderIndex();
        }
        return retVal;
    }

    @Override
    public int hashCode() {
        // FIX: previously returned super.hashCode() (identity hash), which
        // violated the equals/hashCode contract — two models equal by
        // reportingOrderIndex could land in different hash buckets.  Hash on
        // the same field equals() compares.
        return Integer.hashCode(reportingOrderIndex);
    }

    /**
     * Returns a new model with the same species and reporting index.
     * NOTE(review): this copy omits the measured series and summary values
     * (ratEqTime/ratEqVal/ratEqErr, ratioVal, etc.) — presumably intentional
     * ("fresh" copy), but confirm against callers.
     */
    public SquidRatiosModel copy() {
        SquidRatiosModel copy = new SquidRatiosModel(numerator, denominator, reportingOrderIndex);
        return copy;
    }

    /**
     * Linear search of the sorted set for the model whose ratioName matches.
     *
     * @param isotopicRatios set to search
     * @param ratioName exact name, e.g. "206Pb/238U"
     * @return the matching model, or null if none
     */
    public static SquidRatiosModel findSquidRatiosModelByName(SortedSet<SquidRatiosModel> isotopicRatios, String ratioName) {
        SquidRatiosModel retVal = null;
        Iterator<SquidRatiosModel> iterator = isotopicRatios.iterator();
        while (iterator.hasNext()) {
            SquidRatiosModel model = iterator.next();
            if (model.getRatioName().equals(ratioName)) {
                retVal = model;
                break;
            }
        }
        return retVal;
    }

    /**
     * @return the ratioName
     */
    public String getRatioName() {
        return ratioName;
    }

    /**
     * @param ratioName the ratioName to set
     */
    public void setRatioName(String ratioName) {
        this.ratioName = ratioName;
    }

    /**
     * @return display form rebuilt from the current species names
     */
    public String getDisplayNameNoSpaces() {
        return numerator.getIsotopeName() + "/" + denominator.getIsotopeName();
    }

    /**
     * @return the numerator
     */
    public SquidSpeciesModel getNumerator() {
        return numerator;
    }

    /**
     * @param numerator the numerator to set
     */
    public void setNumerator(SquidSpeciesModel numerator) {
        this.numerator = numerator;
    }

    /**
     * @return the denominator
     */
    public SquidSpeciesModel getDenominator() {
        return denominator;
    }

    /**
     * @param denominator the denominator to set
     */
    public void setDenominator(SquidSpeciesModel denominator) {
        this.denominator = denominator;
    }

    /**
     * @return the reportingOrderIndex
     */
    public int getReportingOrderIndex() {
        return reportingOrderIndex;
    }

    /**
     * @return the ratEqTime
     */
    public List<Double> getRatEqTime() {
        return ratEqTime;
    }

    /**
     * @param ratEqTime the ratEqTime to set
     */
    public void setRatEqTime(List<Double> ratEqTime) {
        this.ratEqTime = ratEqTime;
    }

    /**
     * @return the ratEqVal
     */
    public List<Double> getRatEqVal() {
        return ratEqVal;
    }

    /**
     * @param ratEqVal the ratEqVal to set
     */
    public void setRatEqVal(List<Double> ratEqVal) {
        this.ratEqVal = ratEqVal;
    }

    /**
     * @return the ratEqErr
     */
    public List<Double> getRatEqErr() {
        return ratEqErr;
    }

    /**
     * @param ratEqErr the ratEqErr to set
     */
    public void setRatEqErr(List<Double> ratEqErr) {
        this.ratEqErr = ratEqErr;
    }

    /**
     * @return the ratioVal
     */
    public double getRatioVal() {
        return ratioVal;
    }

    /**
     * Sets the ratio value and resets the "used" value to match it.
     *
     * @param ratioVal the ratioVal to set
     */
    public void setRatioVal(double ratioVal) {
        this.ratioVal = ratioVal;
        this.ratioValUsed = ratioVal;
    }

    /**
     * @return the ratioFractErr (one sigma absolute)
     */
    public double getRatioFractErr() {
        return ratioFractErr;
    }

    /**
     * @return the used uncertainty expressed as one sigma percent.
     * NOTE(review): yields NaN/Infinity when the used ratio value is 0 —
     * confirm callers tolerate that.
     */
    public double getRatioFractErrUsedAsOneSigmaPercent() {
        // use of getters provides backward compatibility
        return StrictMath.abs(getRatioFractErrUsed() / getRatioValUsed() * 100.0);
    }

    /**
     * @param ratioFractErr the ratioFractErr to set, supplied as the
     *        1 sigma percent uncertainty divided by 100
     */
    public void setRatioFractErr(double ratioFractErr) {
        // april 2017 introduce Squid2.5 upper limit
        // the value supplied is the 1 sigma percent uncertainty divided by 100
        // we choose to store the 1 sigma absolute as ratioFracErr
        // first determine if above Squid25 limits
        double ratioFraErrFiltered
                = ((StrictMath.abs(ratioFractErr) * 100.0) > SQUID_UPPER_LIMIT_1_SIGMA_PERCENT)
                ? (SQUID_UPPER_LIMIT_1_SIGMA_PERCENT / 100.0)
                : StrictMath.abs(ratioFractErr);
        this.ratioFractErr = ratioFraErrFiltered * ratioVal;
        this.ratioFractErrUsed = ratioFraErrFiltered * ratioVal;
    }

    /**
     * @return the ratioValUsed, lazily defaulted to ratioVal (backward
     *         compatibility with serialized pre-July-2019 instances)
     */
    public double getRatioValUsed() {
        if (ratioValUsed == 0.0) {
            ratioValUsed = ratioVal;
        }
        return ratioValUsed;
    }

    /**
     * @param ratioValUsed the ratioValUsed to set
     */
    public void setRatioValUsed(double ratioValUsed) {
        this.ratioValUsed = ratioValUsed;
    }

    /**
     * @return the ratioFractErrUsed, lazily defaulted to ratioFractErr
     */
    public double getRatioFractErrUsed() {
        if (ratioFractErrUsed == 0.0) {
            ratioFractErrUsed = ratioFractErr;
        }
        return ratioFractErrUsed;
    }

    /**
     * @param ratioFractErrUsed the ratioFractErrUsed to set
     */
    public void setRatioFractErrUsed(double ratioFractErrUsed) {
        this.ratioFractErrUsed = ratioFractErrUsed;
    }

    /**
     * Restores original ratio and uncertainty to fields used by expression evaluator
     */
    public void restoreRatioValueAndUnct() {
        this.ratioValUsed = ratioVal;
        this.ratioFractErrUsed = ratioFractErr;
    }

    /**
     * @return the minIndex
     */
    public int getMinIndex() {
        return minIndex;
    }

    /**
     * @param minIndex the minIndex to set
     */
    public void setMinIndex(int minIndex) {
        this.minIndex = minIndex;
    }

    /**
     * @return the active
     */
    public boolean isActive() {
        return active;
    }

    /**
     * @param active the active to set
     */
    public void setActive(boolean active) {
        this.active = active;
    }
}
package com.codepath.apps.mytwitter.adapter;

import android.content.Context;
import android.support.v4.graphics.drawable.RoundedBitmapDrawable;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;

import com.bumptech.glide.Glide;
import com.codepath.apps.mytwitter.R;
import com.codepath.apps.mytwitter.Utity.DateUtity;
import com.codepath.apps.mytwitter.models.Tweet;

import java.util.ArrayList;
import java.util.List;

import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;
import jp.wasabeef.glide.transformations.RoundedCornersTransformation;

/**
 * RecyclerView adapter for the tweet timeline.  Renders two row types
 * (with/without an attached image) and forwards user actions (click, like,
 * share, endless-scroll load-more) to the optional {@link Listener}.
 *
 * Created by Admin on 7/3/2017.
 */
public class TweetAdapter extends RecyclerView.Adapter<RecyclerView.ViewHolder> {

    private List<Tweet> mTweets;
    private static final int NO_IMAGE = 0;
    private static final int HAVE_IMAGE = 1;
    private Context mContext;
    private Listener mListener;
    // Position of the row whose like/share action is in flight; used by
    // updateData() to refresh just that row when the network call returns.
    private int indexItem = 0;

    public void setmListener(Listener mListener) {
        this.mListener = mListener;
    }

    public TweetAdapter(Context context) {
        mContext = context;
        mTweets = new ArrayList<>();
    }

    /** Replaces the whole data set (e.g. pull-to-refresh). */
    public void setData(List<Tweet> tweets) {
        mTweets.clear();
        mTweets.addAll(tweets);
        notifyDataSetChanged();
    }

    /** Appends a page of tweets (endless scroll). */
    public void addData(List<Tweet> tweets) {
        int pos = mTweets.size();
        mTweets.addAll(pos, tweets);
        // FIX: the second argument is the number of inserted items, not the
        // new total size — passing mTweets.size() made RecyclerView animate
        // and rebind rows that never changed.
        notifyItemRangeInserted(pos, tweets.size());
    }

    /** Replaces the tweet at the last-interacted position (like/unlike result). */
    public void updateData(Tweet tweet) {
        mTweets.set(indexItem, tweet);
        notifyItemChanged(indexItem);
    }

    @Override
    public int getItemViewType(int position) {
        Tweet tweet = mTweets.get(position);
        if (tweet.getMedia() != null) {
            return HAVE_IMAGE;
        }
        return NO_IMAGE;
    }

    @Override
    public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
        if (viewType == HAVE_IMAGE) {
            View view = LayoutInflater.from(mContext).inflate(R.layout.item_image, parent, false);
            return new TweetImage(view);
        } else {
            View view = LayoutInflater.from(mContext).inflate(R.layout.item_no_image, parent, false);
            return new TweetNoImage(view);
        }
    }

    @Override
    public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) {
        final Tweet tweet = mTweets.get(position);
        if (holder instanceof TweetImage) {
            bindViewTweetImage((TweetImage) holder, tweet);
        } else if (holder instanceof TweetNoImage) {
            bindViewTweetNoImage((TweetNoImage) holder, tweet);
        }
        holder.itemView.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                if (mListener != null) {
                    mListener.onClickItem(tweet);
                }
            }
        });
        // FIX: guard against a null listener, consistent with onClickItem
        // above — previously this NPE'd if no listener was set.
        if (position == mTweets.size() - 1 && mListener != null) {
            mListener.onLoadMore();
        }
    }

    private void bindViewTweetNoImage(TweetNoImage holder, Tweet tweet) {
        holder.tvContent.setText(tweet.getContent());
        holder.tvName.setText(tweet.getUser().getName());
        holder.tvNickName.setText("@" + tweet.getUser().getScreenName());
        holder.tvNumberOfLike.setText(tweet.getFavouritecount() + "");
        holder.tvNumberOfShare.setText(tweet.getRetweetcount() + "");
        holder.tvTime.setText(DateUtity.parseDate(tweet.getCreateAt()));
        holder.ibLike.setImageResource((tweet.isFavorite() ? R.drawable.ic_like : R.drawable.ic_no_like));
        holder.ibShare.setImageResource(tweet.isRetweet() ? R.drawable.ic_repeat : R.drawable.ic_no_repeat);
        holder.ibLike.setOnClickListener(v -> {
            indexItem = mTweets.indexOf(tweet);
            if (tweet.isFavorite()) {
                mListener.unLike(tweet.getId());
            } else {
                mListener.onLike(tweet.getId());
            }
        });
        // NOTE(review): this row type only opens the share dialog for
        // not-yet-retweeted tweets, while the image row always opens it —
        // confirm the asymmetry is intentional.
        holder.ibShare.setOnClickListener(v -> {
            indexItem = mTweets.indexOf(tweet);
            if (!tweet.isRetweet()) {
                mListener.onShowDialogShare(tweet);
            }
        });
        Glide.with(mContext)
                .load(tweet.getUser().getUrlAvatar())
                .bitmapTransform(new RoundedCornersTransformation(mContext, 3, 2))
                .into(holder.ivAvatar);
    }

    private void bindViewTweetImage(TweetImage holder, Tweet tweet) {
        holder.tvContent.setText(tweet.getContent());
        holder.tvName.setText(tweet.getUser().getName());
        holder.tvNickName.setText("@" + tweet.getUser().getScreenName());
        holder.tvNumberOfLike.setText(tweet.getFavouritecount() + "");
        holder.tvNumberOfShare.setText(tweet.getRetweetcount() + "");
        holder.tvTime.setText(DateUtity.parseDate(tweet.getCreateAt()));
        holder.ibLike.setImageResource((tweet.isFavorite() ? R.drawable.ic_like : R.drawable.ic_no_like));
        holder.ibShare.setImageResource(tweet.isRetweet() ? R.drawable.ic_repeat : R.drawable.ic_no_repeat);
        holder.ibLike.setOnClickListener(v -> {
            indexItem = mTweets.indexOf(tweet);
            if (tweet.isFavorite()) {
                mListener.unLike(tweet.getId());
            } else {
                mListener.onLike(tweet.getId());
            }
        });
        holder.ibShare.setOnClickListener(v -> {
            indexItem = mTweets.indexOf(tweet);
            mListener.onShowDialogShare(tweet);
        });
        Glide.with(mContext)
                .load(tweet.getUser().getUrlAvatar())
                .bitmapTransform(new RoundedCornersTransformation(mContext, 3, 2))
                .into(holder.ivAvatar);
        Glide.with(mContext)
                .load(tweet.getMedia().getUrl())
                .bitmapTransform(new RoundedCornersTransformation(mContext, 10, 2))
                .into(holder.ivImage);
    }

    @Override
    public int getItemCount() {
        return mTweets.size();
    }

    /** Prepends a freshly-posted tweet to the top of the timeline. */
    public void addOneTweet(Tweet tweet) {
        mTweets.add(0, tweet);
        // FIX: this is an insertion, not an in-place change —
        // notifyItemChanged(0) left the list one row short until the next
        // full rebind.
        notifyItemInserted(0);
    }

    /** Callbacks for user interactions on timeline rows. */
    public interface Listener {
        void onClickItem(Tweet tweet);

        void onLoadMore();

        void onLike(long idTweet);

        void unLike(long idTweet);

        void onShowDialogShare(Tweet tweet);

        void onReply(long idTweet);
    }

    /** Row holder for tweets without an attached image. */
    class TweetNoImage extends RecyclerView.ViewHolder {
        @BindView(R.id.ibLike)
        ImageButton ibLike;
        @BindView(R.id.ibReply)
        ImageButton ibReply;
        @BindView(R.id.ibShare)
        ImageButton ibShare;
        @BindView(R.id.tvContent)
        TextView tvContent;
        @BindView(R.id.tvNumberOfShare)
        TextView tvNumberOfShare;
        @BindView(R.id.tvLike)
        TextView tvNumberOfLike;
        @BindView(R.id.tvName)
        TextView tvName;
        @BindView(R.id.tvNickName)
        TextView tvNickName;
        @BindView(R.id.tvTime)
        TextView tvTime;
        @BindView(R.id.ivAvatar)
        ImageView ivAvatar;

        public TweetNoImage(View itemView) {
            super(itemView);
            ButterKnife.bind(this, itemView);
        }
    }

    /** Row holder for tweets with an attached image. */
    class TweetImage extends RecyclerView.ViewHolder {
        @BindView(R.id.ibLike)
        ImageButton ibLike;
        @BindView(R.id.ibReply)
        ImageButton ibReply;
        @BindView(R.id.ibShare)
        ImageButton ibShare;
        @BindView(R.id.tvContent)
        TextView tvContent;
        @BindView(R.id.tvNumberOfShare)
        TextView tvNumberOfShare;
        @BindView(R.id.tvLike)
        TextView tvNumberOfLike;
        @BindView(R.id.tvName)
        TextView tvName;
        @BindView(R.id.tvNickName)
        TextView tvNickName;
        @BindView(R.id.tvTime)
        TextView tvTime;
        @BindView(R.id.ivAvatar)
        ImageView ivAvatar;
        // Renamed from "ivIamge" (typo); the ButterKnife binding id is
        // unchanged so layout wiring is unaffected.
        @BindView(R.id.ivImage)
        ImageView ivImage;

        public TweetImage(View itemView) {
            super(itemView);
            ButterKnife.bind(this, itemView);
        }
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.server.router.webapp; import static javax.servlet.http.HttpServletResponse.SC_NO_CONTENT; import static javax.servlet.http.HttpServletResponse.SC_OK; import java.io.IOException; import java.security.PrivilegedExceptionAction; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import javax.servlet.http.HttpServletRequest; import javax.ws.rs.core.HttpHeaders; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.MultivaluedMap; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.ResponseBuilder; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.server.resourcemanager.webapp.RMWebAppUtil; import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.AppInfo; import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.AppsInfo; import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.ClusterMetricsInfo; import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.NodeInfo; import 
org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.NodesInfo; import org.apache.hadoop.yarn.server.uam.UnmanagedApplicationManager; import org.apache.hadoop.yarn.webapp.BadRequestException; import org.apache.hadoop.yarn.webapp.ForbiddenException; import org.apache.hadoop.yarn.webapp.NotFoundException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.sun.jersey.api.ConflictException; import com.sun.jersey.api.client.Client; import com.sun.jersey.api.client.ClientResponse; import com.sun.jersey.api.client.WebResource; import com.sun.jersey.api.client.WebResource.Builder; import com.sun.jersey.core.util.MultivaluedMapImpl; /** * The Router webservice util class. */ public final class RouterWebServiceUtil { private static String user = "YarnRouter"; private static final Logger LOG = LoggerFactory.getLogger(RouterWebServiceUtil.class.getName()); private final static String PARTIAL_REPORT = "Partial Report "; /** Disable constructor. */ private RouterWebServiceUtil() { } /** * Creates and performs a REST call to a specific WebService. * * @param webApp the address of the remote webap * @param hsr the servlet request * @param returnType the return type of the REST call * @param <T> Type of return object. 
* @param method the HTTP method of the REST call * @param targetPath additional path to add to the webapp address * @param formParam the form parameters as input for a specific REST call * @param additionalParam the query parameters as input for a specific REST * call in case the call has no servlet request * @return the retrieved entity from the REST call */ protected static <T> T genericForward( final String webApp, final HttpServletRequest hsr, final Class<T> returnType, final HTTPMethods method, final String targetPath, final Object formParam, final Map<String, String[]> additionalParam) { UserGroupInformation callerUGI = null; if (hsr != null) { callerUGI = RMWebAppUtil.getCallerUserGroupInformation(hsr, true); } else { // user not required callerUGI = UserGroupInformation.createRemoteUser(user); } if (callerUGI == null) { LOG.error("Unable to obtain user name, user not authenticated"); return null; } try { return callerUGI.doAs(new PrivilegedExceptionAction<T>() { @SuppressWarnings("unchecked") @Override public T run() { Map<String, String[]> paramMap = null; // We can have hsr or additionalParam. There are no case with both. if (hsr != null) { paramMap = hsr.getParameterMap(); } else if (additionalParam != null) { paramMap = additionalParam; } ClientResponse response = RouterWebServiceUtil.invokeRMWebService( webApp, targetPath, method, (hsr == null) ? 
null : hsr.getPathInfo(), paramMap, formParam, getMediaTypeFromHttpServletRequest(hsr, returnType)); if (Response.class.equals(returnType)) { return (T) RouterWebServiceUtil.clientResponseToResponse(response); } // YARN RM can answer with Status.OK or it throws an exception if (response.getStatus() == SC_OK) { return response.getEntity(returnType); } if (response.getStatus() == SC_NO_CONTENT) { try { return returnType.getConstructor().newInstance(); } catch (RuntimeException | ReflectiveOperationException e) { LOG.error("Cannot create empty entity for {}", returnType, e); } } RouterWebServiceUtil.retrieveException(response); return null; } }); } catch (InterruptedException e) { return null; } catch (IOException e) { return null; } } /** * Performs an invocation of a REST call on a remote RMWebService. * * @param additionalParam */ private static ClientResponse invokeRMWebService(String webApp, String path, HTTPMethods method, String additionalPath, Map<String, String[]> queryParams, Object formParam, String mediaType) { Client client = Client.create(); WebResource webResource = client.resource(webApp).path(path); if (additionalPath != null && !additionalPath.isEmpty()) { webResource = webResource.path(additionalPath); } if (queryParams != null && !queryParams.isEmpty()) { MultivaluedMap<String, String> paramMap = new MultivaluedMapImpl(); for (Entry<String, String[]> param : queryParams.entrySet()) { String[] values = param.getValue(); for (int i = 0; i < values.length; i++) { paramMap.add(param.getKey(), values[i]); } } webResource = webResource.queryParams(paramMap); } Builder builder = null; if (formParam != null) { builder = webResource.entity(formParam, mediaType); builder = builder.accept(mediaType); } else { builder = webResource.accept(mediaType); } ClientResponse response = null; switch (method) { case DELETE: response = builder.delete(ClientResponse.class); break; case GET: response = builder.get(ClientResponse.class); break; case POST: response = 
builder.post(ClientResponse.class); break; case PUT: response = builder.put(ClientResponse.class); break; default: break; } return response; } public static Response clientResponseToResponse(ClientResponse r) { if (r == null) { return null; } // copy the status code ResponseBuilder rb = Response.status(r.getStatus()); // copy all the headers for (Entry<String, List<String>> entry : r.getHeaders().entrySet()) { for (String value : entry.getValue()) { rb.header(entry.getKey(), value); } } // copy the entity rb.entity(r.getEntityInputStream()); // return the response return rb.build(); } public static void retrieveException(ClientResponse response) { String serverErrorMsg = response.getEntity(String.class); int status = response.getStatus(); if (status == 400) { throw new BadRequestException(serverErrorMsg); } if (status == 403) { throw new ForbiddenException(serverErrorMsg); } if (status == 404) { throw new NotFoundException(serverErrorMsg); } if (status == 409) { throw new ConflictException(serverErrorMsg); } } /** * Merges a list of AppInfo grouping by ApplicationId. Our current policy is * to merge the application reports from the reacheable SubClusters. Via * configuration parameter, we decide whether to return applications for which * the primary AM is missing or to omit them. 
* * @param appsInfo a list of AppInfo to merge * @param returnPartialResult if the merge AppsInfo should contain partial * result or not * @return the merged AppsInfo */ public static AppsInfo mergeAppsInfo(ArrayList<AppInfo> appsInfo, boolean returnPartialResult) { AppsInfo allApps = new AppsInfo(); Map<String, AppInfo> federationAM = new HashMap<String, AppInfo>(); Map<String, AppInfo> federationUAMSum = new HashMap<String, AppInfo>(); for (AppInfo a : appsInfo) { // Check if this AppInfo is an AM if (a.getAMHostHttpAddress() != null) { // Insert in the list of AM federationAM.put(a.getAppId(), a); // Check if there are any UAM found before if (federationUAMSum.containsKey(a.getAppId())) { // Merge the current AM with the found UAM mergeAMWithUAM(a, federationUAMSum.get(a.getAppId())); // Remove the sum of the UAMs federationUAMSum.remove(a.getAppId()); } // This AppInfo is an UAM } else { if (federationAM.containsKey(a.getAppId())) { // Merge the current UAM with its own AM mergeAMWithUAM(federationAM.get(a.getAppId()), a); } else if (federationUAMSum.containsKey(a.getAppId())) { // Merge the current UAM with its own UAM and update the list of UAM federationUAMSum.put(a.getAppId(), mergeUAMWithUAM(federationUAMSum.get(a.getAppId()), a)); } else { // Insert in the list of UAM federationUAMSum.put(a.getAppId(), a); } } } // Check the remaining UAMs are depending or not from federation for (AppInfo a : federationUAMSum.values()) { if (returnPartialResult || (a.getName() != null && !(a.getName().startsWith(UnmanagedApplicationManager.APP_NAME) || a.getName().startsWith(PARTIAL_REPORT)))) { federationAM.put(a.getAppId(), a); } } allApps.addAll(new ArrayList<AppInfo>(federationAM.values())); return allApps; } private static AppInfo mergeUAMWithUAM(AppInfo uam1, AppInfo uam2) { AppInfo partialReport = new AppInfo(); partialReport.setAppId(uam1.getAppId()); partialReport.setName(PARTIAL_REPORT + uam1.getAppId()); // We pick the status of the first uam 
partialReport.setState(uam1.getState()); // Merge the newly partial AM with UAM1 and then with UAM2 mergeAMWithUAM(partialReport, uam1); mergeAMWithUAM(partialReport, uam2); return partialReport; } private static void mergeAMWithUAM(AppInfo am, AppInfo uam) { am.setPreemptedResourceMB( am.getPreemptedResourceMB() + uam.getPreemptedResourceMB()); am.setPreemptedResourceVCores( am.getPreemptedResourceVCores() + uam.getPreemptedResourceVCores()); am.setNumNonAMContainerPreempted(am.getNumNonAMContainerPreempted() + uam.getNumNonAMContainerPreempted()); am.setNumAMContainerPreempted( am.getNumAMContainerPreempted() + uam.getNumAMContainerPreempted()); am.setPreemptedMemorySeconds( am.getPreemptedMemorySeconds() + uam.getPreemptedMemorySeconds()); am.setPreemptedVcoreSeconds( am.getPreemptedVcoreSeconds() + uam.getPreemptedVcoreSeconds()); if (am.getState() == YarnApplicationState.RUNNING && uam.getState() == am.getState()) { am.getResourceRequests().addAll(uam.getResourceRequests()); am.setAllocatedMB(am.getAllocatedMB() + uam.getAllocatedMB()); am.setAllocatedVCores(am.getAllocatedVCores() + uam.getAllocatedVCores()); am.setReservedMB(am.getReservedMB() + uam.getReservedMB()); am.setReservedVCores(am.getReservedVCores() + uam.getReservedMB()); am.setRunningContainers( am.getRunningContainers() + uam.getRunningContainers()); am.setMemorySeconds(am.getMemorySeconds() + uam.getMemorySeconds()); am.setVcoreSeconds(am.getVcoreSeconds() + uam.getVcoreSeconds()); } } /** * Deletes all the duplicate NodeInfo by discarding the old instances. 
 *
 * @param nodes a list of NodeInfo to check for duplicates
 * @return a NodesInfo that contains a list of NodeInfos without duplicates
 */
public static NodesInfo deleteDuplicateNodesInfo(ArrayList<NodeInfo> nodes) {
  NodesInfo nodesInfo = new NodesInfo();
  // LinkedHashMap keeps first-seen order of the node ids in the output.
  Map<String, NodeInfo> nodesMap = new LinkedHashMap<>();
  for (NodeInfo node : nodes) {
    String nodeId = node.getNodeId();
    // If the node already exists, it could be an old instance
    if (nodesMap.containsKey(nodeId)) {
      // Check if the node is an old instance: keep the report with the
      // more recent health update timestamp.
      if (nodesMap.get(nodeId).getLastHealthUpdate() < node
          .getLastHealthUpdate()) {
        nodesMap.put(node.getNodeId(), node);
      }
    } else {
      nodesMap.put(node.getNodeId(), node);
    }
  }
  nodesInfo.addAll(new ArrayList<NodeInfo>(nodesMap.values()));
  return nodesInfo;
}

/**
 * Adds all the values from the second ClusterMetricsInfo to the first one.
 * Every counter/gauge is summed field by field; the first argument is
 * mutated in place.
 *
 * @param metrics the ClusterMetricsInfo we want to update
 * @param metricsResponse the ClusterMetricsInfo we want to add to the first
 *          param
 */
public static void mergeMetrics(ClusterMetricsInfo metrics,
    ClusterMetricsInfo metricsResponse) {
  // Application counters
  metrics.setAppsSubmitted(
      metrics.getAppsSubmitted() + metricsResponse.getAppsSubmitted());
  metrics.setAppsCompleted(
      metrics.getAppsCompleted() + metricsResponse.getAppsCompleted());
  metrics.setAppsPending(
      metrics.getAppsPending() + metricsResponse.getAppsPending());
  metrics.setAppsRunning(
      metrics.getAppsRunning() + metricsResponse.getAppsRunning());
  metrics.setAppsFailed(
      metrics.getAppsFailed() + metricsResponse.getAppsFailed());
  metrics.setAppsKilled(
      metrics.getAppsKilled() + metricsResponse.getAppsKilled());
  // Memory (MB)
  metrics.setReservedMB(
      metrics.getReservedMB() + metricsResponse.getReservedMB());
  metrics.setAvailableMB(
      metrics.getAvailableMB() + metricsResponse.getAvailableMB());
  metrics.setAllocatedMB(
      metrics.getAllocatedMB() + metricsResponse.getAllocatedMB());
  // Virtual cores
  metrics.setReservedVirtualCores(metrics.getReservedVirtualCores()
      + metricsResponse.getReservedVirtualCores());
  metrics.setAvailableVirtualCores(metrics.getAvailableVirtualCores()
      + metricsResponse.getAvailableVirtualCores());
  metrics.setAllocatedVirtualCores(metrics.getAllocatedVirtualCores()
      + metricsResponse.getAllocatedVirtualCores());
  // Containers
  metrics.setContainersAllocated(metrics.getContainersAllocated()
      + metricsResponse.getContainersAllocated());
  metrics.setContainersReserved(metrics.getReservedContainers()
      + metricsResponse.getReservedContainers());
  metrics.setContainersPending(metrics.getPendingContainers()
      + metricsResponse.getPendingContainers());
  // Totals
  metrics.setTotalMB(metrics.getTotalMB() + metricsResponse.getTotalMB());
  metrics.setTotalVirtualCores(metrics.getTotalVirtualCores()
      + metricsResponse.getTotalVirtualCores());
  metrics.setTotalNodes(metrics.getTotalNodes()
      + metricsResponse.getTotalNodes());
  // Node-state counters
  metrics.setLostNodes(metrics.getLostNodes()
      + metricsResponse.getLostNodes());
  metrics.setUnhealthyNodes(metrics.getUnhealthyNodes()
      + metricsResponse.getUnhealthyNodes());
  metrics.setDecommissioningNodes(metrics.getDecommissioningNodes()
      + metricsResponse.getDecommissioningNodes());
  metrics.setDecommissionedNodes(metrics.getDecommissionedNodes()
      + metricsResponse.getDecommissionedNodes());
  metrics.setRebootedNodes(metrics.getRebootedNodes()
      + metricsResponse.getRebootedNodes());
  metrics.setActiveNodes(metrics.getActiveNodes()
      + metricsResponse.getActiveNodes());
  metrics.setShutdownNodes(metrics.getShutdownNodes()
      + metricsResponse.getShutdownNodes());
}

/**
 * Extract from HttpServletRequest the MediaType in output.
*/ protected static <T> String getMediaTypeFromHttpServletRequest( HttpServletRequest request, final Class<T> returnType) { if (request == null) { // By default we return XML for REST call without HttpServletRequest return MediaType.APPLICATION_XML; } // TODO if (!returnType.equals(Response.class)) { return MediaType.APPLICATION_XML; } String header = request.getHeader(HttpHeaders.ACCEPT); if (header == null || header.equals("*")) { // By default we return JSON return MediaType.APPLICATION_JSON; } return header; } }
package nl.tno.sensorstorm.storm; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import nl.tno.sensorstorm.api.annotation.OperationDeclaration; import nl.tno.sensorstorm.api.particles.DataParticle; import nl.tno.sensorstorm.api.particles.MetaParticle; import nl.tno.sensorstorm.api.particles.Particle; import nl.tno.sensorstorm.api.processing.Batcher; import nl.tno.sensorstorm.api.processing.Operation; import nl.tno.sensorstorm.api.processing.ParticleBatchOperation; import nl.tno.sensorstorm.api.processing.SingleParticleOperation; import nl.tno.sensorstorm.config.EmptyStormConfiguration; import nl.tno.sensorstorm.impl.FlushingSyncBuffer; import nl.tno.sensorstorm.impl.MetaParticleUtil; import nl.tno.sensorstorm.impl.OperationManager; import nl.tno.sensorstorm.impl.SyncBuffer; import nl.tno.sensorstorm.particlemapper.ParticleMapper; import nl.tno.storm.configuration.api.ExternalStormConfiguration; import nl.tno.storm.configuration.api.StormConfigurationException; import nl.tno.storm.configuration.impl.ZookeeperStormConfigurationFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import backtype.storm.Config; import backtype.storm.metric.api.CountMetric; import backtype.storm.task.OutputCollector; import backtype.storm.task.TopologyContext; import backtype.storm.topology.OutputFieldsDeclarer; import backtype.storm.topology.base.BaseRichBolt; import backtype.storm.tuple.Fields; import backtype.storm.tuple.Tuple; /** * This is a generic Bolt for the SensorStorm library. There are several * configurations of this bolt possible. There are different constructors * different configurations. On top a {@link SensorStormBolt} you can run an * {@link Operation} and optionally a {@link Batcher}. * <p> * You can use this Bolt without a {@link Batcher}. In that case you can run a * {@link SingleParticleOperation} on top of this Bolt. 
 * <p>
 * You can also put a {@link Batcher} on this bolt. In that case you have to run
 * a {@link ParticleBatchOperation} on top of this Bolt.
 * <p>
 * Additionally, you can choose to have one instance of the {@link Operation}
 * (and possibly the {@link Batcher}), or you can have a separate
 * {@link Operation} (and possible {@link Batcher}) for each value of a field in
 * the {@link Tuple} (e.g. the sensor id). If you do this, you usually want to
 * use a {@link SensorStormFieldGrouping} with the same field name on the Spout
 * or Bolt before this Bolt.
 */
public class SensorStormBolt extends BaseRichBolt {

	// ///////////// //
	// Static fields //
	// ///////////// //

	private static final long serialVersionUID = -5109656134961759532L;
	private static final Logger logger = LoggerFactory
			.getLogger(SensorStormBolt.class);
	// Metric reporting window (seconds) for the syncbuffer_rejects counter.
	private static final int TIME_BUCKET_SIZE_IN_SECS = 10;
	// Default SyncBuffer size in milliseconds (see the short constructors).
	private static final int DEFAULT_SYNC_BUFFER_SIZE = 1000;

	// ///////////////////////// //
	// Fields set in constructor //
	// ///////////////////////// //

	// Size of the SyncBuffer in milliseconds.
	protected long syncBufferSize;
	// Batcher class, or null when running a SingleParticleOperation.
	protected Class<? extends Batcher> batcherClass;
	protected Class<? extends Operation> operationClass;
	// Tuple field to group operation instances on; null = single instance.
	protected String fieldGrouperId;
	protected Fields metaParticleFields;

	// ///////////////////// //
	// Fields set in prepare //
	// ///////////////////// //

	@SuppressWarnings("rawtypes")
	protected transient Map stormNativeConfig;
	protected transient OutputCollector collector;
	// Identifier of this bolt instance, stamped on emitted MetaParticles.
	protected transient String originId;
	protected transient ExternalStormConfiguration zookeeperStormConfiguration;
	// Reorders incoming particles by timestamp before processing.
	protected transient SyncBuffer syncBuffer;
	// One OperationManager per fieldGrouper value (key null = single instance).
	protected transient Map<String, OperationManager> operationManagers;
	// Remembers the grouping value of each in-flight particle.
	protected transient Map<Particle, String> fieldGrouperValues;
	protected transient CountMetric bufferRejectMetric;

	// //////////// //
	// Other fields //
	// //////////// //

	protected int nrOfOutputFields;

	/**
	 * Construct a {@link SensorStormBolt} with a {@link Batcher}.
	 *
	 * @param config
	 *            Storm configuration map
	 * @param syncBufferSize
	 *            size of the SyncBuffer in milliseconds
	 * @param batcherClass
	 *            {@link Class} of the {@link Batcher} implementation
	 * @param batchOperationClass
	 *            {@link Class} of the {@link ParticleBatchOperation}
	 *            implementation
	 * @param fieldGrouperId
	 *            The tuple field whose value selects the operation instance.
	 *            To specify a single operation, one instance of the operation
	 *            class for all particles, the fieldGrouper must be null.
	 * @throws NullPointerException
	 *             When batcherClass or batchOperationClass is null
	 * @throws IllegalArgumentException
	 *             When the {@link Operation} doesn't have an
	 *             {@link OperationDeclaration} annotation
	 */
	public SensorStormBolt(Config config, long syncBufferSize,
			Class<? extends Batcher> batcherClass,
			Class<? extends ParticleBatchOperation> batchOperationClass,
			String fieldGrouperId) {
		if (batcherClass == null) {
			throw new NullPointerException("batcherClass");
		}
		if (batchOperationClass == null) {
			throw new NullPointerException("batchOperationClass");
		}
		sensorStormBolt(config, syncBufferSize, batcherClass,
				batchOperationClass, fieldGrouperId);
	}

	/**
	 * Construct a {@link SensorStormBolt} without a {@link Batcher}.
	 *
	 * @param config
	 *            Storm configuration map
	 * @param syncBufferSize
	 *            size of the SyncBuffer in milliseconds
	 * @param singleOperationClass
	 *            {@link Class} of the {@link Operation} implementation
	 * @param fieldGrouperId
	 *            The tuple field whose value selects the operation instance.
	 *            To specify a single operation, one instance of the operation
	 *            class for all particles, the fieldGrouper must be null.
	 * @throws NullPointerException
	 *             When singleOperationClass is null
	 * @throws IllegalArgumentException
	 *             When the {@link Operation} doesn't have an
	 *             {@link OperationDeclaration} annotation
	 */
	public SensorStormBolt(Config config, long syncBufferSize,
			Class<? extends SingleParticleOperation> singleOperationClass,
			String fieldGrouperId) {
		if (singleOperationClass == null) {
			throw new NullPointerException("singleOperationClass");
		}
		sensorStormBolt(config, syncBufferSize, null, singleOperationClass,
				fieldGrouperId);
	}

	/**
	 * Construct a {@link SensorStormBolt} with a {@link Batcher} and a default
	 * SyncBuffer size of 1000 milliseconds.
	 *
	 * @param config
	 *            Storm configuration map
	 * @param batcherClass
	 *            {@link Class} of the {@link Batcher} implementation
	 * @param batchOperationClass
	 *            {@link Class} of the {@link ParticleBatchOperation}
	 *            implementation
	 * @param fieldGrouperId
	 *            The tuple field whose value selects the operation instance.
	 *            To specify a single operation, one instance of the operation
	 *            class for all particles, the fieldGrouper must be null.
	 * @throws NullPointerException
	 *             When batcherClass or batchOperationClass is null
	 * @throws IllegalArgumentException
	 *             When the {@link Operation} doesn't have an
	 *             {@link OperationDeclaration} annotation
	 */
	public SensorStormBolt(Config config,
			Class<? extends Batcher> batcherClass,
			Class<? extends ParticleBatchOperation> batchOperationClass,
			String fieldGrouperId) {
		if (batcherClass == null) {
			throw new NullPointerException("batcherClass");
		}
		if (batchOperationClass == null) {
			throw new NullPointerException("batchOperationClass");
		}
		sensorStormBolt(config, DEFAULT_SYNC_BUFFER_SIZE, batcherClass,
				batchOperationClass, fieldGrouperId);
	}

	/**
	 * Construct a {@link SensorStormBolt} without a {@link Batcher} and a
	 * default SyncBuffer size of 1000 milliseconds.
	 *
	 * @param config
	 *            Storm configuration map
	 * @param singleOperationClass
	 *            {@link Class} of the {@link Operation} implementation
	 * @param fieldGrouperId
	 *            The tuple field whose value selects the operation instance.
	 *            To specify a single operation, one instance of the operation
	 *            class for all particles, the fieldGrouper must
	 *            be null.
	 * @throws NullPointerException
	 *             When singleOperationClass is null
	 * @throws IllegalArgumentException
	 *             When the {@link Operation} doesn't have an
	 *             {@link OperationDeclaration} annotation
	 */
	public SensorStormBolt(Config config,
			Class<? extends SingleParticleOperation> singleOperationClass,
			String fieldGrouperId) {
		if (singleOperationClass == null) {
			throw new NullPointerException("singleOperationClass");
		}
		sensorStormBolt(config, DEFAULT_SYNC_BUFFER_SIZE, null,
				singleOperationClass, fieldGrouperId);
	}

	/**
	 * General logic for the constructors of the {@link SingleParticleOperation}
	 * and the {@link ParticleBatchOperation}.
	 *
	 * @param config
	 *            Storm configuration map
	 * @param syncBufferSize
	 *            Size of the SyncBuffer in milliseconds
	 * @param batcherClass
	 *            Class of the {@link Batcher}
	 * @param operationClass
	 *            Class of the {@link Operation}
	 * @param fieldGrouperId
	 *            Field name to group on
	 * @throws IllegalArgumentException
	 *             When the {@link Operation} doesn't have an
	 *             {@link OperationDeclaration} annotation
	 */
	private void sensorStormBolt(Config config, long syncBufferSize,
			Class<? extends Batcher> batcherClass,
			Class<? extends Operation> operationClass, String fieldGrouperId) {
		// Set fields
		this.syncBufferSize = syncBufferSize;
		this.batcherClass = batcherClass;
		this.operationClass = operationClass;
		this.fieldGrouperId = fieldGrouperId;
		// Check annotations
		if (!operationClass.isAnnotationPresent(OperationDeclaration.class)) {
			throw new IllegalArgumentException("The Operation "
					+ operationClass.getName()
					+ " does not have an OperationDecleration");
		}
		// Initialize data structures: register the MetaParticle fields this
		// operation emits into the topology config.
		metaParticleFields = MetaParticleUtil
				.registerMetaParticleFieldsFromOperationClass(config,
						operationClass);
	}

	@Override
	public void prepare(@SuppressWarnings("rawtypes") Map stormNativeConfig,
			TopologyContext context, OutputCollector collector) {
		this.stormNativeConfig = stormNativeConfig;
		this.collector = collector;
		// Unique id of this bolt instance, used to tag emitted MetaParticles.
		originId = operationClass.getName() + "."
				+ context.getThisTaskIndex();
		// connect to the zoopkeeper configuration
		try {
			zookeeperStormConfiguration = ZookeeperStormConfigurationFactory
					.getInstance().getStormConfiguration(stormNativeConfig);
		} catch (StormConfigurationException e) {
			logger.error("Can not connect to zookeeper for get Storm configuration. Reason: "
					+ e.getMessage());
			// create empty config to avoid errors
			zookeeperStormConfiguration = new EmptyStormConfiguration();
		}

		// Build a human-readable description of this bolt for the log.
		String msg = "SensorStormBolt instance created for";
		if (fieldGrouperId == null) {
			msg = msg + " a single_instance operation class \""
					+ operationClass.getName() + "\"";
		} else {
			msg = msg + " a fieldGrouping operation class \""
					+ operationClass.getName() + "\" grouped on tuple field \""
					+ fieldGrouperId + "\"";
		}
		if (batcherClass != null) {
			msg = msg + ", with a batcher class \"" + batcherClass.getName()
					+ "\"";
		} else {
			msg = msg + ", with no batcher.";
		}
		logger.info(msg);

		syncBuffer = new FlushingSyncBuffer(syncBufferSize);
		operationManagers = new HashMap<String, OperationManager>();
		fieldGrouperValues = new HashMap<Particle, String>();
		bufferRejectMetric = new CountMetric();
		context.registerMetric("syncbuffer_rejects", bufferRejectMetric,
				TIME_BUCKET_SIZE_IN_SECS);
	}

	/**
	 * Handle the new incoming tuple.
*/ @Override public void execute(Tuple originalTuple) { // Map the Tuple to a Particle // FYI: ParticleMapper will log an error if it is not able to map Particle inputParticle = ParticleMapper.tupleToParticle(originalTuple); if (fieldGrouperId != null) { fieldGrouperValues.put(inputParticle, originalTuple.getStringByField(fieldGrouperId)); } if (inputParticle != null) { // Push the particle through the SyncBuffer try { List<Particle> particlesToProcess = syncBuffer .pushParticle(inputParticle); // Process the particles from the buffer (if any) for (Particle particle : particlesToProcess) { if (particle instanceof MetaParticle) { List<Particle> outputParticles = processMetaParticle((MetaParticle) particle); // Emit new particles (if any) emitParticles(originalTuple, outputParticles); // Pass through the current MetaParticle emitParticle(originalTuple, particle); } else if (particle instanceof DataParticle) { List<Particle> outputParticles = processDataParticle((DataParticle) particle); emitParticles(originalTuple, outputParticles); } else { // This is not a MetaParticle and not a DataParticle logger.error("Unknown particle type, not a MetaParticle or a DataParticle, but a " + particle.getClass().getName()); } } } catch (IllegalArgumentException e) { bufferRejectMetric.incr(); logger.warn("Particle with timestamp " + inputParticle.getTimestamp() + " was rejected from SyncBuffer of bolt " + originId); } } // Always acknowledge tuples collector.ack(originalTuple); } /** * Process a single DataParticle. This method sends the DataParticle to * appropriate {@link OperationManager}. 
	 *
	 * @param inputParticle
	 *            originalTuple mapped to a DataParticle
	 * @return List of output particles; null when no OperationManager could
	 *         be selected for this particle
	 */
	private List<Particle> processDataParticle(DataParticle inputParticle) {
		// deliver dataParticle to the correct operationManager
		// get an operation manager based on the value of the
		// fieldGrouperId field in the tuple, as specified in the
		// constructor
		OperationManager operationManager;
		if (fieldGrouperId == null) {
			// single instance operation mode
			operationManager = getOperationManager(null);
		} else {
			// try to select an operationManager from the value of the
			// fieldGrouperId field
			String fieldGrouperValue = fieldGrouperValues.get(inputParticle);

			if (fieldGrouperValue != null) {
				// fieldGrouperId exists
				operationManager = getOperationManager(fieldGrouperValue);
			} else {
				operationManager = null;
				logger.error("Specified fieldGrouperId "
						+ fieldGrouperId
						+ " does not exists in particle "
						+ inputParticle
						+ ". Therefore can not route it to a specific operation.");
			}
		}
		if (operationManager == null) {
			return null;
		} else {
			return operationManager.processDataParticle(inputParticle);
		}
	}

	/**
	 * Process a single MetaParticle. This method sends the MetaParticle to all
	 * {@link OperationManager}s.
	 *
	 * @param inputParticle
	 *            originalTuple mapped to a MetaParticle
	 * @return List of output particles
	 */
	private List<Particle> processMetaParticle(MetaParticle inputParticle) {
		// broadcast metaParticle to all operationManagers
		List<Particle> outputParticles = new ArrayList<Particle>();
		Collection<OperationManager> allOperationManagers = operationManagers
				.values();
		for (OperationManager operationManager : allOperationManagers) {
			List<Particle> particles = operationManager
					.processMetaParticle(inputParticle);
			if (particles != null) {
				outputParticles.addAll(particles);
			}
		}
		return outputParticles;
	}

	/**
	 * Returns the operationManager related to the fieldGrouper, or instantiate
	 * one if it was not present.
* * @param fieldGrouperValue * Name of the field to group on (null if this bolt has a * {@link SingleParticleOperation}) * @return Returns an operationManager for the fieldGrouper. Returns null in * case of an exception, this will be logged. */ @SuppressWarnings("unchecked") private OperationManager getOperationManager(String fieldGrouperValue) { OperationManager operationManager = operationManagers .get(fieldGrouperValue); try { // no operation manager present yet for the fieldGrouper if (operationManager == null) { if (SingleParticleOperation.class .isAssignableFrom(operationClass)) { // Single Operation operationManager = new OperationManager( fieldGrouperValue, (Class<? extends SingleParticleOperation>) operationClass, stormNativeConfig, zookeeperStormConfiguration); } else if (ParticleBatchOperation.class .isAssignableFrom(operationClass)) { // Batch Operation operationManager = new OperationManager( fieldGrouperValue, batcherClass, (Class<? extends ParticleBatchOperation>) operationClass, stormNativeConfig, zookeeperStormConfiguration); } else { // Apparently a new constructor is added to create a new // type of operation logger.error("Internal error, unknown operation class " + operationClass.getName()); } // register the new operation manager for this // fieldGrouperValue operationManagers.put(fieldGrouperValue, operationManager); } } catch (InstantiationException | IllegalAccessException e) { logger.error("For fieldGrouper " + fieldGrouperId + "(" + fieldGrouperValue + ") : can not create an operationManager for the operation (" + operationClass.getName() + ") msg=" + e); operationManager = null; } return operationManager; } @Override public void declareOutputFields(OutputFieldsDeclarer declarer) { // merge all output particle fields for DataParticles Fields fields = null; List<Class<? extends DataParticle>> outputParticles = OperationManager .getOutputDataParticles(operationClass); for (Class<? 
extends DataParticle> outputParticleClass : outputParticles) { fields = ParticleMapper.mergeFields(fields, ParticleMapper.getFields(outputParticleClass)); } // Add fields for MetaParticles fields = ParticleMapper.mergeFields(fields, metaParticleFields); nrOfOutputFields = fields.size(); declarer.declare(fields); } /** * Emit a particle, anchored to the anchor tuple. * * @param anchor * Tuple to anchor on * @param particle * Particle to emit */ public void emitParticle(Tuple anchor, Particle particle) { if (particle != null) { fieldGrouperValues.remove(particle); if (particle instanceof MetaParticle) { ((MetaParticle) particle).setOriginId(originId); } collector .emit(anchor, ParticleMapper.particleToValues(particle, nrOfOutputFields)); } } /** * Emit a list of particles, if the list is not null or empty. Each particle * will be anchored. * * @param anchor * Tuple to anchor on * @param particles * Particles to emit */ public void emitParticles(Tuple anchor, List<? extends Particle> particles) { if (particles != null) { for (Particle particle : particles) { emitParticle(anchor, particle); } } } }
/*
 * Copyright 2015-2017 Austin Keener & Michael Ritter
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.dv8tion.jda.core.requests.ratelimit;

import com.mashape.unirest.http.HttpResponse;
import net.dv8tion.jda.core.entities.impl.JDAImpl;
import net.dv8tion.jda.core.events.ExceptionEvent;
import net.dv8tion.jda.core.requests.RateLimiter;
import net.dv8tion.jda.core.requests.Request;
import net.dv8tion.jda.core.requests.Requester;
import net.dv8tion.jda.core.requests.Route;
import net.dv8tion.jda.core.requests.Route.CompiledRoute;
import net.dv8tion.jda.core.requests.Route.RateLimit;
import org.json.JSONObject;

import java.util.Iterator;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.TimeUnit;

/**
 * Rate limiter for client endpoints. Requests are grouped into one
 * {@link Bucket} per base route; each bucket queues its requests and is
 * scheduled on the shared pool. A 429 response either sets a per-bucket
 * retry time or the shared global cooldown.
 */
public class ClientRateLimiter extends RateLimiter
{
    // Epoch millis until which ALL buckets must wait, or null when no
    // global ratelimit is active. Written under a bucket lock, read by all.
    volatile Long globalCooldown = null;

    public ClientRateLimiter(Requester requester, int poolSize)
    {
        super(requester, poolSize);
    }

    @Override
    public Long getRateLimit(Route.CompiledRoute route)
    {
        Bucket bucket = getBucket(route);
        synchronized (bucket)
        {
            return bucket.getRateLimit();
        }
    }

    @Override
    protected void queueRequest(Request request)
    {
        if (isShutdown)
            throw new RejectedExecutionException("Cannot queue a request after shutdown");
        Bucket bucket = getBucket(request.getRoute());
        synchronized (bucket)
        {
            bucket.addToQueue(request);
        }
    }

    /**
     * Inspects a finished HTTP response; on 429 records either the global
     * cooldown or the bucket's retry time.
     *
     * @return the retry delay in millis when ratelimited, otherwise null
     */
    @Override
    protected Long handleResponse(Route.CompiledRoute route, HttpResponse<String> response)
    {
        Bucket bucket = getBucket(route);
        synchronized (bucket)
        {
            long now = System.currentTimeMillis();
            int code = response.getStatus();
            if (code == 429)
            {
                JSONObject limitObj = new JSONObject(response.getBody());
                long retryAfter = limitObj.getLong("retry_after");

                if (limitObj.has("global") && limitObj.getBoolean("global"))    //Global ratelimit
                {
                    globalCooldown = now + retryAfter;
                }
                else
                {
                    bucket.retryAfter = now + retryAfter;
                }
                return retryAfter;
            }
            else
            {
                return null;
            }
        }
    }

    // Lazily creates the bucket for a base route; double-checked under the
    // buckets-map lock so each route gets exactly one bucket.
    private Bucket getBucket(CompiledRoute route)
    {
        String baseRoute = route.getBaseRoute().getRoute();
        Bucket bucket = (Bucket) buckets.get(baseRoute);
        if (bucket == null)
        {
            synchronized (buckets)
            {
                bucket = (Bucket) buckets.get(baseRoute);
                if (bucket == null)
                {
                    bucket = new Bucket(baseRoute, route.getBaseRoute().getRatelimit());
                    buckets.put(baseRoute, bucket);
                }
            }
        }
        return bucket;
    }

    private class Bucket implements IBucket, Runnable
    {
        final String route;
        final RateLimit rateLimit;
        // Epoch millis until which this bucket must wait after a 429.
        volatile long retryAfter = 0;
        volatile ConcurrentLinkedQueue<Request> requests = new ConcurrentLinkedQueue<>();

        public Bucket(String route, RateLimit rateLimit)
        {
            this.route = route;
            this.rateLimit = rateLimit;
        }

        void addToQueue(Request request)
        {
            requests.add(request);
            submitForProcessing();
        }

        // Schedules this bucket on the pool (after any ratelimit delay),
        // unless it is already submitted.
        void submitForProcessing()
        {
            synchronized (submittedBuckets)
            {
                if (!submittedBuckets.contains(this))
                {
                    Long delay = getRateLimit();
                    if (delay == null)
                        delay = 0L;

                    pool.schedule(this, delay, TimeUnit.MILLISECONDS);
                    submittedBuckets.add(this);
                }
            }
        }

        // Returns the remaining cooldown in millis, or null when this bucket
        // may fire now. Also clears an expired global cooldown.
        Long getRateLimit()
        {
            long now = System.currentTimeMillis();
            if (globalCooldown != null) //Are we on global cooldown?
            {
                if (now > globalCooldown)   //Verify that we should still be on cooldown.
                {
                    globalCooldown = null;  //If we are done cooling down, reset the globalCooldown and continue.
                }
                else
                {
                    return globalCooldown - now;    //If we should still be on cooldown, return when we can go again.
                }
            }
            if (this.retryAfter > now)
            {
                return this.retryAfter - now;
            }
            else
            {
                return null;
            }
        }

        @Override
        public boolean equals(Object o)
        {
            if (!(o instanceof Bucket))
                return false;

            Bucket oBucket = (Bucket) o;
            return route.equals(oBucket.route);
        }

        @Override
        public int hashCode()
        {
            return route.hashCode();
        }

        // Drains the queue in order; stops early when a request comes back
        // ratelimited and re-submits itself if work remains.
        @Override
        public void run()
        {
            try
            {
                synchronized (requests)
                {
                    for (Iterator<Request> it = requests.iterator(); it.hasNext(); )
                    {
                        Request request = null;
                        try
                        {
                            request = it.next();
                            Long retryAfter = requester.execute(request);
                            if (retryAfter != null)
                            {
                                break;
                            }
                            else
                            {
                                it.remove();
                            }
                        }
                        catch (Throwable t)
                        {
                            Requester.LOG.fatal("Requester system encountered an internal error");
                            Requester.LOG.log(t);
                            it.remove();
                            if (request != null)
                                request.onFailure(t);
                        }
                    }

                    synchronized (submittedBuckets)
                    {
                        submittedBuckets.remove(this);
                        if (!requests.isEmpty())
                        {
                            try
                            {
                                this.submitForProcessing();
                            }
                            catch (RejectedExecutionException e)
                            {
                                Requester.LOG.debug("Caught RejectedExecutionException when re-queuing a ratelimited request. The requester is probably shutdown, thus, this can be ignored.");
                            }
                        }
                    }
                }
            }
            catch (Throwable err)
            {
                Requester.LOG.fatal("Requester system encountered an internal error from beyond the synchronized execution blocks. NOT GOOD!");
                Requester.LOG.log(err);
                if (err instanceof Error)
                {
                    JDAImpl api = requester.getJDA();
                    api.getEventManager().handle(new ExceptionEvent(api, err, true));
                }
            }
        }

        @Override
        public RateLimit getRatelimit()
        {
            return rateLimit;
        }

        @Override
        public String getRoute()
        {
            return route;
        }

        @Override
        public Queue<Request> getRequests()
        {
            return requests;
        }
    }
}
/******************************************************************************* * Copyright 2013 See AUTHORS file. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package com.badlogic.gdx.maps.tiled.renderers; import static com.badlogic.gdx.graphics.g2d.Batch.C1; import static com.badlogic.gdx.graphics.g2d.Batch.C2; import static com.badlogic.gdx.graphics.g2d.Batch.C3; import static com.badlogic.gdx.graphics.g2d.Batch.C4; import static com.badlogic.gdx.graphics.g2d.Batch.U1; import static com.badlogic.gdx.graphics.g2d.Batch.U2; import static com.badlogic.gdx.graphics.g2d.Batch.U3; import static com.badlogic.gdx.graphics.g2d.Batch.U4; import static com.badlogic.gdx.graphics.g2d.Batch.V1; import static com.badlogic.gdx.graphics.g2d.Batch.V2; import static com.badlogic.gdx.graphics.g2d.Batch.V3; import static com.badlogic.gdx.graphics.g2d.Batch.V4; import static com.badlogic.gdx.graphics.g2d.Batch.X1; import static com.badlogic.gdx.graphics.g2d.Batch.X2; import static com.badlogic.gdx.graphics.g2d.Batch.X3; import static com.badlogic.gdx.graphics.g2d.Batch.X4; import static com.badlogic.gdx.graphics.g2d.Batch.Y1; import static com.badlogic.gdx.graphics.g2d.Batch.Y2; import static com.badlogic.gdx.graphics.g2d.Batch.Y3; import static com.badlogic.gdx.graphics.g2d.Batch.Y4; import com.badlogic.gdx.graphics.Color; import com.badlogic.gdx.graphics.g2d.Batch; import 
com.badlogic.gdx.graphics.g2d.TextureRegion;
import com.badlogic.gdx.maps.tiled.TiledMap;
import com.badlogic.gdx.maps.tiled.TiledMapTile;
import com.badlogic.gdx.maps.tiled.TiledMapTileLayer;
import com.badlogic.gdx.maps.tiled.TiledMapTileLayer.Cell;

/** Renders a {@link TiledMap} with an orthogonal (top-down) projection,
 * culling to the view bounds and honoring per-cell flip and rotation. */
public class OrthogonalTiledMapRenderer extends BatchTiledMapRenderer {

	public OrthogonalTiledMapRenderer (TiledMap map) {
		super(map);
	}

	public OrthogonalTiledMapRenderer (TiledMap map, Batch batch) {
		super(map, batch);
	}

	public OrthogonalTiledMapRenderer (TiledMap map, float unitScale) {
		super(map, unitScale);
	}

	public OrthogonalTiledMapRenderer (TiledMap map, float unitScale, Batch batch) {
		super(map, unitScale, batch);
	}

	@Override
	public void renderTileLayer (TiledMapTileLayer layer) {
		// Layer opacity is folded into the batch color's alpha.
		final Color batchColor = batch.getColor();
		final float color = Color.toFloatBits(batchColor.r, batchColor.g, batchColor.b, batchColor.a * layer.getOpacity());

		final int layerWidth = layer.getWidth();
		final int layerHeight = layer.getHeight();

		final float layerTileWidth = layer.getTileWidth() * unitScale;
		final float layerTileHeight = layer.getTileHeight() * unitScale;

		final float layerOffsetX = layer.getOffsetX() * unitScale;
		// offset in tiled is y down, so we flip it
		final float layerOffsetY = -layer.getOffsetY() * unitScale;

		// Clamp the cell range to the visible view bounds (culling).
		final int col1 = Math.max(0, (int)((viewBounds.x - layerOffsetX) / layerTileWidth));
		final int col2 = Math.min(layerWidth,
			(int)((viewBounds.x + viewBounds.width + layerTileWidth - layerOffsetX) / layerTileWidth));

		final int row1 = Math.max(0, (int)((viewBounds.y - layerOffsetY) / layerTileHeight));
		final int row2 = Math.min(layerHeight,
			(int)((viewBounds.y + viewBounds.height + layerTileHeight - layerOffsetY) / layerTileHeight));

		// Walk rows top-to-bottom, columns left-to-right.
		float y = row2 * layerTileHeight + layerOffsetY;
		float xStart = col1 * layerTileWidth + layerOffsetX;
		final float[] vertices = this.vertices;

		for (int row = row2; row >= row1; row--) {
			float x = xStart;
			for (int col = col1; col < col2; col++) {
				final TiledMapTileLayer.Cell cell = layer.getCell(col, row);
				if (cell == null) {
					x += layerTileWidth;
					continue;
				}
				final TiledMapTile tile = cell.getTile();

				if (tile != null) {
					final boolean flipX = cell.getFlipHorizontally();
					final boolean flipY = cell.getFlipVertically();
					final int rotations = cell.getRotation();

					TextureRegion region = tile.getTextureRegion();

					// Quad corners in world units (tile offsets applied).
					float x1 = x + tile.getOffsetX() * unitScale;
					float y1 = y + tile.getOffsetY() * unitScale;
					float x2 = x1 + region.getRegionWidth() * unitScale;
					float y2 = y1 + region.getRegionHeight() * unitScale;

					float u1 = region.getU();
					float v1 = region.getV2();
					float u2 = region.getU2();
					float v2 = region.getV();

					// Fill the quad: position, packed color, UV per vertex.
					vertices[X1] = x1;
					vertices[Y1] = y1;
					vertices[C1] = color;
					vertices[U1] = u1;
					vertices[V1] = v1;

					vertices[X2] = x1;
					vertices[Y2] = y2;
					vertices[C2] = color;
					vertices[U2] = u1;
					vertices[V2] = v2;

					vertices[X3] = x2;
					vertices[Y3] = y2;
					vertices[C3] = color;
					vertices[U3] = u2;
					vertices[V3] = v2;

					vertices[X4] = x2;
					vertices[Y4] = y1;
					vertices[C4] = color;
					vertices[U4] = u2;
					vertices[V4] = v1;

					// Horizontal flip: swap U between left and right vertices.
					if (flipX) {
						float temp = vertices[U1];
						vertices[U1] = vertices[U3];
						vertices[U3] = temp;
						temp = vertices[U2];
						vertices[U2] = vertices[U4];
						vertices[U4] = temp;
					}
					// Vertical flip: swap V between top and bottom vertices.
					if (flipY) {
						float temp = vertices[V1];
						vertices[V1] = vertices[V3];
						vertices[V3] = temp;
						temp = vertices[V2];
						vertices[V2] = vertices[V4];
						vertices[V4] = temp;
					}
					// Rotation: cycle the UVs around the quad by 1, 2 or 3
					// vertices for 90/180/270 degrees.
					if (rotations != 0) {
						switch (rotations) {
						case Cell.ROTATE_90: {
							float tempV = vertices[V1];
							vertices[V1] = vertices[V2];
							vertices[V2] = vertices[V3];
							vertices[V3] = vertices[V4];
							vertices[V4] = tempV;

							float tempU = vertices[U1];
							vertices[U1] = vertices[U2];
							vertices[U2] = vertices[U3];
							vertices[U3] = vertices[U4];
							vertices[U4] = tempU;
							break;
						}
						case Cell.ROTATE_180: {
							float tempU = vertices[U1];
							vertices[U1] = vertices[U3];
							vertices[U3] = tempU;
							tempU = vertices[U2];
							vertices[U2] = vertices[U4];
							vertices[U4] = tempU;
							float tempV = vertices[V1];
							vertices[V1] = vertices[V3];
							vertices[V3] = tempV;
							tempV = vertices[V2];
							vertices[V2] = vertices[V4];
							vertices[V4] = tempV;
							break;
						}
						case Cell.ROTATE_270: {
							float tempV = vertices[V1];
							vertices[V1] = vertices[V4];
							vertices[V4] = vertices[V3];
							vertices[V3] = vertices[V2];
							vertices[V2] = tempV;

							float tempU = vertices[U1];
							vertices[U1] = vertices[U4];
							vertices[U4] = vertices[U3];
							vertices[U3] = vertices[U2];
							vertices[U2] = tempU;
							break;
						}
						}
					}
					batch.draw(region.getTexture(), vertices, 0, NUM_VERTICES);
				}
				x += layerTileWidth;
			}
			y -= layerTileHeight;
		}
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.solr.search.facet;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.function.IntFunction;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.apache.solr.schema.SchemaField;

/**
 * Accumulates stats separated by slot number for the fields with {@link
 * org.apache.lucene.index.DocValues}
 */
public abstract class DocValuesAcc extends SlotAcc {
  // Schema field whose doc values are being accumulated.
  SchemaField sf;

  public DocValuesAcc(FacetContext fcontext, SchemaField sf) throws IOException {
    super(fcontext);
    this.sf = sf;
  }

  @Override
  public void collect(int doc, int slot, IntFunction<SlotContext> slotContext) throws IOException {
    // Only accumulate for docs that actually have a value for this field.
    if (advanceExact(doc)) {
      collectValues(doc, slot);
    }
  }

  /** Accumulates the current doc's value(s) into the given slot; called only after
   * {@link #advanceExact(int)} returned {@code true}. */
  protected abstract void collectValues(int doc, int slot) throws IOException;

  /**
   * Wrapper to {@code org.apache.lucene.index.DocValuesIterator#advanceExact(int)} returns whether
   * or not given {@code doc} has value
   */
  protected abstract boolean advanceExact(int doc) throws IOException;

  /** Accumulator for {@link NumericDocValues} */
  // NOTE(review): non-static inner class (carries a hidden reference to the enclosing acc),
  // while the Sorted* siblings below are static — confirm whether the asymmetry is intentional.
  abstract class NumericDVAcc extends DocValuesAcc {
    NumericDocValues values;

    public NumericDVAcc(FacetContext fcontext, SchemaField sf) throws IOException {
      super(fcontext, sf);
    }

    @Override
    public void setNextReader(LeafReaderContext readerContext) throws IOException {
      super.setNextReader(readerContext);
      // Re-resolve the doc values iterator for each new segment.
      values = DocValues.getNumeric(readerContext.reader(), sf.getName());
    }

    @Override
    protected boolean advanceExact(int doc) throws IOException {
      return values.advanceExact(doc);
    }
  }

  /** Accumulator for {@link SortedNumericDocValues} */
  abstract static class SortedNumericDVAcc extends DocValuesAcc {
    SortedNumericDocValues values;

    // NOTE(review): numSlots is unused here; subclasses size their arrays themselves — confirm.
    public SortedNumericDVAcc(FacetContext fcontext, SchemaField sf, int numSlots)
        throws IOException {
      super(fcontext, sf);
    }

    @Override
    public void setNextReader(LeafReaderContext readerContext) throws IOException {
      super.setNextReader(readerContext);
      values = DocValues.getSortedNumeric(readerContext.reader(), sf.getName());
    }

    @Override
    protected boolean advanceExact(int doc) throws IOException {
      return values.advanceExact(doc);
    }
  }

  /** Per-slot {@code long} accumulator over multi-valued numeric doc values. */
  abstract static class LongSortedNumericDVAcc extends SortedNumericDVAcc {
    long[] result;
    // Value the slots start from (and are reset to); e.g. 0 for sums, extremes for min/max.
    long initialValue;

    public LongSortedNumericDVAcc(
        FacetContext fcontext, SchemaField sf, int numSlots, long initialValue)
        throws IOException {
      super(fcontext, sf, numSlots);
      this.result = new long[numSlots];
      this.initialValue = initialValue;
      if (initialValue != 0) {
        Arrays.fill(result, initialValue);
      }
    }

    @Override
    public int compare(int slotA, int slotB) {
      return Long.compare(result[slotA], result[slotB]);
    }

    @Override
    public Object getValue(int slotNum) throws IOException {
      return result[slotNum];
    }

    @Override
    public void reset() throws IOException {
      Arrays.fill(result, initialValue);
    }

    @Override
    public void resize(Resizer resizer) {
      // New slots created by the resize are seeded with initialValue.
      this.result = resizer.resize(result, initialValue);
    }
  }

  /** Per-slot {@code double} accumulator over multi-valued numeric doc values. */
  abstract static class DoubleSortedNumericDVAcc extends SortedNumericDVAcc {
    double[] result;
    double initialValue;

    public DoubleSortedNumericDVAcc(
        FacetContext fcontext, SchemaField sf, int numSlots, double initialValue)
        throws IOException {
      super(fcontext, sf, numSlots);
      this.result = new double[numSlots];
      this.initialValue = initialValue;
      if (initialValue != 0) {
        Arrays.fill(result, initialValue);
      }
    }

    @Override
    public int compare(int slotA, int slotB) {
      return Double.compare(result[slotA], result[slotB]);
    }

    @Override
    public Object getValue(int slotNum) throws IOException {
      return result[slotNum];
    }

    @Override
    public void reset() throws IOException {
      Arrays.fill(result, initialValue);
    }

    @Override
    public void resize(Resizer resizer) {
      this.result = resizer.resize(result, initialValue);
    }

    /** converts given long value to double based on field type */
    protected double getDouble(long val) {
      switch (sf.getType().getNumberType()) {
        case INTEGER:
        case LONG:
        case DATE:
          // Stored verbatim for integral and date types.
          return val;
        case FLOAT:
          // float/double doc values are stored in sortable-bits form; decode them.
          return NumericUtils.sortableIntToFloat((int) val);
        case DOUBLE:
          return NumericUtils.sortableLongToDouble(val);
        default:
          // this would never happen
          return 0.0d;
      }
    }
  }

  /**
   * Base class for standard deviation and variance computation for fields with {@link
   * SortedNumericDocValues}
   */
  abstract static class SDVSortedNumericAcc extends DoubleSortedNumericDVAcc {
    // Per-slot value count and linear sum; `result` (inherited) holds the sum of squares.
    int[] counts;
    double[] sum;

    public SDVSortedNumericAcc(FacetContext fcontext, SchemaField sf, int numSlots)
        throws IOException {
      super(fcontext, sf, numSlots, 0);
      this.counts = new int[numSlots];
      this.sum = new double[numSlots];
    }

    @Override
    protected void collectValues(int doc, int slot) throws IOException {
      for (int i = 0, count = values.docValueCount(); i < count; i++) {
        double val = getDouble(values.nextValue());
        // Accumulate sum-of-squares, sum, and count — the sufficient statistics
        // from which computeVal derives variance / stddev.
        result[slot] += val * val;
        sum[slot] += val;
        counts[slot]++;
      }
    }

    /** Derives the final statistic (variance or stddev) for a slot. */
    protected abstract double computeVal(int slot);

    @Override
    public int compare(int slotA, int slotB) {
      return Double.compare(computeVal(slotA), computeVal(slotB));
    }

    @Override
    public Object getValue(int slot) {
      if (fcontext.isShard()) {
        // On a shard, return the raw partial statistics so the coordinator can merge them.
        ArrayList<Number> lst = new ArrayList<>(3);
        lst.add(counts[slot]);
        lst.add(result[slot]);
        lst.add(sum[slot]);
        return lst;
      } else {
        return computeVal(slot);
      }
    }

    @Override
    public void reset() throws IOException {
      super.reset();
      Arrays.fill(counts, 0);
      Arrays.fill(sum, 0);
    }

    @Override
    public void resize(Resizer resizer) {
      super.resize(resizer);
      this.counts = resizer.resize(counts, 0);
      this.sum = resizer.resize(sum, 0);
    }
  }

  /** Accumulator for {@link SortedDocValues} */
  // NOTE(review): non-static inner class, same asymmetry as NumericDVAcc above.
  abstract class SortedDVAcc extends DocValuesAcc {
    SortedDocValues values;

    public SortedDVAcc(FacetContext fcontext, SchemaField sf) throws IOException {
      super(fcontext, sf);
    }

    @Override
    public void setNextReader(LeafReaderContext readerContext) throws IOException {
      super.setNextReader(readerContext);
      values = DocValues.getSorted(readerContext.reader(), sf.getName());
    }

    @Override
    protected boolean advanceExact(int doc) throws IOException {
      return values.advanceExact(doc);
    }
  }

  /** Accumulator for {@link SortedSetDocValues} */
  abstract static class SortedSetDVAcc extends DocValuesAcc {
    SortedSetDocValues values;

    // NOTE(review): numSlots is unused here; subclasses size their arrays themselves — confirm.
    public SortedSetDVAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
      super(fcontext, sf);
    }

    @Override
    public void setNextReader(LeafReaderContext readerContext) throws IOException {
      super.setNextReader(readerContext);
      values = DocValues.getSortedSet(readerContext.reader(), sf.getName());
    }

    @Override
    protected boolean advanceExact(int doc) throws IOException {
      return values.advanceExact(doc);
    }
  }

  /** Per-slot {@code long} accumulator over sorted-set (ordinal) doc values. */
  abstract static class LongSortedSetDVAcc extends SortedSetDVAcc {
    long[] result;
    long initialValue;

    public LongSortedSetDVAcc(
        FacetContext fcontext, SchemaField sf, int numSlots, long initialValue)
        throws IOException {
      super(fcontext, sf, numSlots);
      result = new long[numSlots];
      this.initialValue = initialValue;
      if (initialValue != 0) {
        Arrays.fill(result, initialValue);
      }
    }

    @Override
    public int compare(int slotA, int slotB) {
      return Long.compare(result[slotA], result[slotB]);
    }

    @Override
    public Object getValue(int slotNum) throws IOException {
      return result[slotNum];
    }

    @Override
    public void reset() throws IOException {
      Arrays.fill(result, initialValue);
    }

    @Override
    public void resize(Resizer resizer) {
      this.result = resizer.resize(result, initialValue);
    }
  }

  /** Per-slot {@code double} accumulator over sorted-set (ordinal) doc values. */
  abstract static class DoubleSortedSetDVAcc extends SortedSetDVAcc {
    double[] result;
    double initialValue;

    // NOTE(review): initialValue parameter is declared long but stored as double — confirm
    // this narrowing of the ctor signature relative to DoubleSortedNumericDVAcc is intended.
    public DoubleSortedSetDVAcc(
        FacetContext fcontext, SchemaField sf, int numSlots, long initialValue)
        throws IOException {
      super(fcontext, sf, numSlots);
      result = new double[numSlots];
      this.initialValue = initialValue;
      if (initialValue != 0) {
        Arrays.fill(result, initialValue);
      }
    }

    @Override
    public int compare(int slotA, int slotB) {
      return Double.compare(result[slotA], result[slotB]);
    }

    @Override
    public Object getValue(int slotNum) throws IOException {
      return result[slotNum];
    }

    @Override
    public void reset() throws IOException {
      Arrays.fill(result, initialValue);
    }

    @Override
    public void resize(Resizer resizer) {
      this.result = resizer.resize(result, initialValue);
    }
  }

  /**
   * Base class for standard deviation and variance computation for fields with {@link
   * SortedSetDocValues}
   */
  abstract static class SDVSortedSetAcc extends DoubleSortedSetDVAcc {
    // Per-slot value count and linear sum; `result` (inherited) holds the sum of squares.
    int[] counts;
    double[] sum;

    public SDVSortedSetAcc(FacetContext fcontext, SchemaField sf, int numSlots)
        throws IOException {
      super(fcontext, sf, numSlots, 0);
      this.counts = new int[numSlots];
      this.sum = new double[numSlots];
    }

    @Override
    protected void collectValues(int doc, int slot) throws IOException {
      long ord;
      // Iterate every ordinal of the current doc until the iterator is exhausted.
      while ((ord = values.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) {
        // Resolve the ordinal back to a term, then to a numeric value via the field type.
        BytesRef term = values.lookupOrd(ord);
        Object obj = sf.getType().toObject(sf, term);
        double val = obj instanceof Date ? ((Date) obj).getTime() : ((Number) obj).doubleValue();
        result[slot] += val * val;
        sum[slot] += val;
        counts[slot]++;
      }
    }

    /** Derives the final statistic (variance or stddev) for a slot. */
    protected abstract double computeVal(int slot);

    @Override
    public int compare(int slotA, int slotB) {
      return Double.compare(computeVal(slotA), computeVal(slotB));
    }

    @Override
    public Object getValue(int slot) {
      if (fcontext.isShard()) {
        // On a shard, return the raw partial statistics so the coordinator can merge them.
        ArrayList<Number> lst = new ArrayList<>(3);
        lst.add(counts[slot]);
        lst.add(result[slot]);
        lst.add(sum[slot]);
        return lst;
      } else {
        return computeVal(slot);
      }
    }

    @Override
    public void reset() throws IOException {
      super.reset();
      Arrays.fill(counts, 0);
      Arrays.fill(sum, 0);
    }

    @Override
    public void resize(Resizer resizer) {
      super.resize(resizer);
      this.counts = resizer.resize(counts, 0);
      this.sum = resizer.resize(sum, 0);
    }
  }
}
package org.apache.helix.model; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.TreeMap; import org.apache.helix.HelixDefinedState; import org.apache.helix.HelixProperty; import org.apache.helix.ZNRecord; import org.apache.helix.model.builder.StateTransitionTableBuilder; import org.apache.helix.model.util.StateModelDefinitionValidator; /** * Describe the state model */ public class StateModelDefinition extends HelixProperty { public enum StateModelDefinitionProperty { INITIAL_STATE, STATE_TRANSITION_PRIORITYLIST, STATE_PRIORITY_LIST } /** * state model's initial state */ private final String _initialState; /** * State Names in priority order. 
Indicates the order in which states are * fulfilled */ private final List<String> _statesPriorityList; /** * Specifies the number of instances for a given state <br> * -1 don't care, don't try to keep any resource in this state on any instance <br> * >0 any integer number greater than 0 specifies the number of instances * needed to be in this state <br> * R all instances in the preference list can be in this state <br> * N all instances in the cluster will be put in this state.PreferenceList * must be denoted as '*' */ private final Map<String, String> _statesCountMap; private final List<String> _stateTransitionPriorityList; /** * StateTransition which is used to find the nextState given StartState and * FinalState */ private final Map<String, Map<String, String>> _stateTransitionTable; /** * Instantiate from a pre-populated record * @param record ZNRecord representing a state model definition */ public StateModelDefinition(ZNRecord record) { super(record); _initialState = record.getSimpleField(StateModelDefinitionProperty.INITIAL_STATE.toString()); if (_initialState == null) { throw new IllegalArgumentException("initial-state for " + record.getId() + " is null"); } _statesPriorityList = record.getListField(StateModelDefinitionProperty.STATE_PRIORITY_LIST.toString()); _stateTransitionPriorityList = record.getListField(StateModelDefinitionProperty.STATE_TRANSITION_PRIORITYLIST.toString()); _stateTransitionTable = new HashMap<String, Map<String, String>>(); _statesCountMap = new HashMap<String, String>(); if (_statesPriorityList != null) { for (String state : _statesPriorityList) { Map<String, String> metaData = record.getMapField(state + ".meta"); if (metaData != null) { if (metaData.get("count") != null) { _statesCountMap.put(state, metaData.get("count")); } } Map<String, String> nextData = record.getMapField(state + ".next"); _stateTransitionTable.put(state, nextData); } } // add transitions for helix-defined states for (HelixDefinedState state : 
HelixDefinedState.values()) { if (!_statesPriorityList.contains(state.toString())) { _statesCountMap.put(state.toString(), "-1"); } } addDefaultTransition(HelixDefinedState.ERROR.toString(), HelixDefinedState.DROPPED.toString(), HelixDefinedState.DROPPED.toString()); addDefaultTransition(HelixDefinedState.ERROR.toString(), _initialState, _initialState); addDefaultTransition(_initialState, HelixDefinedState.DROPPED.toString(), HelixDefinedState.DROPPED.toString()); } /** * add transitions involving helix-defines states * these transitions need not to be specified in state-model-definition * @param from source state * @param to destination state * @param next intermediate state to reach the destination */ void addDefaultTransition(String from, String to, String next) { if (!_stateTransitionTable.containsKey(from)) { _stateTransitionTable.put(from, new TreeMap<String, String>()); } if (!_stateTransitionTable.get(from).containsKey(to)) { _stateTransitionTable.get(from).put(to, next); } } /** * Get an ordered priority list of transitions * @return transitions in the form SRC-DEST, the first of which is highest priority */ public List<String> getStateTransitionPriorityList() { return _stateTransitionPriorityList; } /** * Get an ordered priority list of states * @return state names, the first of which is highest priority */ public List<String> getStatesPriorityList() { return _statesPriorityList; } /** * Get the intermediate state required to transition from one state to the other * @param fromState the source * @param toState the destination * @return the intermediate state */ public String getNextStateForTransition(String fromState, String toState) { Map<String, String> map = _stateTransitionTable.get(fromState); if (map != null) { return map.get(toState); } return null; } /** * Get the starting state in the model * @return name of the initial state */ public String getInitialState() { return _initialState; } /** * Number of instances that can be in each state * @param 
state the state name * @return maximum instance count per state, can be "N" or "R" */ public String getNumInstancesPerState(String state) { return _statesCountMap.get(state); } @Override public boolean isValid() { return StateModelDefinitionValidator.isStateModelDefinitionValid(this); } // TODO move this to model.builder package, refactor StateModelConfigGenerator to use this /** * Construct a state model */ public static class Builder { private final String _statemodelName; private String initialState; Map<String, Integer> statesMap; Map<Transition, Integer> transitionMap; Map<String, String> stateConstraintMap; /** * Start building a state model with a name * @param name state model name */ public Builder(String name) { this._statemodelName = name; statesMap = new HashMap<String, Integer>(); transitionMap = new HashMap<Transition, Integer>(); stateConstraintMap = new HashMap<String, String>(); } /** * initial state of a replica when it starts, most commonly used initial * state is OFFLINE * @param initialState */ public Builder initialState(String initialState) { this.initialState = initialState; return this; } /** * Define all valid states using this method. Set the priority in which the * constraints must be satisfied. Lets say STATE1 has a constraint of 1 and * STATE2 has a constraint of 3 but only one node is up then Helix will uses * the priority to see STATE constraint has to be given higher preference <br/> * Use -1 to indicates states with no constraints, like OFFLINE * @param state * @param priority */ public Builder addState(String state, int priority) { statesMap.put(state, priority); return this; } /** * Sets the priority to Integer.MAX_VALUE * @param state */ public Builder addState(String state) { addState(state, Integer.MAX_VALUE); return this; } /** * Define all legal transitions between states using this method. Priority * is used to order the transitions. 
Helix tries to maximize the number of * transitions that can be fired in parallel without violating the * constraint. The transitions are first sorted based on priority and * transitions are selected in a greedy way until the constriants are not * violated. * @param fromState source * @param toState destination * @param priority priority, higher value is higher priority * @return Builder */ public Builder addTransition(String fromState, String toState, int priority) { transitionMap.put(new Transition(fromState, toState), priority); return this; } /** * Add a state transition with maximal priority value * @see #addTransition(String, String, int) * @param fromState * @param toState * @return Builder */ public Builder addTransition(String fromState, String toState) { addTransition(fromState, toState, Integer.MAX_VALUE); return this; } /** * Set a maximum for replicas in this state * @param state state name * @param upperBound maximum * @return Builder */ public Builder upperBound(String state, int upperBound) { stateConstraintMap.put(state, String.valueOf(upperBound)); return this; } /** * You can use this to have the bounds dynamically change based on other * parameters. <br/> * Currently support 2 values <br/> * R --> Refers to the number of replicas specified during resource * creation. This allows having different replication factor for each * resource without having to create a different state machine. <br/> * N --> Refers to all nodes in the cluster. Useful for resources that need * to exist on all nodes. This way one can add/remove nodes without having * the change the bounds. 
* @param state * @param bound * @return Builder */ public Builder dynamicUpperBound(String state, String bound) { stateConstraintMap.put(state, bound); return this; } /** * Create a StateModelDefinition from this Builder * @return StateModelDefinition */ public StateModelDefinition build() { ZNRecord record = new ZNRecord(_statemodelName); // get sorted state priorities by specified values ArrayList<String> statePriorityList = new ArrayList<String>(statesMap.keySet()); Comparator<? super String> c1 = new Comparator<String>() { @Override public int compare(String o1, String o2) { return statesMap.get(o1).compareTo(statesMap.get(o2)); } }; Collections.sort(statePriorityList, c1); // get sorted transition priorities by specified values ArrayList<Transition> transitionList = new ArrayList<Transition>(transitionMap.keySet()); Comparator<? super Transition> c2 = new Comparator<Transition>() { @Override public int compare(Transition o1, Transition o2) { return transitionMap.get(o1).compareTo(transitionMap.get(o2)); } }; Collections.sort(transitionList, c2); List<String> transitionPriorityList = new ArrayList<String>(transitionList.size()); for (Transition t : transitionList) { transitionPriorityList.add(t.toString()); } record.setSimpleField(StateModelDefinitionProperty.INITIAL_STATE.toString(), initialState); record.setListField(StateModelDefinitionProperty.STATE_PRIORITY_LIST.toString(), statePriorityList); record.setListField(StateModelDefinitionProperty.STATE_TRANSITION_PRIORITYLIST.toString(), transitionPriorityList); // compute full paths for next states StateTransitionTableBuilder stateTransitionTableBuilder = new StateTransitionTableBuilder(); Map<String, Map<String, String>> transitionTable = stateTransitionTableBuilder.buildTransitionTable(statePriorityList, new ArrayList<Transition>(transitionMap.keySet())); for (String state : transitionTable.keySet()) { record.setMapField(state + ".next", transitionTable.get(state)); } // state counts for (String state : 
statePriorityList) { HashMap<String, String> metadata = new HashMap<String, String>(); if (stateConstraintMap.get(state) != null) { metadata.put("count", stateConstraintMap.get(state)); } else { metadata.put("count", "-1"); } record.setMapField(state + ".meta", metadata); } return new StateModelDefinition(record); } } @Override public boolean equals(Object o) { if (o == null) { return false; } if (!(o instanceof StateModelDefinition)) { return false; } StateModelDefinition stateModelDefinition = (StateModelDefinition) o; return _initialState.equals(stateModelDefinition._initialState) && _statesCountMap .equals(stateModelDefinition._statesCountMap) && _statesPriorityList .equals(stateModelDefinition._statesPriorityList) && _stateTransitionPriorityList .equals(stateModelDefinition._stateTransitionPriorityList) && _stateTransitionTable.equals(stateModelDefinition._stateTransitionTable); } /** * Get the state to its count map, order in its state priority. * * @return state count map: state->count */ public static LinkedHashMap<String, Integer> getStateCountMap( StateModelDefinition stateModelDef, int candidateNodeNum, int totalReplicas) { LinkedHashMap<String, Integer> stateCountMap = new LinkedHashMap<String, Integer>(); List<String> statesPriorityList = stateModelDef.getStatesPriorityList(); int replicas = totalReplicas; for (String state : statesPriorityList) { String num = stateModelDef.getNumInstancesPerState(state); if ("N".equals(num)) { stateCountMap.put(state, candidateNodeNum); } else if ("R".equals(num)) { // wait until we get the counts for all other states continue; } else { int stateCount = -1; try { stateCount = Integer.parseInt(num); } catch (Exception e) { // LOG.error("Invalid count for state: " + state + ", count: " + num + // ", use -1 instead"); } if (stateCount > 0) { stateCountMap.put(state, stateCount); replicas -= stateCount; } } } // get state count for R for (String state : statesPriorityList) { String num = 
stateModelDef.getNumInstancesPerState(state); if ("R".equals(num)) { stateCountMap.put(state, replicas); // should have at most one state using R break; } } return stateCountMap; } }
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.ec2.model;

import java.io.Serializable;
import javax.annotation.Generated;

/**
 * <p>
 * Describes a transit gateway Connect peer.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/TransitGatewayConnectPeer" target="_top">AWS API
 *      Documentation</a>
 */
// NOTE: code-generator output (see @Generated) — do not hand-edit; regenerate instead.
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class TransitGatewayConnectPeer implements Serializable, Cloneable {

    /**
     * <p>
     * The ID of the Connect attachment.
     * </p>
     */
    private String transitGatewayAttachmentId;
    /**
     * <p>
     * The ID of the Connect peer.
     * </p>
     */
    private String transitGatewayConnectPeerId;
    /**
     * <p>
     * The state of the Connect peer.
     * </p>
     */
    private String state;
    /**
     * <p>
     * The creation time.
     * </p>
     */
    private java.util.Date creationTime;
    /**
     * <p>
     * The Connect peer details.
     * </p>
     */
    private TransitGatewayConnectPeerConfiguration connectPeerConfiguration;
    /**
     * <p>
     * The tags for the Connect peer.
     * </p>
     */
    private com.amazonaws.internal.SdkInternalList<Tag> tags;

    /**
     * <p>
     * The ID of the Connect attachment.
     * </p>
     *
     * @param transitGatewayAttachmentId
     *        The ID of the Connect attachment.
     */
    public void setTransitGatewayAttachmentId(String transitGatewayAttachmentId) {
        this.transitGatewayAttachmentId = transitGatewayAttachmentId;
    }

    /**
     * <p>
     * The ID of the Connect attachment.
     * </p>
     *
     * @return The ID of the Connect attachment.
     */
    public String getTransitGatewayAttachmentId() {
        return this.transitGatewayAttachmentId;
    }

    /**
     * <p>
     * The ID of the Connect attachment.
     * </p>
     *
     * @param transitGatewayAttachmentId
     *        The ID of the Connect attachment.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public TransitGatewayConnectPeer withTransitGatewayAttachmentId(String transitGatewayAttachmentId) {
        setTransitGatewayAttachmentId(transitGatewayAttachmentId);
        return this;
    }

    /**
     * <p>
     * The ID of the Connect peer.
     * </p>
     *
     * @param transitGatewayConnectPeerId
     *        The ID of the Connect peer.
     */
    public void setTransitGatewayConnectPeerId(String transitGatewayConnectPeerId) {
        this.transitGatewayConnectPeerId = transitGatewayConnectPeerId;
    }

    /**
     * <p>
     * The ID of the Connect peer.
     * </p>
     *
     * @return The ID of the Connect peer.
     */
    public String getTransitGatewayConnectPeerId() {
        return this.transitGatewayConnectPeerId;
    }

    /**
     * <p>
     * The ID of the Connect peer.
     * </p>
     *
     * @param transitGatewayConnectPeerId
     *        The ID of the Connect peer.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public TransitGatewayConnectPeer withTransitGatewayConnectPeerId(String transitGatewayConnectPeerId) {
        setTransitGatewayConnectPeerId(transitGatewayConnectPeerId);
        return this;
    }

    /**
     * <p>
     * The state of the Connect peer.
     * </p>
     *
     * @param state
     *        The state of the Connect peer.
     * @see TransitGatewayConnectPeerState
     */
    public void setState(String state) {
        this.state = state;
    }

    /**
     * <p>
     * The state of the Connect peer.
     * </p>
     *
     * @return The state of the Connect peer.
     * @see TransitGatewayConnectPeerState
     */
    public String getState() {
        return this.state;
    }

    /**
     * <p>
     * The state of the Connect peer.
     * </p>
     *
     * @param state
     *        The state of the Connect peer.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see TransitGatewayConnectPeerState
     */
    public TransitGatewayConnectPeer withState(String state) {
        setState(state);
        return this;
    }

    /**
     * <p>
     * The state of the Connect peer.
     * </p>
     *
     * @param state
     *        The state of the Connect peer.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see TransitGatewayConnectPeerState
     */
    public TransitGatewayConnectPeer withState(TransitGatewayConnectPeerState state) {
        // Enum overload stores the enum's string form; no validation against the enum on read.
        this.state = state.toString();
        return this;
    }

    /**
     * <p>
     * The creation time.
     * </p>
     *
     * @param creationTime
     *        The creation time.
     */
    public void setCreationTime(java.util.Date creationTime) {
        this.creationTime = creationTime;
    }

    /**
     * <p>
     * The creation time.
     * </p>
     *
     * @return The creation time.
     */
    public java.util.Date getCreationTime() {
        return this.creationTime;
    }

    /**
     * <p>
     * The creation time.
     * </p>
     *
     * @param creationTime
     *        The creation time.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public TransitGatewayConnectPeer withCreationTime(java.util.Date creationTime) {
        setCreationTime(creationTime);
        return this;
    }

    /**
     * <p>
     * The Connect peer details.
     * </p>
     *
     * @param connectPeerConfiguration
     *        The Connect peer details.
     */
    public void setConnectPeerConfiguration(TransitGatewayConnectPeerConfiguration connectPeerConfiguration) {
        this.connectPeerConfiguration = connectPeerConfiguration;
    }

    /**
     * <p>
     * The Connect peer details.
     * </p>
     *
     * @return The Connect peer details.
     */
    public TransitGatewayConnectPeerConfiguration getConnectPeerConfiguration() {
        return this.connectPeerConfiguration;
    }

    /**
     * <p>
     * The Connect peer details.
     * </p>
     *
     * @param connectPeerConfiguration
     *        The Connect peer details.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public TransitGatewayConnectPeer withConnectPeerConfiguration(TransitGatewayConnectPeerConfiguration connectPeerConfiguration) {
        setConnectPeerConfiguration(connectPeerConfiguration);
        return this;
    }

    /**
     * <p>
     * The tags for the Connect peer.
     * </p>
     *
     * @return The tags for the Connect peer.
     */
    public java.util.List<Tag> getTags() {
        // Lazily materializes the internal list so callers never see null.
        if (tags == null) {
            tags = new com.amazonaws.internal.SdkInternalList<Tag>();
        }
        return tags;
    }

    /**
     * <p>
     * The tags for the Connect peer.
     * </p>
     *
     * @param tags
     *        The tags for the Connect peer.
     */
    public void setTags(java.util.Collection<Tag> tags) {
        if (tags == null) {
            this.tags = null;
            return;
        }

        // Defensive copy into the SDK's internal list type.
        this.tags = new com.amazonaws.internal.SdkInternalList<Tag>(tags);
    }

    /**
     * <p>
     * The tags for the Connect peer.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setTags(java.util.Collection)} or {@link #withTags(java.util.Collection)} if you want to override the
     * existing values.
     * </p>
     *
     * @param tags
     *        The tags for the Connect peer.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public TransitGatewayConnectPeer withTags(Tag... tags) {
        if (this.tags == null) {
            setTags(new com.amazonaws.internal.SdkInternalList<Tag>(tags.length));
        }
        for (Tag ele : tags) {
            this.tags.add(ele);
        }
        return this;
    }

    /**
     * <p>
     * The tags for the Connect peer.
     * </p>
     *
     * @param tags
     *        The tags for the Connect peer.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public TransitGatewayConnectPeer withTags(java.util.Collection<Tag> tags) {
        setTags(tags);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getTransitGatewayAttachmentId() != null)
            sb.append("TransitGatewayAttachmentId: ").append(getTransitGatewayAttachmentId()).append(",");
        if (getTransitGatewayConnectPeerId() != null)
            sb.append("TransitGatewayConnectPeerId: ").append(getTransitGatewayConnectPeerId()).append(",");
        if (getState() != null)
            sb.append("State: ").append(getState()).append(",");
        if (getCreationTime() != null)
            sb.append("CreationTime: ").append(getCreationTime()).append(",");
        if (getConnectPeerConfiguration() != null)
            sb.append("ConnectPeerConfiguration: ").append(getConnectPeerConfiguration()).append(",");
        if (getTags() != null)
            sb.append("Tags: ").append(getTags());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof TransitGatewayConnectPeer == false)
            return false;
        TransitGatewayConnectPeer other = (TransitGatewayConnectPeer) obj;
        // Generated null-safe field-by-field comparison: XOR catches one-side-null,
        // the second check compares values when both sides are non-null.
        if (other.getTransitGatewayAttachmentId() == null ^ this.getTransitGatewayAttachmentId() == null)
            return false;
        if (other.getTransitGatewayAttachmentId() != null && other.getTransitGatewayAttachmentId().equals(this.getTransitGatewayAttachmentId()) == false)
            return false;
        if (other.getTransitGatewayConnectPeerId() == null ^ this.getTransitGatewayConnectPeerId() == null)
            return false;
        if (other.getTransitGatewayConnectPeerId() != null && other.getTransitGatewayConnectPeerId().equals(this.getTransitGatewayConnectPeerId()) == false)
            return false;
        if (other.getState() == null ^ this.getState() == null)
            return false;
        if (other.getState() != null && other.getState().equals(this.getState()) == false)
            return false;
        if (other.getCreationTime() == null ^ this.getCreationTime() == null)
            return false;
        if (other.getCreationTime() != null && other.getCreationTime().equals(this.getCreationTime()) == false)
            return false;
        if (other.getConnectPeerConfiguration() == null ^ this.getConnectPeerConfiguration() == null)
            return false;
        if (other.getConnectPeerConfiguration() != null && other.getConnectPeerConfiguration().equals(this.getConnectPeerConfiguration()) == false)
            return false;
        if (other.getTags() == null ^ this.getTags() == null)
            return false;
        if (other.getTags() != null && other.getTags().equals(this.getTags()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        // Hashes the same fields equals() compares, in the same order.
        hashCode = prime * hashCode + ((getTransitGatewayAttachmentId() == null) ? 0 : getTransitGatewayAttachmentId().hashCode());
        hashCode = prime * hashCode + ((getTransitGatewayConnectPeerId() == null) ? 0 : getTransitGatewayConnectPeerId().hashCode());
        hashCode = prime * hashCode + ((getState() == null) ? 0 : getState().hashCode());
        hashCode = prime * hashCode + ((getCreationTime() == null) ? 0 : getCreationTime().hashCode());
        hashCode = prime * hashCode + ((getConnectPeerConfiguration() == null) ? 0 : getConnectPeerConfiguration().hashCode());
        hashCode = prime * hashCode + ((getTags() == null) ? 0 : getTags().hashCode());
        return hashCode;
    }

    @Override
    public TransitGatewayConnectPeer clone() {
        try {
            // Shallow clone, as generated; nested objects are shared with the original.
            return (TransitGatewayConnectPeer) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.storm;

import org.apache.storm.container.ResourceIsolationInterface;
import org.apache.storm.nimbus.ITopologyActionNotifierPlugin;
import org.apache.storm.scheduler.resource.strategies.eviction.IEvictionStrategy;
import org.apache.storm.scheduler.resource.strategies.priority.ISchedulingPriorityStrategy;
import org.apache.storm.scheduler.resource.strategies.scheduling.IStrategy;
import org.apache.storm.validation.ConfigValidation;
import org.apache.storm.validation.Validated;

import java.util.ArrayList;
import java.util.Map;

import static org.apache.storm.validation.ConfigValidationAnnotations.*;

/**
 * Storm configs are specified as a plain old map. This class provides constants for
 * all the configurations possible on a Storm cluster. Each constant is paired with an annotation
 * that defines the validity criterion of the corresponding field. Default
 * values for these configs can be found in defaults.yaml.
 *
 * This class complements {@link org.apache.storm.Config} with the configuration keys that are
 * only used by the Storm daemons (Nimbus, Supervisor, UI, Logviewer, DRPC, Pacemaker).
 */
public class DaemonConfig implements Validated {

    /**
     * Interval (in ms) at which we check whether the Netty channel is writable and try to
     * write pending messages.
     */
    @isInteger
    public static final String STORM_NETTY_FLUSH_CHECK_INTERVAL_MS = "storm.messaging.netty.flush.check.interval.ms";

    /**
     * A list of daemon metrics reporter plugin class names.
     * These plugins must implement {@link org.apache.storm.daemon.metrics.reporters.PreparableReporter} interface.
     */
    @isStringList
    public static final String STORM_DAEMON_METRICS_REPORTER_PLUGINS = "storm.daemon.metrics.reporter.plugins";

    /**
     * A specific Locale for daemon metrics reporter plugins.
     * Use the specified IETF BCP 47 language tag string for a Locale.
     */
    @isString
    public static final String STORM_DAEMON_METRICS_REPORTER_PLUGIN_LOCALE = "storm.daemon.metrics.reporter.plugin.locale";

    /**
     * A specific domain for daemon metrics reporter plugins, to limit reporting to that domain.
     */
    @isString
    public static final String STORM_DAEMON_METRICS_REPORTER_PLUGIN_DOMAIN = "storm.daemon.metrics.reporter.plugin.domain";

    /**
     * A rate-unit in TimeUnit that specifies the reporting frequency for daemon metrics reporter plugins.
     */
    @isString
    public static final String STORM_DAEMON_METRICS_REPORTER_PLUGIN_RATE_UNIT = "storm.daemon.metrics.reporter.plugin.rate.unit";

    /**
     * A duration-unit in TimeUnit that specifies the reporting window for daemon metrics reporter plugins.
     */
    @isString
    public static final String STORM_DAEMON_METRICS_REPORTER_PLUGIN_DURATION_UNIT = "storm.daemon.metrics.reporter.plugin.duration.unit";

    /**
     * The csv reporter directory for the CsvPreparableReporter daemon metrics reporter.
     */
    @isString
    public static final String STORM_DAEMON_METRICS_REPORTER_CSV_LOG_DIR = "storm.daemon.metrics.reporter.csv.log.dir";

    /**
     * A directory that holds configuration files for log4j2.
     * It can be either a relative or an absolute directory.
     * If relative, it is relative to the storm's home directory.
     */
    @isString
    public static final String STORM_LOG4J2_CONF_DIR = "storm.log4j2.conf.dir";

    /**
     * A global task scheduler used to assign topologies's tasks to supervisors' workers.
     *
     * If this is not set, a default system scheduler will be used.
     */
    @isString
    public static final String STORM_SCHEDULER = "storm.scheduler";

    /**
     * Whether we want to display all the resource capacity and scheduled usage on the UI page.
     * You MUST have this variable set if you are using any kind of resource-related scheduler.
     *
     * If this is not set, we will not display resource capacity and usage on the UI.
     */
    @isBoolean
    public static final String SCHEDULER_DISPLAY_RESOURCE = "scheduler.display.resource";

    /**
     * Initialization parameters for the group mapping service plugin.
     * Provides a way for a @link{STORM_GROUP_MAPPING_SERVICE_PROVIDER_PLUGIN}
     * implementation to access optional settings.
     */
    @isType(type = Map.class)
    public static final String STORM_GROUP_MAPPING_SERVICE_PARAMS = "storm.group.mapping.service.params";

    /**
     * The directory where storm's health scripts go.
     */
    @isString
    public static final String STORM_HEALTH_CHECK_DIR = "storm.health.check.dir";

    /**
     * The time to allow any given healthcheck script to run before it
     * is marked failed due to timeout.
     */
    @isNumber
    public static final String STORM_HEALTH_CHECK_TIMEOUT_MS = "storm.health.check.timeout.ms";

    /**
     * This is the user that the Nimbus daemon process is running as. May be used when security
     * is enabled to authorize actions in the cluster.
     */
    @isString
    public static final String NIMBUS_DAEMON_USER = "nimbus.daemon.user";

    /**
     * This parameter is used by the storm-deploy project to configure the
     * jvm options for the nimbus daemon.
     */
    @isStringOrStringList
    public static final String NIMBUS_CHILDOPTS = "nimbus.childopts";

    /**
     * How long without heartbeating a task can go before nimbus will consider the
     * task dead and reassign it to another location.
     */
    @isInteger
    @isPositiveNumber
    public static final String NIMBUS_TASK_TIMEOUT_SECS = "nimbus.task.timeout.secs";

    /**
     * How often nimbus should wake up to check heartbeats and do reassignments. Note
     * that if a machine ever goes down Nimbus will immediately wake up and take action.
     * This parameter is for checking for failures when there's no explicit event like that
     * occurring.
     */
    @isInteger
    @isPositiveNumber
    public static final String NIMBUS_MONITOR_FREQ_SECS = "nimbus.monitor.freq.secs";

    /**
     * How often nimbus should wake the cleanup thread to clean the inbox.
     * @see #NIMBUS_INBOX_JAR_EXPIRATION_SECS
     */
    @isInteger
    @isPositiveNumber
    public static final String NIMBUS_CLEANUP_INBOX_FREQ_SECS = "nimbus.cleanup.inbox.freq.secs";

    /**
     * The length of time a jar file lives in the inbox before being deleted by the cleanup thread.
     *
     * Probably keep this value greater than or equal to NIMBUS_CLEANUP_INBOX_JAR_EXPIRATION_SECS.
     * Note that the time it takes to delete an inbox jar file is going to be somewhat more than
     * NIMBUS_CLEANUP_INBOX_JAR_EXPIRATION_SECS (depending on how often NIMBUS_CLEANUP_FREQ_SECS
     * is set to).
     * @see #NIMBUS_CLEANUP_INBOX_FREQ_SECS
     */
    @isInteger
    public static final String NIMBUS_INBOX_JAR_EXPIRATION_SECS = "nimbus.inbox.jar.expiration.secs";

    /**
     * How long before a supervisor can go without heartbeating before nimbus considers it dead
     * and stops assigning new work to it.
     */
    @isInteger
    @isPositiveNumber
    public static final String NIMBUS_SUPERVISOR_TIMEOUT_SECS = "nimbus.supervisor.timeout.secs";

    /**
     * A special timeout used when a task is initially launched. During launch, this is the timeout
     * used until the first heartbeat, overriding nimbus.task.timeout.secs.
     *
     * <p>A separate timeout exists for launch because there can be quite a bit of overhead
     * to launching new JVM's and configuring them.</p>
     */
    @isInteger
    @isPositiveNumber
    public static final String NIMBUS_TASK_LAUNCH_SECS = "nimbus.task.launch.secs";

    /**
     * During upload/download with the master, how long an upload or download connection is idle
     * before nimbus considers it dead and drops the connection.
     */
    @isInteger
    public static final String NIMBUS_FILE_COPY_EXPIRATION_SECS = "nimbus.file.copy.expiration.secs";

    /**
     * A custom class that implements ITopologyValidator that is run whenever a
     * topology is submitted. Can be used to provide business-specific logic for
     * whether topologies are allowed to run or not.
     */
    @isString
    public static final String NIMBUS_TOPOLOGY_VALIDATOR = "nimbus.topology.validator";

    /**
     * Class name for authorization plugin for Nimbus.
     */
    @isString
    public static final String NIMBUS_AUTHORIZER = "nimbus.authorizer";

    /**
     * Impersonation user ACL config entries.
     */
    @isString
    public static final String NIMBUS_IMPERSONATION_AUTHORIZER = "nimbus.impersonation.authorizer";

    /**
     * How often nimbus should wake up to renew credentials if needed.
     */
    @isInteger
    @isPositiveNumber
    public static final String NIMBUS_CREDENTIAL_RENEW_FREQ_SECS = "nimbus.credential.renewers.freq.secs";

    /**
     * FQCN of a class that implements {@link org.apache.storm.nimbus.ITopologyActionNotifierPlugin};
     * see that interface for details.
     */
    @isImplementationOfClass(implementsClass = ITopologyActionNotifierPlugin.class)
    public static final String NIMBUS_TOPOLOGY_ACTION_NOTIFIER_PLUGIN = "nimbus.topology.action.notifier.plugin.class";

    /**
     * Storm UI binds to this host/interface.
     */
    @isString
    public static final String UI_HOST = "ui.host";

    /**
     * Storm UI binds to this port.
     */
    @isInteger
    @isPositiveNumber
    public static final String UI_PORT = "ui.port";

    /**
     * Storm UI Project BUGTRACKER Link for reporting issue.
     */
    @isString
    public static final String UI_PROJECT_BUGTRACKER_URL = "ui.project.bugtracker.url";

    /**
     * Storm UI Central Logging URL.
     */
    @isString
    public static final String UI_CENTRAL_LOGGING_URL = "ui.central.logging.url";

    /**
     * HTTP UI port for log viewer.
     */
    @isInteger
    @isPositiveNumber
    public static final String LOGVIEWER_PORT = "logviewer.port";

    /**
     * Childopts for log viewer java process.
     */
    @isStringOrStringList
    public static final String LOGVIEWER_CHILDOPTS = "logviewer.childopts";

    /**
     * How often to clean up old log files.
     */
    @isInteger
    @isPositiveNumber
    public static final String LOGVIEWER_CLEANUP_INTERVAL_SECS = "logviewer.cleanup.interval.secs";

    /**
     * How many minutes since a log was last modified for the log to be considered for clean-up.
     */
    @isInteger
    @isPositiveNumber
    public static final String LOGVIEWER_CLEANUP_AGE_MINS = "logviewer.cleanup.age.mins";

    /**
     * The maximum number of bytes all worker log files can take up, in MB.
     */
    @isPositiveNumber
    public static final String LOGVIEWER_MAX_SUM_WORKER_LOGS_SIZE_MB = "logviewer.max.sum.worker.logs.size.mb";

    /**
     * The maximum number of bytes per worker's files can take up, in MB.
     */
    @isPositiveNumber
    public static final String LOGVIEWER_MAX_PER_WORKER_LOGS_SIZE_MB = "logviewer.max.per.worker.logs.size.mb";

    /**
     * Storm Logviewer HTTPS port.
     */
    @isInteger
    @isPositiveNumber
    public static final String LOGVIEWER_HTTPS_PORT = "logviewer.https.port";

    /**
     * Path to the keystore containing the certs used by Storm Logviewer for HTTPS communications.
     */
    @isString
    public static final String LOGVIEWER_HTTPS_KEYSTORE_PATH = "logviewer.https.keystore.path";

    /**
     * Password for the keystore for HTTPS for Storm Logviewer.
     */
    @isString
    public static final String LOGVIEWER_HTTPS_KEYSTORE_PASSWORD = "logviewer.https.keystore.password";

    /**
     * Type of the keystore for HTTPS for Storm Logviewer.
     * see http://docs.oracle.com/javase/8/docs/api/java/security/KeyStore.html for more details.
     */
    @isString
    public static final String LOGVIEWER_HTTPS_KEYSTORE_TYPE = "logviewer.https.keystore.type";

    /**
     * Password to the private key in the keystore for setting up HTTPS (SSL).
     */
    @isString
    public static final String LOGVIEWER_HTTPS_KEY_PASSWORD = "logviewer.https.key.password";

    /**
     * Path to the truststore containing the certs used by Storm Logviewer for HTTPS communications.
     */
    @isString
    public static final String LOGVIEWER_HTTPS_TRUSTSTORE_PATH = "logviewer.https.truststore.path";

    /**
     * Password for the truststore for HTTPS for Storm Logviewer.
     */
    @isString
    public static final String LOGVIEWER_HTTPS_TRUSTSTORE_PASSWORD = "logviewer.https.truststore.password";

    /**
     * Type of the truststore for HTTPS for Storm Logviewer.
     * see http://docs.oracle.com/javase/8/docs/api/java/security/KeyStore.html for more details.
     */
    @isString
    public static final String LOGVIEWER_HTTPS_TRUSTSTORE_TYPE = "logviewer.https.truststore.type";

    /**
     * If set to true, the Storm Logviewer HTTPS server will request (but not require) a
     * certificate from connecting clients (SSL client auth).
     */
    @isBoolean
    public static final String LOGVIEWER_HTTPS_WANT_CLIENT_AUTH = "logviewer.https.want.client.auth";

    /**
     * If set to true, the Storm Logviewer HTTPS server will require a certificate from
     * connecting clients (SSL client auth).
     */
    @isBoolean
    public static final String LOGVIEWER_HTTPS_NEED_CLIENT_AUTH = "logviewer.https.need.client.auth";

    /**
     * A list of users allowed to view logs via the Log Viewer.
     */
    @isStringList
    public static final String LOGS_USERS = "logs.users";

    /**
     * A list of groups allowed to view logs via the Log Viewer.
     */
    @isStringList
    public static final String LOGS_GROUPS = "logs.groups";

    /**
     * Appender name used by log viewer to determine log directory.
     */
    @isString
    public static final String LOGVIEWER_APPENDER_NAME = "logviewer.appender.name";

    /**
     * Childopts for Storm UI Java process.
     */
    @isStringOrStringList
    public static final String UI_CHILDOPTS = "ui.childopts";

    /**
     * A class implementing javax.servlet.Filter for authenticating/filtering UI requests.
     */
    @isString
    public static final String UI_FILTER = "ui.filter";

    /**
     * Initialization parameters for the javax.servlet.Filter.
     */
    @isMapEntryType(keyType = String.class, valueType = String.class)
    public static final String UI_FILTER_PARAMS = "ui.filter.params";

    /**
     * The size of the header buffer for the UI in bytes.
     */
    @isInteger
    @isPositiveNumber
    public static final String UI_HEADER_BUFFER_BYTES = "ui.header.buffer.bytes";

    /**
     * This port is used by the Storm UI for receiving HTTPS (SSL) requests from clients.
     */
    @isInteger
    @isPositiveNumber
    public static final String UI_HTTPS_PORT = "ui.https.port";

    /**
     * Path to the keystore used by Storm UI for setting up HTTPS (SSL).
     */
    @isString
    public static final String UI_HTTPS_KEYSTORE_PATH = "ui.https.keystore.path";

    /**
     * Password to the keystore used by Storm UI for setting up HTTPS (SSL).
     */
    @isString
    public static final String UI_HTTPS_KEYSTORE_PASSWORD = "ui.https.keystore.password";

    /**
     * Type of keystore used by Storm UI for setting up HTTPS (SSL).
     * see http://docs.oracle.com/javase/7/docs/api/java/security/KeyStore.html for more details.
     */
    @isString
    public static final String UI_HTTPS_KEYSTORE_TYPE = "ui.https.keystore.type";

    /**
     * Password to the private key in the keystore for setting up HTTPS (SSL).
     */
    @isString
    public static final String UI_HTTPS_KEY_PASSWORD = "ui.https.key.password";

    /**
     * Path to the truststore used by Storm UI setting up HTTPS (SSL).
     */
    @isString
    public static final String UI_HTTPS_TRUSTSTORE_PATH = "ui.https.truststore.path";

    /**
     * Password to the truststore used by Storm UI setting up HTTPS (SSL).
     */
    @isString
    public static final String UI_HTTPS_TRUSTSTORE_PASSWORD = "ui.https.truststore.password";

    /**
     * Type of truststore used by Storm UI for setting up HTTPS (SSL).
     * see http://docs.oracle.com/javase/7/docs/api/java/security/KeyStore.html for more details.
     */
    @isString
    public static final String UI_HTTPS_TRUSTSTORE_TYPE = "ui.https.truststore.type";

    /**
     * If set to true, the Storm UI HTTPS server will request (but not require) a certificate
     * from connecting clients (SSL client auth).
     */
    @isBoolean
    public static final String UI_HTTPS_WANT_CLIENT_AUTH = "ui.https.want.client.auth";

    /**
     * If set to true, the Storm UI HTTPS server will require a certificate from connecting
     * clients (SSL client auth).
     */
    @isBoolean
    public static final String UI_HTTPS_NEED_CLIENT_AUTH = "ui.https.need.client.auth";

    /**
     * The maximum number of threads that should be used by the Pacemaker.
     * When Pacemaker gets loaded it will spawn new threads, up to
     * this many total, to handle the load.
     */
    @isNumber
    @isPositiveNumber
    public static final String PACEMAKER_MAX_THREADS = "pacemaker.max.threads";

    /**
     * This parameter is used by the storm-deploy project to configure the
     * jvm options for the pacemaker daemon.
     */
    @isStringOrStringList
    public static final String PACEMAKER_CHILDOPTS = "pacemaker.childopts";

    /**
     * This port is used by Storm DRPC for receiving HTTP DPRC requests from clients.
     */
    @isInteger
    public static final String DRPC_HTTP_PORT = "drpc.http.port";

    /**
     * This port is used by Storm DRPC for receiving HTTPS (SSL) DPRC requests from clients.
     */
    @isInteger
    public static final String DRPC_HTTPS_PORT = "drpc.https.port";

    /**
     * Path to the keystore used by Storm DRPC for setting up HTTPS (SSL).
     */
    @isString
    public static final String DRPC_HTTPS_KEYSTORE_PATH = "drpc.https.keystore.path";

    /**
     * Password to the keystore used by Storm DRPC for setting up HTTPS (SSL).
     */
    @isString
    public static final String DRPC_HTTPS_KEYSTORE_PASSWORD = "drpc.https.keystore.password";

    /**
     * Type of keystore used by Storm DRPC for setting up HTTPS (SSL).
     * see http://docs.oracle.com/javase/7/docs/api/java/security/KeyStore.html for more details.
     */
    @isString
    public static final String DRPC_HTTPS_KEYSTORE_TYPE = "drpc.https.keystore.type";

    /**
     * Password to the private key in the keystore for setting up HTTPS (SSL).
     */
    @isString
    public static final String DRPC_HTTPS_KEY_PASSWORD = "drpc.https.key.password";

    /**
     * Path to the truststore used by Storm DRPC setting up HTTPS (SSL).
     */
    @isString
    public static final String DRPC_HTTPS_TRUSTSTORE_PATH = "drpc.https.truststore.path";

    /**
     * Password to the truststore used by Storm DRPC setting up HTTPS (SSL).
     */
    @isString
    public static final String DRPC_HTTPS_TRUSTSTORE_PASSWORD = "drpc.https.truststore.password";

    /**
     * Type of truststore used by Storm DRPC for setting up HTTPS (SSL).
     * see http://docs.oracle.com/javase/7/docs/api/java/security/KeyStore.html for more details.
     */
    @isString
    public static final String DRPC_HTTPS_TRUSTSTORE_TYPE = "drpc.https.truststore.type";

    /**
     * If set to true, the Storm DRPC HTTPS server will request (but not require) a certificate
     * from connecting clients (SSL client auth).
     */
    @isBoolean
    public static final String DRPC_HTTPS_WANT_CLIENT_AUTH = "drpc.https.want.client.auth";

    /**
     * If set to true, the Storm DRPC HTTPS server will require a certificate from connecting
     * clients (SSL client auth).
     */
    @isBoolean
    public static final String DRPC_HTTPS_NEED_CLIENT_AUTH = "drpc.https.need.client.auth";

    /**
     * Class name for authorization plugin for DRPC client.
     */
    @isString
    public static final String DRPC_AUTHORIZER = "drpc.authorizer";

    /**
     * The timeout on DRPC requests within the DRPC server. Defaults to 10 minutes. Note that requests can also
     * timeout based on the socket timeout on the DRPC client, and separately based on the topology message
     * timeout for the topology implementing the DRPC function.
     */
    @isInteger
    @isPositiveNumber
    @NotNull
    public static final String DRPC_REQUEST_TIMEOUT_SECS = "drpc.request.timeout.secs";

    /**
     * Childopts for Storm DRPC Java process.
     */
    @isStringOrStringList
    public static final String DRPC_CHILDOPTS = "drpc.childopts";

    /**
     * The metadata configured on the supervisor.
     */
    @isMapEntryType(keyType = String.class, valueType = String.class)
    public static final String SUPERVISOR_SCHEDULER_META = "supervisor.scheduler.meta";

    /**
     * A list of ports that can run workers on this supervisor. Each worker uses one port, and
     * the supervisor will only run one worker per port. Use this configuration to tune
     * how many workers run on each machine.
     */
    @isNoDuplicateInList
    @NotNull
    @isListEntryCustom(entryValidatorClasses = {ConfigValidation.IntegerValidator.class, ConfigValidation.PositiveNumberValidator.class})
    public static final String SUPERVISOR_SLOTS_PORTS = "supervisor.slots.ports";

    /**
     * What blobstore implementation the supervisor should use.
     */
    @isString
    public static final String SUPERVISOR_BLOBSTORE = "supervisor.blobstore.class";

    /**
     * The distributed cache target size in MB. This is a soft limit to the size of the distributed
     * cache contents.
     */
    @isPositiveNumber
    @isInteger
    public static final String SUPERVISOR_LOCALIZER_CACHE_TARGET_SIZE_MB = "supervisor.localizer.cache.target.size.mb";

    /**
     * The distributed cache cleanup interval. Controls how often it scans to attempt to cleanup
     * anything over the cache target size.
     */
    @isPositiveNumber
    @isInteger
    public static final String SUPERVISOR_LOCALIZER_CACHE_CLEANUP_INTERVAL_MS = "supervisor.localizer.cleanup.interval.ms";

    /**
     * What blobstore download parallelism the supervisor should use.
     */
    @isPositiveNumber
    @isInteger
    public static final String SUPERVISOR_BLOBSTORE_DOWNLOAD_THREAD_COUNT = "supervisor.blobstore.download.thread.count";

    /**
     * Maximum number of retries a supervisor is allowed to make for downloading a blob.
     */
    @isPositiveNumber
    @isInteger
    public static final String SUPERVISOR_BLOBSTORE_DOWNLOAD_MAX_RETRIES = "supervisor.blobstore.download.max_retries";

    /**
     * What blobstore implementation nimbus should use.
     */
    @isString
    public static final String NIMBUS_BLOBSTORE = "nimbus.blobstore.class";

    /**
     * During operations with the blob store, via master, how long a connection
     * is idle before nimbus considers it dead and drops the session and any
     * associated connections.
     */
    @isPositiveNumber
    @isInteger
    public static final String NIMBUS_BLOBSTORE_EXPIRATION_SECS = "nimbus.blobstore.expiration.secs";

    /**
     * A number representing the maximum number of workers any single topology can acquire.
     */
    @isInteger
    @isPositiveNumber(includeZero = true)
    public static final String NIMBUS_SLOTS_PER_TOPOLOGY = "nimbus.slots.perTopology";

    /**
     * A class implementing javax.servlet.Filter for DRPC HTTP requests.
     */
    @isString
    public static final String DRPC_HTTP_FILTER = "drpc.http.filter";

    /**
     * Initialization parameters for the javax.servlet.Filter of the DRPC HTTP
     * service.
     */
    @isMapEntryType(keyType = String.class, valueType = String.class)
    public static final String DRPC_HTTP_FILTER_PARAMS = "drpc.http.filter.params";

    /**
     * A number representing the maximum number of executors any single topology can acquire.
     */
    @isInteger
    @isPositiveNumber(includeZero = true)
    public static final String NIMBUS_EXECUTORS_PER_TOPOLOGY = "nimbus.executors.perTopology";

    /**
     * This parameter is used by the storm-deploy project to configure the
     * jvm options for the supervisor daemon.
     */
    @isStringOrStringList
    public static final String SUPERVISOR_CHILDOPTS = "supervisor.childopts";

    /**
     * How many seconds to sleep for before shutting down threads on worker.
     */
    @isInteger
    @isPositiveNumber
    public static final String SUPERVISOR_WORKER_SHUTDOWN_SLEEP_SECS = "supervisor.worker.shutdown.sleep.secs";

    /**
     * How long a worker can go without heartbeating during the initial launch before
     * the supervisor tries to restart the worker process. This value override
     * supervisor.worker.timeout.secs during launch because there is additional
     * overhead to starting and configuring the JVM on launch.
     */
    @isInteger
    @isPositiveNumber
    @NotNull
    public static final String SUPERVISOR_WORKER_START_TIMEOUT_SECS = "supervisor.worker.start.timeout.secs";

    /**
     * Whether or not the supervisor should launch workers assigned to it. Defaults
     * to true -- and you should probably never change this value. This configuration
     * is used in the Storm unit tests.
     */
    @isBoolean
    public static final String SUPERVISOR_ENABLE = "supervisor.enable";

    /**
     * How often the supervisor sends a heartbeat to the master.
     */
    @isInteger
    public static final String SUPERVISOR_HEARTBEAT_FREQUENCY_SECS = "supervisor.heartbeat.frequency.secs";

    /**
     * How often the supervisor checks the worker heartbeats to see if any of them
     * need to be restarted.
     */
    @isInteger
    @isPositiveNumber
    public static final String SUPERVISOR_MONITOR_FREQUENCY_SECS = "supervisor.monitor.frequency.secs";

    /**
     * The jvm profiler opts provided to workers launched by this supervisor.
     */
    @isStringOrStringList
    public static final String WORKER_PROFILER_CHILDOPTS = "worker.profiler.childopts";

    /**
     * Enable profiling of worker JVMs using Oracle's Java Flight Recorder.
     * Unlocking commercial features requires a special license from Oracle.
     * See http://www.oracle.com/technetwork/java/javase/terms/products/index.html
     */
    @isBoolean
    public static final String WORKER_PROFILER_ENABLED = "worker.profiler.enabled";

    /**
     * The command launched supervisor with worker arguments
     * pid, action and [target_directory].
     * Where action is - start profile, stop profile, jstack, heapdump and kill against pid.
     */
    @isString
    public static final String WORKER_PROFILER_COMMAND = "worker.profiler.command";

    /**
     * A list of classes implementing IClusterMetricsConsumer (See storm.yaml.example for exact config format).
     * Each listed class will be routed cluster related metrics data.
     * Each listed class maps 1:1 to a ClusterMetricsConsumerExecutor and they're executed in Nimbus.
     * Only consumers which run in leader Nimbus receives metrics data.
     */
    @isListEntryCustom(entryValidatorClasses = {ConfigValidation.ClusterMetricRegistryValidator.class})
    public static final String STORM_CLUSTER_METRICS_CONSUMER_REGISTER = "storm.cluster.metrics.consumer.register";

    /**
     * How often cluster metrics data is published to metrics consumer.
     */
    @NotNull
    @isPositiveNumber
    public static final String STORM_CLUSTER_METRICS_CONSUMER_PUBLISH_INTERVAL_SECS = "storm.cluster.metrics.consumer.publish.interval.secs";

    /**
     * Enables user-first classpath. See topology.classpath.beginning.
     */
    @isBoolean
    public static final String STORM_TOPOLOGY_CLASSPATH_BEGINNING_ENABLED = "storm.topology.classpath.beginning.enabled";

    /**
     * This value is passed to spawned JVMs (e.g., Nimbus, Supervisor, and Workers)
     * for the java.library.path value. java.library.path tells the JVM where
     * to look for native libraries. It is necessary to set this config correctly since
     * Storm uses the ZeroMQ and JZMQ native libs.
     */
    @isString
    public static final String JAVA_LIBRARY_PATH = "java.library.path";

    /**
     * The path to use as the zookeeper dir when running a zookeeper server via
     * "storm dev-zookeeper". This zookeeper instance is only intended for development;
     * it is not a production grade zookeeper setup.
     */
    @isString
    public static final String DEV_ZOOKEEPER_PATH = "dev.zookeeper.path";

    /**
     * A map from topology name to the number of machines that should be dedicated for that topology. Set storm.scheduler
     * to org.apache.storm.scheduler.IsolationScheduler to make use of the isolation scheduler.
     */
    @isMapEntryType(keyType = String.class, valueType = Number.class)
    public static final String ISOLATION_SCHEDULER_MACHINES = "isolation.scheduler.machines";

    /**
     * A map from the user name to the number of machines that user is allowed to use. Set storm.scheduler
     * to org.apache.storm.scheduler.multitenant.MultitenantScheduler
     */
    @isMapEntryType(keyType = String.class, valueType = Number.class)
    public static final String MULTITENANT_SCHEDULER_USER_POOLS = "multitenant.scheduler.user.pools";

    /**
     * A map of users to another map of the resource guarantees of the user. Used by Resource Aware Scheduler to ensure
     * per user resource guarantees.
     */
    @isMapEntryCustom(keyValidatorClasses = {ConfigValidation.StringValidator.class},
            valueValidatorClasses = {ConfigValidation.UserResourcePoolEntryValidator.class})
    public static final String RESOURCE_AWARE_SCHEDULER_USER_POOLS = "resource.aware.scheduler.user.pools";

    /**
     * The class that specifies the eviction strategy to use in ResourceAwareScheduler.
     */
    @NotNull
    @isImplementationOfClass(implementsClass = IEvictionStrategy.class)
    public static final String RESOURCE_AWARE_SCHEDULER_EVICTION_STRATEGY = "resource.aware.scheduler.eviction.strategy";

    /**
     * The class that specifies the scheduling priority strategy to use in ResourceAwareScheduler.
     */
    @NotNull
    @isImplementationOfClass(implementsClass = ISchedulingPriorityStrategy.class)
    public static final String RESOURCE_AWARE_SCHEDULER_PRIORITY_STRATEGY = "resource.aware.scheduler.priority.strategy";

    /**
     * How often nimbus's background thread to sync code for missing topologies should run.
     */
    @isInteger
    public static final String NIMBUS_CODE_SYNC_FREQ_SECS = "nimbus.code.sync.freq.secs";

    /**
     * The plugin to be used for resource isolation.
     */
    @isImplementationOfClass(implementsClass = ResourceIsolationInterface.class)
    public static final String STORM_RESOURCE_ISOLATION_PLUGIN = "storm.resource.isolation.plugin";

    /**
     * CGroup Settings below.
     */

    /**
     * Resources to be controlled by cgroups.
     */
    @isStringList
    public static final String STORM_CGROUP_RESOURCES = "storm.cgroup.resources";

    /**
     * Name for the cgroup hierarchy.
     */
    @isString
    public static final String STORM_CGROUP_HIERARCHY_NAME = "storm.cgroup.hierarchy.name";

    /**
     * Flag to determine whether to use a resource isolation plugin.
     * Also determines whether the unit tests for cgroup runs.
     * If storm.resource.isolation.plugin.enable is set to false the unit tests for cgroups will not run.
     */
    @isBoolean
    public static final String STORM_RESOURCE_ISOLATION_PLUGIN_ENABLE = "storm.resource.isolation.plugin.enable";

    /**
     * Root directory for cgroups.
     */
    @isString
    public static final String STORM_SUPERVISOR_CGROUP_ROOTDIR = "storm.supervisor.cgroup.rootdir";

    /**
     * The manually set memory limit (in MB) for each CGroup on supervisor node.
     */
    @isPositiveNumber
    public static final String STORM_WORKER_CGROUP_MEMORY_MB_LIMIT = "storm.worker.cgroup.memory.mb.limit";

    /**
     * The manually set cpu share for each CGroup on supervisor node.
     */
    @isPositiveNumber
    public static final String STORM_WORKER_CGROUP_CPU_LIMIT = "storm.worker.cgroup.cpu.limit";

    /**
     * Full path to cgexec command.
     */
    @isString
    public static final String STORM_CGROUP_CGEXEC_CMD = "storm.cgroup.cgexec.cmd";

    /**
     * Please use STORM_SUPERVISOR_MEMORY_LIMIT_TOLERANCE_MARGIN_MB instead. The amount of memory a
     * worker can exceed its allocation before cgroup will kill it.
     */
    @isPositiveNumber(includeZero = true)
    public static final String STORM_CGROUP_MEMORY_LIMIT_TOLERANCE_MARGIN_MB =
            "storm.cgroup.memory.limit.tolerance.margin.mb";

    /**
     * Java does not always play nicely with cgroups. It is coming but not fully implemented and not
     * for the way storm uses cgroups. In the short term you can disable the hard memory enforcement
     * by cgroups and let the supervisor handle shooting workers going over their limit in a kinder
     * way.
     */
    @isBoolean
    public static final String STORM_CGROUP_MEMORY_ENFORCEMENT_ENABLE = "storm.cgroup.memory.enforcement.enable";

    // Configs for memory enforcement done by the supervisor (not cgroups directly)

    /**
     * Memory given to each worker for free (because java and storm have some overhead). This is
     * memory on the box that the workers can use. This should not be included in
     * SUPERVISOR_MEMORY_CAPACITY_MB, as nimbus does not use this memory for scheduling.
     */
    @isPositiveNumber
    public static final String STORM_SUPERVISOR_MEMORY_LIMIT_TOLERANCE_MARGIN_MB =
            "storm.supervisor.memory.limit.tolerance.margin.mb";

    /**
     * A multiplier for the memory limit of a worker that will have the supervisor shoot it
     * immediately. 1.0 means shoot the worker as soon as it goes over. 2.0 means shoot the worker if
     * its usage is double what was requested. This value is combined with
     * STORM_SUPERVISOR_HARD_MEMORY_LIMIT_OVERAGE and which ever is greater is used for enforcement.
     * This allows small workers to not be shot.
     */
    @isPositiveNumber
    public static final String STORM_SUPERVISOR_HARD_MEMORY_LIMIT_MULTIPLIER =
            "storm.supervisor.hard.memory.limit.multiplier";

    /**
     * If the memory usage of a worker goes over its limit by this value is it shot immediately. This
     * value is combined with STORM_SUPERVISOR_HARD_LIMIT_MEMORY_MULTIPLIER and which ever is greater
     * is used for enforcement. This allows small workers to not be shot.
     */
    @isPositiveNumber(includeZero = true)
    public static final String STORM_SUPERVISOR_HARD_LIMIT_MEMORY_OVERAGE_MB =
            "storm.supervisor.hard.memory.limit.overage.mb";

    /**
     * If the amount of memory that is free in the system (either on the box or in the supervisor's
     * cgroup) is below this number (in MB) consider the system to be in low memory mode and start
     * shooting workers if they are over their limit.
     */
    @isPositiveNumber
    public static final String STORM_SUPERVISOR_LOW_MEMORY_THRESHOLD_MB = "storm.supervisor.low.memory.threshold.mb";

    /**
     * If the amount of memory that is free in the system (either on the box or in the supervisor's
     * cgroup) is below this number (in MB) consider the system to be a little low on memory and start
     * shooting workers if they are over their limit for a given grace period
     * STORM_SUPERVISOR_MEDIUM_MEMORY_GRACE_PERIOD_MS.
     */
    @isPositiveNumber
    public static final String STORM_SUPERVISOR_MEDIUM_MEMORY_THRESHOLD_MB =
            "storm.supervisor.medium.memory.threshold.mb";

    /**
     * The number of milliseconds that a worker is allowed to be over their limit when there is a
     * medium amount of memory free in the system.
     */
    @isPositiveNumber
    public static final String STORM_SUPERVISOR_MEDIUM_MEMORY_GRACE_PERIOD_MS =
            "storm.supervisor.medium.memory.grace.period.ms";

    // VALIDATION ONLY CONFIGS
    // Some configs inside Config.java may reference classes we don't want to expose in storm-client, but we still want to validate
    // That they reference a valid class.  To allow this to happen we do part of the validation on the client side with annotations on
    // static final members of the Config class, and other validations here.  We avoid naming them the same thing because clojure code
    // walks these two classes and creates clojure constants for these values.

    /**
     * Server side validation that @{see Config#TOPOLOGY_SCHEDULER_STRATEGY} is set ot a subclass of IStrategy.
     */
    @isImplementationOfClass(implementsClass = IStrategy.class)
    public static final String VALIDATE_TOPOLOGY_SCHEDULER_STRATEGY = Config.TOPOLOGY_SCHEDULER_STRATEGY;

    /**
     * Get the configured cgroup root directory for the supervisor, or null if unset.
     */
    public static String getCgroupRootDir(Map<String, Object> conf) {
        return (String) conf.get(STORM_SUPERVISOR_CGROUP_ROOTDIR);
    }

    /**
     * Get the configured storm cgroup hierarchy directory, or null if unset.
     */
    public static String getCgroupStormHierarchyDir(Map<String, Object> conf) {
        return (String) conf.get(Config.STORM_CGROUP_HIERARCHY_DIR);
    }

    /**
     * Get the cgroup resources from the conf.
     *
     * @param conf the config to read
     * @return the resources.
     */
    public static ArrayList<String> getCgroupStormResources(Map<String, Object> conf) {
        ArrayList<String> ret = new ArrayList<>();
        for (String entry : ((Iterable<String>) conf.get(DaemonConfig.STORM_CGROUP_RESOURCES))) {
            ret.add(entry);
        }
        return ret;
    }

    /**
     * Get the configured storm cgroup hierarchy name, or null if unset.
     */
    public static String getCgroupStormHierarchyName(Map<String, Object> conf) {
        return (String) conf.get(DaemonConfig.STORM_CGROUP_HIERARCHY_NAME);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.cache.query.security;

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.mockito.Mockito.mock;

import java.io.File;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.ObjectStreamException;
import java.io.Serializable;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.Set;

import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;

import org.apache.geode.cache.Cache;
import org.apache.geode.internal.cache.InternalCache;
import org.apache.geode.test.junit.categories.SecurityTest;

/**
 * Unit tests for {@link JavaBeanAccessorMethodAuthorizer}: constructor null checks, rejection of
 * known-dangerous methods, and is/get-accessor matching restricted to the allowed packages.
 */
@Category(SecurityTest.class)
public class JavaBeanAccessorMethodAuthorizerTest {
  // Package names used as the authorizer's allow-list in these tests.
  private static final String LANG_PACKAGE = String.class.getPackage().getName();
  private static final String IO_PACKAGE = File.class.getPackage().getName();

  private InternalCache mockCache;
  private JavaBeanAccessorMethodAuthorizer authorizerWithLangAndIOPackagesSpecified;
  private RestrictedMethodAuthorizer defaultAuthorizer;

  @Before
  public void setUp() {
    mockCache = mock(InternalCache.class);
    defaultAuthorizer = new RestrictedMethodAuthorizer(mockCache);

    Set<String> allowedPackages = new HashSet<>();
    allowedPackages.add(LANG_PACKAGE);
    allowedPackages.add(IO_PACKAGE);
    authorizerWithLangAndIOPackagesSpecified =
        new JavaBeanAccessorMethodAuthorizer(defaultAuthorizer, allowedPackages);
  }

  @Test
  public void constructorThrowsExceptionWhenCacheIsNull() {
    assertThatThrownBy(() -> new JavaBeanAccessorMethodAuthorizer((Cache) null, new HashSet<>()))
        .isInstanceOf(NullPointerException.class)
        .hasMessage(JavaBeanAccessorMethodAuthorizer.NULL_CACHE_MESSAGE);
  }

  @Test
  public void constructorThrowsExceptionWhenRestrictedMethodAuthorizerIsNull() {
    assertThatThrownBy(
        () -> new JavaBeanAccessorMethodAuthorizer((RestrictedMethodAuthorizer) null,
            new HashSet<>()))
                .isInstanceOf(NullPointerException.class)
                .hasMessage(JavaBeanAccessorMethodAuthorizer.NULL_AUTHORIZER_MESSAGE);
  }

  @Test
  public void constructorsThrowsExceptionWhenAllowedPackagesIsNull() {
    // Both constructor overloads must reject a null package set.
    assertThatThrownBy(() -> new JavaBeanAccessorMethodAuthorizer(mockCache, null))
        .isInstanceOf(NullPointerException.class)
        .hasMessage(JavaBeanAccessorMethodAuthorizer.NULL_PACKAGE_MESSAGE);

    assertThatThrownBy(
        () -> new JavaBeanAccessorMethodAuthorizer(defaultAuthorizer, null))
            .isInstanceOf(NullPointerException.class)
            .hasMessage(JavaBeanAccessorMethodAuthorizer.NULL_PACKAGE_MESSAGE);
  }

  @Test
  public void authorizeReturnsFalseForKnownDangerousMethods() throws NoSuchMethodException {
    // Serialization hooks and getClass are always denied, even for matching accessor names.
    List<Method> dangerousMethods = new ArrayList<>();
    dangerousMethods.add(TestBean.class.getMethod("getClass"));
    dangerousMethods.add(TestBean.class.getMethod("readResolve"));
    dangerousMethods.add(TestBean.class.getMethod("readObjectNoData"));
    dangerousMethods.add(TestBean.class.getMethod("readObject", ObjectInputStream.class));
    dangerousMethods.add(TestBean.class.getMethod("writeReplace"));
    dangerousMethods.add(TestBean.class.getMethod("writeObject", ObjectOutputStream.class));

    dangerousMethods.forEach(
        method -> assertThat(
            authorizerWithLangAndIOPackagesSpecified.authorize(method, new TestBean()))
                .isFalse());
  }

  @Test
  public void authorizeReturnsFalseForDisallowedGeodeClassesWithGeodePackageSpecified()
      throws NoSuchMethodException {
    // Sanity-check: TestBean really does live under the Geode base package.
    assertThat(TestBean.class.getPackage().getName())
        .startsWith(JavaBeanAccessorMethodAuthorizer.GEODE_BASE_PACKAGE);

    List<Method> geodeMethods = new ArrayList<>();
    geodeMethods.add(TestBean.class.getMethod("isMatchingMethod"));
    geodeMethods.add(TestBean.class.getMethod("getMatchingMethod"));
    geodeMethods.add(TestBean.class.getMethod("nonMatchingMethod"));

    Set<String> geodePackage = new HashSet<>();
    geodePackage.add(JavaBeanAccessorMethodAuthorizer.GEODE_BASE_PACKAGE);
    JavaBeanAccessorMethodAuthorizer geodeMatchingAuthorizer =
        new JavaBeanAccessorMethodAuthorizer(defaultAuthorizer, geodePackage);

    geodeMethods.forEach(
        method -> assertThat(geodeMatchingAuthorizer.authorize(method, new TestBean())).isFalse());
  }

  @Test
  public void authorizeReturnsFalseForMatchingMethodNamesAndNonMatchingPackage()
      throws NoSuchMethodException {
    // java.util is not in the allow-list, so is/get accessors on List are denied.
    Method getMatchingMethod = List.class.getMethod("get", int.class);
    Method isMatchingMethod = List.class.getMethod("isEmpty");

    assertThat(
        authorizerWithLangAndIOPackagesSpecified.authorize(isMatchingMethod, new ArrayList<>()))
            .isFalse();
    assertThat(
        authorizerWithLangAndIOPackagesSpecified.authorize(getMatchingMethod, new ArrayList<>()))
            .isFalse();
  }

  @Test
  public void authorizeReturnsFalseForNonMatchingMethodNameAndMatchingPackage()
      throws NoSuchMethodException {
    // Allowed package, but "notify" is not an is/get accessor.
    Method langMethod = String.class.getMethod("notify");
    assertThat(authorizerWithLangAndIOPackagesSpecified.authorize(langMethod, "")).isFalse();

    Method ioMethod = File.class.getMethod("notify");
    assertThat(authorizerWithLangAndIOPackagesSpecified.authorize(ioMethod, new File("")))
        .isFalse();
  }

  @Test
  public void authorizeReturnsTrueForMatchingMethodNamesAndPackage() throws NoSuchMethodException {
    Method isMatchingLangMethod = String.class.getMethod("isEmpty");
    Method getMatchingLangMethod = String.class.getMethod("getBytes");
    assertThat(authorizerWithLangAndIOPackagesSpecified.authorize(isMatchingLangMethod, ""))
        .isTrue();
    assertThat(authorizerWithLangAndIOPackagesSpecified.authorize(getMatchingLangMethod, ""))
        .isTrue();

    Method isMatchingIOMethod = File.class.getMethod("isAbsolute");
    Method getMatchingIOMethod = File.class.getMethod("getPath");
    assertThat(authorizerWithLangAndIOPackagesSpecified.authorize(isMatchingIOMethod, new File("")))
        .isTrue();
    assertThat(
        authorizerWithLangAndIOPackagesSpecified.authorize(getMatchingIOMethod, new File("")))
            .isTrue();
  }

  @Test
  public void authorizeReturnsFalseForNonMatchingDisallowedMethod() throws NoSuchMethodException {
    Method method = Object.class.getMethod("notify");
    assertThat(authorizerWithLangAndIOPackagesSpecified.authorize(method, new Object())).isFalse();
  }

  @Test
  public void authorizeReturnsTrueForNonMatchingAllowedMethod() throws NoSuchMethodException {
    // equals is allowed by the wrapped default authorizer regardless of package matching.
    Method method = Object.class.getMethod("equals", Object.class);
    assertThat(authorizerWithLangAndIOPackagesSpecified.authorize(method, new Object())).isTrue();
  }

  @Test
  public void allowedPackagesIsUnmodifiable() {
    assertThatThrownBy(
        () -> authorizerWithLangAndIOPackagesSpecified.getAllowedPackages().remove(LANG_PACKAGE))
            .isInstanceOf(UnsupportedOperationException.class);
  }

  /**
   * Fixture exposing every serialization hook plus is/get-named and non-accessor methods, used to
   * exercise both the dangerous-method denial and the accessor-name matching.
   */
  @SuppressWarnings("unused")
  private static class TestBean implements Serializable {
    public Object writeReplace() throws ObjectStreamException {
      return new TestBean();
    }

    public void writeObject(ObjectOutputStream stream) throws IOException {
      throw new IOException();
    }

    public Object readResolve() throws ObjectStreamException {
      return new TestBean();
    }

    public void readObjectNoData() throws ObjectStreamException {}

    public void readObject(ObjectInputStream stream) throws IOException, ClassNotFoundException {
      if (new Random().nextBoolean()) {
        throw new IOException();
      } else {
        throw new ClassNotFoundException();
      }
    }

    public void isMatchingMethod() {}

    public void getMatchingMethod() {}

    public void nonMatchingMethod() {}
  }
}
/*
 * $ProjectName$
 * $ProjectRevision$
 * -----------------------------------------------------------
 * $Id: ByteArrayBitInputStream.java,v 1.3 2003/04/10 19:48:31 jarnbjo Exp $
 * -----------------------------------------------------------
 *
 * $Author: jarnbjo $
 *
 * Description:
 *
 * Copyright 2002-2003 Tor-Einar Jarnbjo
 * -----------------------------------------------------------
 *
 * Change History
 * -----------------------------------------------------------
 * $Log: ByteArrayBitInputStream.java,v $
 * Revision 1.3 2003/04/10 19:48:31 jarnbjo
 * no message
 *
 * Revision 1.2 2003/03/16 01:11:39 jarnbjo
 * no message
 *
 * Revision 1.1 2003/03/03 21:02:20 jarnbjo
 * no message
 *
 */

package net.guerra24.infinity.client.sound.util.io;

import java.io.IOException;

/**
 * Implementation of the <code>BitInputStream</code> interface, using a byte
 * array as data source.
 *
 * <p>Cursor state: {@code byteIndex} is the current byte in {@code source};
 * {@code bitIndex} is the next bit inside that byte. In little-endian mode
 * bits are consumed LSB-first ({@code bitIndex} counts 0..7 upward); in
 * big-endian mode MSB-first ({@code bitIndex} counts 7..0 downward, -1
 * meaning the byte is exhausted).</p>
 *
 * <p>NOTE(review): no bounds checking is done anywhere — reading past the
 * end of the array throws ArrayIndexOutOfBoundsException, not EOF handling.
 * This class is not thread-safe.</p>
 */
public class ByteArrayBitInputStream implements BitInputStream {

	// Backing data; never copied, callers share the array (see getSource()).
	private byte[] source;
	// Cached copy of source[byteIndex] used by the bit-level readers.
	private byte currentByte;
	// LITTLE_ENDIAN or BIG_ENDIAN (constants from BitInputStream).
	private int endian;
	private int byteIndex = 0;
	private int bitIndex = 0;

	/**
	 * Creates a little-endian bit stream over {@code source}.
	 */
	public ByteArrayBitInputStream(byte[] source) {
		this(source, LITTLE_ENDIAN);
	}

	/**
	 * Creates a bit stream over {@code source} with the given bit order.
	 * NOTE(review): requires a non-empty array — source[0] is read eagerly.
	 */
	public ByteArrayBitInputStream(byte[] source, int endian) {
		this.endian = endian;
		this.source = source;
		currentByte = source[0];
		// Start at bit 0 (LSB-first) or bit 7 (MSB-first).
		bitIndex = (endian == LITTLE_ENDIAN) ? 0 : 7;
	}

	/**
	 * Reads and returns the next single bit.
	 */
	public boolean getBit() throws IOException {
		if (endian == LITTLE_ENDIAN) {
			// Past the MSB of the current byte: advance to the next byte.
			if (bitIndex > 7) {
				bitIndex = 0;
				currentByte = source[++byteIndex];
			}
			return (currentByte & (1 << (bitIndex++))) != 0;
		} else {
			// Past the LSB of the current byte: advance to the next byte.
			if (bitIndex < 0) {
				bitIndex = 7;
				currentByte = source[++byteIndex];
			}
			return (currentByte & (1 << (bitIndex--))) != 0;
		}
	}

	/**
	 * Reads {@code bits} bits (at most 32) and returns them as an unsigned
	 * value in an int (the result is only "unsigned" for bits &lt; 32).
	 *
	 * @throws IllegalArgumentException if bits &gt; 32
	 */
	public int getInt(int bits) throws IOException {
		if (bits > 32) {
			throw new IllegalArgumentException("Argument \"bits\" must be <= 32");
		}
		int res = 0;
		if (endian == LITTLE_ENDIAN) {
			// Simple path: assemble LSB-first, one bit at a time.
			for (int i = 0; i < bits; i++) {
				if (getBit()) {
					res |= (1 << i);
				}
			}
		} else {
			// Big-endian fast path: take whole runs of bits per byte.
			if (bitIndex < 0) {
				bitIndex = 7;
				currentByte = source[++byteIndex];
			}
			if (bits <= bitIndex + 1) {
				// All requested bits fit inside the current byte.
				int ci = ((int) currentByte) & 0xff;
				int offset = 1 + bitIndex - bits;
				int mask = ((1 << bits) - 1) << offset;
				res = (ci & mask) >> offset;
				bitIndex -= bits;
			} else {
				// Take the remainder of the current byte first...
				res = (((int) currentByte) & 0xff & ((1 << (bitIndex + 1)) - 1)) << (bits - bitIndex - 1);
				bits -= bitIndex + 1;
				currentByte = source[++byteIndex];
				// ...then whole bytes...
				while (bits >= 8) {
					bits -= 8;
					res |= (((int) source[byteIndex]) & 0xff) << bits;
					currentByte = source[++byteIndex];
				}
				if (bits > 0) {
					// ...then the leading bits of the final byte.
					int ci = ((int) source[byteIndex]) & 0xff;
					res |= (ci >> (8 - bits)) & ((1 << bits) - 1);
					bitIndex = 7 - bits;
				} else {
					// Ended exactly on a byte boundary: the loop advanced one
					// byte too far, so step back and mark the byte exhausted.
					currentByte = source[--byteIndex];
					bitIndex = -1;
				}
			}
		}
		return res;
	}

	/**
	 * Reads {@code bits} bits and sign-extends them (two's complement).
	 */
	public int getSignedInt(int bits) throws IOException {
		int raw = getInt(bits);
		// If the sign bit is set, map [2^(bits-1), 2^bits) to negatives.
		if (raw >= 1 << (bits - 1)) {
			raw -= 1 << bits;
		}
		return raw;
	}

	/**
	 * Walks the Huffman tree one bit at a time until a leaf is reached and
	 * returns its value.
	 * NOTE(review): the bit handling here matches the little-endian path of
	 * getBit() only — presumably never used in big-endian mode; confirm.
	 */
	public int getInt(HuffmanNode root) throws IOException {
		while (root.value == null) {
			if (bitIndex > 7) {
				bitIndex = 0;
				currentByte = source[++byteIndex];
			}
			// 1-bit goes to o1, 0-bit to o0.
			root = (currentByte & (1 << (bitIndex++))) != 0 ? root.o1 : root.o0;
		}
		return root.value.intValue();
	}

	/**
	 * Reads {@code bits} bits (at most 64) and returns them in a long,
	 * honoring the configured bit order.
	 *
	 * @throws IllegalArgumentException if bits &gt; 64
	 */
	public long getLong(int bits) throws IOException {
		if (bits > 64) {
			throw new IllegalArgumentException("Argument \"bits\" must be <= 64");
		}
		long res = 0;
		if (endian == LITTLE_ENDIAN) {
			for (int i = 0; i < bits; i++) {
				if (getBit()) {
					res |= (1L << i);
				}
			}
		} else {
			for (int i = bits - 1; i >= 0; i--) {
				if (getBit()) {
					res |= (1L << i);
				}
			}
		}
		return res;
	}

	/**
	 * <p>
	 * reads an integer encoded as "signed rice" as described in the FLAC audio
	 * format specification: a unary-coded quotient (count of leading 0-bits
	 * before a 1), {@code order} binary bits of remainder, then a zig-zag
	 * mapping of the combined value back to a signed int.
	 * </p>
	 *
	 * <p>
	 * <b>not supported for little endian</b>
	 * </p>
	 *
	 * @param order the Rice parameter (number of remainder bits)
	 * @return the decoded integer value read from the stream
	 *
	 * @throws IOException
	 *             if an I/O error occurs
	 * @throws UnsupportedOperationException
	 *             if the method is not supported by the implementation
	 */
	public int readSignedRice(int order) throws IOException {
		int msbs = -1, lsbs = 0, res = 0;

		if (endian == LITTLE_ENDIAN) {
			// little endian
			throw new UnsupportedOperationException(
					"ByteArrayBitInputStream.readSignedRice() is only supported in big endian mode");
		} else {
			// big endian
			// Unary part: count 0-bits up to and including the terminating 1.
			byte cb = source[byteIndex];
			do {
				msbs++;
				if (bitIndex < 0) {
					bitIndex = 7;
					byteIndex++;
					cb = source[byteIndex];
				}
			} while ((cb & (1 << bitIndex--)) == 0);

			// Binary part: read `order` remainder bits (same fast path as
			// getInt's big-endian branch, but without updating currentByte).
			int bits = order;

			if (bitIndex < 0) {
				bitIndex = 7;
				byteIndex++;
			}

			if (bits <= bitIndex + 1) {
				int ci = ((int) source[byteIndex]) & 0xff;
				int offset = 1 + bitIndex - bits;
				int mask = ((1 << bits) - 1) << offset;
				lsbs = (ci & mask) >> offset;
				bitIndex -= bits;
			} else {
				lsbs = (((int) source[byteIndex]) & 0xff & ((1 << (bitIndex + 1)) - 1)) << (bits - bitIndex - 1);
				bits -= bitIndex + 1;
				byteIndex++;
				while (bits >= 8) {
					bits -= 8;
					lsbs |= (((int) source[byteIndex]) & 0xff) << bits;
					byteIndex++;
				}
				if (bits > 0) {
					int ci = ((int) source[byteIndex]) & 0xff;
					lsbs |= (ci >> (8 - bits)) & ((1 << bits) - 1);
					bitIndex = 7 - bits;
				} else {
					// Ended on a byte boundary: rewind the extra advance.
					byteIndex--;
					bitIndex = -1;
				}
			}

			res = (msbs << order) | lsbs;
		}

		// Zig-zag decode: even -> non-negative, odd -> negative.
		return (res & 1) == 1 ? -(res >> 1) - 1 : (res >> 1);
	}

	/**
	 * <p>
	 * fills the array from <code>off</code> with <code>len</code> integers
	 * encoded as "signed rice" as described in the FLAC audio format
	 * specification (same encoding as {@link #readSignedRice(int)},
	 * decoded in a loop)
	 * </p>
	 *
	 * <p>
	 * <b>not supported for little endian</b>
	 * </p>
	 *
	 * @param order the Rice parameter (number of remainder bits)
	 * @param buffer the destination array
	 * @param off index of the first element to fill
	 * @param len number of values to decode
	 *
	 * @throws IOException
	 *             if an I/O error occurs
	 * @throws UnsupportedOperationException
	 *             if the method is not supported by the implementation
	 */
	public void readSignedRice(int order, int[] buffer, int off, int len) throws IOException {

		if (endian == LITTLE_ENDIAN) {
			// little endian
			throw new UnsupportedOperationException(
					"ByteArrayBitInputStream.readSignedRice() is only supported in big endian mode");
		} else {
			// big endian
			for (int i = off; i < off + len; i++) {
				// Unary quotient (see readSignedRice(int) for details).
				int msbs = -1, lsbs = 0;
				byte cb = source[byteIndex];
				do {
					msbs++;
					if (bitIndex < 0) {
						bitIndex = 7;
						byteIndex++;
						cb = source[byteIndex];
					}
				} while ((cb & (1 << bitIndex--)) == 0);

				// `order` remainder bits.
				int bits = order;

				if (bitIndex < 0) {
					bitIndex = 7;
					byteIndex++;
				}

				if (bits <= bitIndex + 1) {
					int ci = ((int) source[byteIndex]) & 0xff;
					int offset = 1 + bitIndex - bits;
					int mask = ((1 << bits) - 1) << offset;
					lsbs = (ci & mask) >> offset;
					bitIndex -= bits;
				} else {
					lsbs = (((int) source[byteIndex]) & 0xff & ((1 << (bitIndex + 1)) - 1)) << (bits - bitIndex - 1);
					bits -= bitIndex + 1;
					byteIndex++;
					while (bits >= 8) {
						bits -= 8;
						lsbs |= (((int) source[byteIndex]) & 0xff) << bits;
						byteIndex++;
					}
					if (bits > 0) {
						int ci = ((int) source[byteIndex]) & 0xff;
						lsbs |= (ci >> (8 - bits)) & ((1 << bits) - 1);
						bitIndex = 7 - bits;
					} else {
						byteIndex--;
						bitIndex = -1;
					}
				}

				int res = (msbs << order) | lsbs;
				// Zig-zag decode into the caller's buffer.
				buffer[i] = (res & 1) == 1 ? -(res >> 1) - 1 : (res >> 1);
			}
		}
	}

	/**
	 * Skips forward to the next byte boundary.
	 * NOTE(review): in little-endian mode the bitIndex &lt;= 7 test is true
	 * even when already aligned (bitIndex == 0), so align() then still skips
	 * a whole byte — confirm this is the intended semantics for callers.
	 */
	public void align() {
		if (endian == BIG_ENDIAN && bitIndex >= 0) {
			bitIndex = 7;
			byteIndex++;
		} else if (endian == LITTLE_ENDIAN && bitIndex <= 7) {
			bitIndex = 0;
			byteIndex++;
		}
	}

	/**
	 * Switches the bit order; any partially consumed byte is abandoned and
	 * reading resumes at the start of the next byte.
	 */
	public void setEndian(int endian) {
		if (this.endian == BIG_ENDIAN && endian == LITTLE_ENDIAN) {
			bitIndex = 0;
			byteIndex++;
		} else if (this.endian == LITTLE_ENDIAN && endian == BIG_ENDIAN) {
			bitIndex = 7;
			byteIndex++;
		}
		this.endian = endian;
	}

	/**
	 * @return the byte array used as a source for this instance
	 *         (the live array, not a copy)
	 */
	public byte[] getSource() {
		return source;
	}
}
/** * Apache License * Version 2.0, January 2004 * http://www.apache.org/licenses/ * * TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION * * 1. Definitions. * * "License" shall mean the terms and conditions for use, reproduction, * and distribution as defined by Sections 1 through 9 of this document. * * "Licensor" shall mean the copyright owner or entity authorized by * the copyright owner that is granting the License. * * "Legal Entity" shall mean the union of the acting entity and all * other entities that control, are controlled by, or are under common * control with that entity. For the purposes of this definition, * "control" means (i) the power, direct or indirect, to cause the * direction or management of such entity, whether by contract or * otherwise, or (ii) ownership of fifty percent (50%) or more of the * outstanding shares, or (iii) beneficial ownership of such entity. * * "You" (or "Your") shall mean an individual or Legal Entity * exercising permissions granted by this License. * * "Source" form shall mean the preferred form for making modifications, * including but not limited to software source code, documentation * source, and configuration files. * * "Object" form shall mean any form resulting from mechanical * transformation or translation of a Source form, including but * not limited to compiled object code, generated documentation, * and conversions to other media types. * * "Work" shall mean the work of authorship, whether in Source or * Object form, made available under the License, as indicated by a * copyright notice that is included in or attached to the work * (an example is provided in the Appendix below). * * "Derivative Works" shall mean any work, whether in Source or Object * form, that is based on (or derived from) the Work and for which the * editorial revisions, annotations, elaborations, or other modifications * represent, as a whole, an original work of authorship. 
For the purposes * of this License, Derivative Works shall not include works that remain * separable from, or merely link (or bind by name) to the interfaces of, * the Work and Derivative Works thereof. * * "Contribution" shall mean any work of authorship, including * the original version of the Work and any modifications or additions * to that Work or Derivative Works thereof, that is intentionally * submitted to Licensor for inclusion in the Work by the copyright owner * or by an individual or Legal Entity authorized to submit on behalf of * the copyright owner. For the purposes of this definition, "submitted" * means any form of electronic, verbal, or written communication sent * to the Licensor or its representatives, including but not limited to * communication on electronic mailing lists, source code control systems, * and issue tracking systems that are managed by, or on behalf of, the * Licensor for the purpose of discussing and improving the Work, but * excluding communication that is conspicuously marked or otherwise * designated in writing by the copyright owner as "Not a Contribution." * * "Contributor" shall mean Licensor and any individual or Legal Entity * on behalf of whom a Contribution has been received by Licensor and * subsequently incorporated within the Work. * * 2. Grant of Copyright License. Subject to the terms and conditions of * this License, each Contributor hereby grants to You a perpetual, * worldwide, non-exclusive, no-charge, royalty-free, irrevocable * copyright license to reproduce, prepare Derivative Works of, * publicly display, publicly perform, sublicense, and distribute the * Work and such Derivative Works in Source or Object form. * * 3. Grant of Patent License. 
Subject to the terms and conditions of * this License, each Contributor hereby grants to You a perpetual, * worldwide, non-exclusive, no-charge, royalty-free, irrevocable * (except as stated in this section) patent license to make, have made, * use, offer to sell, sell, import, and otherwise transfer the Work, * where such license applies only to those patent claims licensable * by such Contributor that are necessarily infringed by their * Contribution(s) alone or by combination of their Contribution(s) * with the Work to which such Contribution(s) was submitted. If You * institute patent litigation against any entity (including a * cross-claim or counterclaim in a lawsuit) alleging that the Work * or a Contribution incorporated within the Work constitutes direct * or contributory patent infringement, then any patent licenses * granted to You under this License for that Work shall terminate * as of the date such litigation is filed. * * 4. Redistribution. You may reproduce and distribute copies of the * Work or Derivative Works thereof in any medium, with or without * modifications, and in Source or Object form, provided that You * meet the following conditions: * * (a) You must give any other recipients of the Work or * Derivative Works a copy of this License; and * * (b) You must cause any modified files to carry prominent notices * stating that You changed the files; and * * (c) You must retain, in the Source form of any Derivative Works * that You distribute, all copyright, patent, trademark, and * attribution notices from the Source form of the Work, * excluding those notices that do not pertain to any part of * the Derivative Works; and * * (d) If the Work includes a "NOTICE" text file as part of its * distribution, then any Derivative Works that You distribute must * include a readable copy of the attribution notices contained * within such NOTICE file, excluding those notices that do not * pertain to any part of the Derivative Works, in at least one * of 
the following places: within a NOTICE text file distributed * as part of the Derivative Works; within the Source form or * documentation, if provided along with the Derivative Works; or, * within a display generated by the Derivative Works, if and * wherever such third-party notices normally appear. The contents * of the NOTICE file are for informational purposes only and * do not modify the License. You may add Your own attribution * notices within Derivative Works that You distribute, alongside * or as an addendum to the NOTICE text from the Work, provided * that such additional attribution notices cannot be construed * as modifying the License. * * You may add Your own copyright statement to Your modifications and * may provide additional or different license terms and conditions * for use, reproduction, or distribution of Your modifications, or * for any such Derivative Works as a whole, provided Your use, * reproduction, and distribution of the Work otherwise complies with * the conditions stated in this License. * * 5. Submission of Contributions. Unless You explicitly state otherwise, * any Contribution intentionally submitted for inclusion in the Work * by You to the Licensor shall be under the terms and conditions of * this License, without any additional terms or conditions. * Notwithstanding the above, nothing herein shall supersede or modify * the terms of any separate license agreement you may have executed * with Licensor regarding such Contributions. * * 6. Trademarks. This License does not grant permission to use the trade * names, trademarks, service marks, or product names of the Licensor, * except as required for reasonable and customary use in describing the * origin of the Work and reproducing the content of the NOTICE file. * * 7. Disclaimer of Warranty. 
Unless required by applicable law or * agreed to in writing, Licensor provides the Work (and each * Contributor provides its Contributions) on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied, including, without limitation, any warranties or conditions * of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A * PARTICULAR PURPOSE. You are solely responsible for determining the * appropriateness of using or redistributing the Work and assume any * risks associated with Your exercise of permissions under this License. * * 8. Limitation of Liability. In no event and under no legal theory, * whether in tort (including negligence), contract, or otherwise, * unless required by applicable law (such as deliberate and grossly * negligent acts) or agreed to in writing, shall any Contributor be * liable to You for damages, including any direct, indirect, special, * incidental, or consequential damages of any character arising as a * result of this License or out of the use or inability to use the * Work (including but not limited to damages for loss of goodwill, * work stoppage, computer failure or malfunction, or any and all * other commercial damages or losses), even if such Contributor * has been advised of the possibility of such damages. * * 9. Accepting Warranty or Additional Liability. While redistributing * the Work or Derivative Works thereof, You may choose to offer, * and charge a fee for, acceptance of support, warranty, indemnity, * or other liability obligations and/or rights consistent with this * License. However, in accepting such obligations, You may act only * on Your own behalf and on Your sole responsibility, not on behalf * of any other Contributor, and only if You agree to indemnify, * defend, and hold each Contributor harmless for any liability * incurred by, or claims asserted against, such Contributor by reason * of your accepting any such warranty or additional liability. 
* * END OF TERMS AND CONDITIONS * * APPENDIX: How to apply the Apache License to your work. * * To apply the Apache License to your work, attach the following * boilerplate notice, with the fields enclosed by brackets "{}" * replaced with your own identifying information. (Don't include * the brackets!) The text should be enclosed in the appropriate * comment syntax for the file format. We also recommend that a * file or class name and description of purpose be included on the * same "printed page" as the copyright notice for easier * identification within third-party archives. * * Copyright {yyyy} {name of copyright owner} * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package com.deleidos.rtws.systemcfg.composer.impl; import java.io.InputStream; import java.net.MalformedURLException; import java.util.ArrayList; import java.util.Enumeration; import java.util.List; import java.util.Properties; import javax.xml.bind.MarshalException; import net.sf.json.JSONObject; import org.apache.log4j.LogManager; import org.apache.log4j.Logger; import com.deleidos.rtws.commons.config.RtwsConfig; import com.deleidos.rtws.commons.config.UserDataProperties; import com.deleidos.rtws.commons.exception.DefaultConfigurationException; import com.deleidos.rtws.commons.exception.InitializationException; import com.deleidos.rtws.commons.jersey.config.JerseyClientConfig; import com.deleidos.rtws.systemcfg.bean.ComposerStatus; import com.deleidos.rtws.systemcfg.bean.Property; import com.deleidos.rtws.systemcfg.bean.SystemContext; import com.deleidos.rtws.systemcfg.composer.DefinitionComposer; import com.deleidos.rtws.systemcfg.exception.ComposerException; import com.deleidos.rtws.systemcfg.userinput.bean.SystemConfig; import com.deleidos.rtws.systemcfg.util.RtwsProperties; import com.sun.jersey.api.client.Client; import com.sun.jersey.api.client.WebResource; /** * Generates the default properties needed for all composers, the creation of the RTWSCommon properties file. 
* * @author LUCECU * */ public class DefaultPropertyComposer implements DefinitionComposer{ private RtwsProperties properties = null; private static Logger logger = LogManager.getLogger(DefaultPropertyComposer.class); /** The Jersey REST client */ private Client client; private String tenantId; private final String DATABASE = "database"; private final String LDAP = "ldap"; private final String ACCOUNTS = "accounts"; private final String EUCA_ONLY_ANCHOR = "anchor-euca-public-ip"; private String EUCA_CLOUD_IP = "UNKNOWN"; @Override public void initialize() throws InitializationException { String templateFile = null; String baseDir = (String) RtwsConfig.getInstance().getConfiguration().getProperty("sysbuilder.template.basedir"); //initialize template file locations //template file property sets, for lookup in composer later templateFile = String.format("%s/%s", baseDir, "default.properties"); RtwsConfig.getInstance().getConfiguration().setProperty("sysbuilder.service.default.properties", templateFile); this.client = Client.create(JerseyClientConfig.getInstance().getInternalConfig()); if (UserDataProperties.getInstance().getString(UserDataProperties.RTWS_STORAGE_ENDPOINT) != null) this.EUCA_CLOUD_IP = UserDataProperties.getInstance().getString(UserDataProperties.RTWS_STORAGE_ENDPOINT).split("/")[2].split(":")[0]; } @Override public void dispose() { //not implemented } @Override public void writeFile(String version, String fileName) { //not implemented } /** * Loads default configuration/templates for composer. 
*/ @Override public void loadDefaults(String version, SystemConfig config, SystemContext systemContext, RtwsProperties properties) throws MarshalException, DefaultConfigurationException { //properties param is null, use this.properties to set new values String file = RtwsConfig.getInstance().getString("sysbuilder.service.default.properties"); this.properties = new RtwsProperties(); //intialize with defaults for system this.properties.put("system.release", config.getSoftwareVersion()); this.properties.put("system.domain", config.getSystemDomain()); if (config.getApplianceDomain() != null) this.properties.put("system.applianceDomain", config.getApplianceDomain()); //create niamod //String tmp [] = config.getDomain().split("."); StringBuilder niamod = new StringBuilder(config.getSystemDomain()); /*for(int i = tmp.length - 1; i >=0; i--){ niamod.append(tmp[i] + "."); }*/ this.properties.put("system.niamod", niamod.reverse().toString()); this.properties.put("system.region", config.getRegion()); //load rest from file with property inheritance try{ InputStream is = getClass().getResourceAsStream(file); this.properties.load(is); is.close(); this.properties.encryptCredentials(config.getSystemDomain()); }catch(Exception e){ e.printStackTrace(); throw new DefaultConfigurationException("Error during composer load defaults", e); } } /** * Generates default properties for RTWSCommon properties. 
*/ @Override public ComposerStatus compose(SystemConfig config, SystemContext systemContext) throws ComposerException{ ComposerStatus status = ComposerStatus.done; try{ generateConnectorProperties(); generateAnchorProperties(config); generateUserProperties(config); generateInterfaceProperties(systemContext); generateProperties(config); scrubProperties(config); } catch(ComposerException ce){ status = ComposerStatus.error; throw ce; } catch(Exception e){ logger.error(e.toString(), e); e.printStackTrace(); status = ComposerStatus.error; throw new ComposerException(e.toString(), e); } return status; } private void scrubProperties(SystemConfig config) { // Remove those properties not applicable for appliance systems String [] scrubPrefix = {"webapp.dnsapi", "webapp.caapi", "webapp.scheduleapi", "webapp.repository", "webapp.accounts", "webapp.authapi", "webapp.ingestapi", "webapp.tenantapi", "webapp.dbapi"}; List<String> scrubList = new ArrayList<String>(); if (config.getApplianceEnabled()) { Enumeration<Object> keys = this.properties.keys(); while(keys.hasMoreElements()) { String key = (String) keys.nextElement(); for (String scrub : scrubPrefix) { if (key.startsWith(scrub)) scrubList.add(key); } } for (String scrub : scrubList) { this.properties.remove(scrub); logger.info(String.format("Removed propery not required for appliance {%s}: {%s}", scrub, config.getApplianceDomain())); } } } private void generateConnectorProperties(){ this.properties.put("rtws.default.webapp.port", RtwsConfig.getInstance().getString("rtws.default.webapp.port")); this.properties.put("rtws.internal.webapp.port", RtwsConfig.getInstance().getString("rtws.internal.webapp.port")); this.properties.put("rtws.internal.webapp.unsecured.port", RtwsConfig.getInstance().getString("rtws.internal.webapp.unsecured.port")); } private void generateUserProperties(SystemConfig config){ if(config.getProperties() != null){ for(Property property : config.getProperties()){ this.properties.put(property.getName(), 
property.getValue()); } } } private void generateAnchorProperties(SystemConfig config) throws MalformedURLException, ComposerException{ JSONObject anchor = getAnchorNodeInformation(this.tenantId); //h2.connection.url=jdbc:h2:tcp://auth-tms-dev.deleidos.com:8161/commondb //h2.dim.connection.url=jdbc:h2:tcp://auth-tms-dev.deleidos.com:8161/commondb;SCHEMA_SEARCH_PATH=DIMENSIONS //h2.app.connection.url=jdbc:h2:tcp://auth-tms-dev.deleidos.com:8161/commondb;SCHEMA_SEARCH_PATH=APPLICATION //ldap.provider.url=ldap://auth-tms-dev.deleidos.com:10389 //webapp.accounts.url.host=auth-tms-dev.deleidos.com if (config.getApplianceEnabled()) { this.properties.put("h2.connection.url", String.format("jdbc:h2:tcp://%s:8161/commondb;MULTI_THREADED=1;MAX_OPERATION_MEMORY=268435456;MAX_COMPACT_TIME=60000", "APPLIANCE_DB_HOST")); this.properties.put("h2.dim.connection.url", String.format("jdbc:h2:tcp://%s:8161/commondb;SCHEMA_SEARCH_PATH=DIMENSIONS;MULTI_THREADED=1;MAX_OPERATION_MEMORY=268435456;MAX_COMPACT_TIME=60000", "APPLIANCE_DB_HOST")); this.properties.put("h2.app.connection.url", String.format("jdbc:h2:tcp://%s:8161/commondb;SCHEMA_SEARCH_PATH=APPLICATION;MULTI_THREADED=1;MAX_OPERATION_MEMORY=268435456;MAX_COMPACT_TIME=60000", "APPLIANCE_DB_HOST")); } else { this.properties.put("h2.connection.url", String.format("jdbc:h2:tcp://%s:8161/commondb;MULTI_THREADED=1;LOCK_TIMEOUT=300000;MAX_OPERATION_MEMORY=268435456;MAX_COMPACT_TIME=60000", anchor.getString(DATABASE))); this.properties.put("h2.dim.connection.url", String.format("jdbc:h2:tcp://%s:8161/commondb;SCHEMA_SEARCH_PATH=DIMENSIONS;MULTI_THREADED=1;LOCK_TIMEOUT=300000;MAX_OPERATION_MEMORY=268435456;MAX_COMPACT_TIME=60000", anchor.getString(DATABASE))); this.properties.put("h2.app.connection.url", String.format("jdbc:h2:tcp://%s:8161/commondb;SCHEMA_SEARCH_PATH=APPLICATION;MULTI_THREADED=1;LOCK_TIMEOUT=300000;MAX_OPERATION_MEMORY=268435456;MAX_COMPACT_TIME=60000", anchor.getString(DATABASE))); } 
this.properties.put("ldap.provider.url", String.format("ldap://%s:10389", anchor.getString(LDAP))); this.properties.put("webapp.accounts.url.host", anchor.getString(ACCOUNTS)); this.properties.put("webapp.gatewayapi.url.host", anchor.getString(ACCOUNTS)); //this.properties.put("tenant.dns.url", String.format("http://%s/dnsapi", anchor.getString(DATABASE))); //case for euca only, when external dns is not enabled public IP is used for external access if("EUC".equals(config.getServiceProvider()) && config.getExternalDnsEnabled() == false){ this.properties.put("cas.service.url.host", anchor.getString(EUCA_ONLY_ANCHOR)); } else{ this.properties.put("cas.service.url.host", anchor.getString(ACCOUNTS)); } //gateway/anchor host this.properties.put("tenant.gateway.host", anchor.getString(DATABASE)); if("EUC".equals(config.getServiceProvider())) { this.properties.put("ntp.server", EUCA_CLOUD_IP); } else { this.properties.put("ntp.server", "0.amazon.pool.ntp.org,1.amazon.pool.ntp.org,2.amazon.pool.ntp.org,3.amazon.pool.ntp.org"); } } /** * Get dimension table information from Tenant API. Builds URL to Dimension table in database. Creates dimension table * datasource. 
* Default access for unit testing * * @param tenantId String value * @return List<Object> * @throws MalformedURLException * @throws ComposerException */ JSONObject getAnchorNodeInformation(String tenantId) throws MalformedURLException, ComposerException{ String url = String.format("%s%s%s", RtwsConfig.getInstance().getString("webapp.tenantapi.url.path"), "/json/instance/anchor/", tenantId); WebResource resource = this.client.resource(url); String response = resource.get(String.class); logger.debug(resource.getURI().toURL().toString()); JSONObject anchor = JSONObject.fromObject(response); logger.debug(anchor.toString(4)); if(anchor.getString("ldap") == null){ throw new ComposerException("Tenant does not have a anchor instance or with an incorrect anchor id stored in the database"); } else if(anchor.getString("database") == null){ throw new ComposerException("Tenant does not have a anchor instance or with an incorrect anchor id stored in the database"); } else{ //no more checks to perform on the returned object } return anchor; } private void generateInterfaceProperties(SystemContext systemContext) { String tempValue; int port; tempValue = this.properties.getProperty("rtws.path.home"); this.properties.put("rtws.interfaces.config.path", String.format("%s/%s", tempValue, "conf")); // Service Interface if((tempValue = systemContext.getAvailabilityZone().getPropertiesFile()) != null) { this.properties.put("rtws.service.interface.propertiesFile", tempValue); } if((tempValue = systemContext.getAvailabilityZone().getServiceEndpoint().getURLString()) != null) { this.properties.put("rtws.service.interface.endpoint", tempValue); } // Storage Interface if((tempValue = systemContext.getAvailabilityZone().getPropertiesFile()) != null) { this.properties.put("rtws.storage.interface.propertiesFile", tempValue); } if((tempValue = systemContext.getAvailabilityZone().getStorageEndpoint().getEndpoint()) != null) { this.properties.put("rtws.storage.interface.endpoint", tempValue); } 
if((port = systemContext.getAvailabilityZone().getStorageEndpoint().getPort()) != 0) { this.properties.put("rtws.storage.interface.portNumber", String.valueOf(port)); } if((tempValue = systemContext.getAvailabilityZone().getStorageEndpoint().getVirtualpath()) != null) { if (tempValue.trim().length() > 0) { this.properties.put("rtws.storage.interface.virtualPath", tempValue); } } } private void generateProperties(SystemConfig config){ UserDataProperties udProps = UserDataProperties.getInstance(); String domain = udProps.getString(UserDataProperties.RTWS_DOMAIN); String defaultNodeFQDN = "default." + domain; this.properties.put("webapp.repository.tenant.manager.url.host", defaultNodeFQDN); if (config.getApplianceEnabled()) { this.properties.put("rtws.cloud.provider", "APPLIANCE"); } else { this.properties.put("rtws.cloud.provider", config.getServiceProvider()); } this.properties.put("device.name.prefix", RtwsConfig.getInstance().getString("device.name.prefix")); if (config.getVpcEnabled()) { this.properties.put("rtws.vpc.enabled", Boolean.TRUE.toString()); } //used for A&A this.properties.put("rtws.tmsTenantId", UserDataProperties.getInstance().getString(UserDataProperties.RTWS_TENANT_ID)); // Let apps know whether or not the system is an auto scaling one this.properties.put("can.scale", Boolean.toString(config.getAutoScale())); } /** * Set the tenant id. * * @param tenantId String value */ public void setTenantId(String tenantId){ this.tenantId = tenantId; } /** * Get properties. */ @Override public Properties getProperties() { return properties; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.gobblin.util; import java.io.FileNotFoundException; import java.io.IOException; import java.util.Comparator; import java.util.List; import org.apache.commons.lang.ArrayUtils; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathFilter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.Lists; import com.google.common.primitives.Longs; /** * Utility class for listing files on a {@link FileSystem}. 
* * @see FileSystem */ public class FileListUtils { private static final Logger LOG = LoggerFactory.getLogger(FileListUtils.class); public static final Comparator<FileStatus> LATEST_MOD_TIME_ORDER = new Comparator<FileStatus>() { @Override public int compare(FileStatus file1, FileStatus file2) { return Longs.compare(Long.valueOf(file2.getModificationTime()), Long.valueOf(file1.getModificationTime())); } }; public static final PathFilter NO_OP_PATH_FILTER = new PathFilter() { @Override public boolean accept(Path path) { return true; } }; public static List<FileStatus> listFilesRecursively(FileSystem fs, Path path) throws IOException { return listFilesRecursively(fs, path, NO_OP_PATH_FILTER); } public static List<FileStatus> listFilesRecursively(FileSystem fs, Iterable<Path> paths) throws IOException { List<FileStatus> results = Lists.newArrayList(); for (Path path : paths) { results.addAll(listFilesRecursively(fs, path)); } return results; } /** * Given a path to copy, list all files rooted at the given path to copy * * @param fs the file system of the path * @param path root path to copy * @param fileFilter a filter only applied to root * @param includeEmptyDirectories a control to include empty directories for copy */ public static List<FileStatus> listFilesToCopyAtPath(FileSystem fs, Path path, PathFilter fileFilter, boolean includeEmptyDirectories) throws IOException { List<FileStatus> files = Lists.newArrayList(); FileStatus rootFile = fs.getFileStatus(path); listFilesRecursivelyHelper(fs, files, rootFile, fileFilter, false, includeEmptyDirectories); // Copy the empty root directory if (files.size() == 0 && rootFile.isDirectory() && includeEmptyDirectories) { files.add(rootFile); } return files; } /** * Helper method to list out all files under a specified path. The specified {@link PathFilter} is treated as a file * filter, that is it is only applied to file {@link Path}s. 
*/ public static List<FileStatus> listFilesRecursively(FileSystem fs, Path path, PathFilter fileFilter) throws IOException { return listFilesRecursively(fs, path, fileFilter, false); } /** * Helper method to list out all files under a specified path. If applyFilterToDirectories is false, the supplied * {@link PathFilter} will only be applied to files. */ public static List<FileStatus> listFilesRecursively(FileSystem fs, Path path, PathFilter fileFilter, boolean applyFilterToDirectories) throws IOException { return listFilesRecursivelyHelper(fs, Lists.newArrayList(), fs.getFileStatus(path), fileFilter, applyFilterToDirectories, false); } private static List<FileStatus> listFilesRecursivelyHelper(FileSystem fs, List<FileStatus> files, FileStatus fileStatus, PathFilter fileFilter, boolean applyFilterToDirectories, boolean includeEmptyDirectories) throws FileNotFoundException, IOException { if (fileStatus.isDirectory()) { for (FileStatus status : fs.listStatus(fileStatus.getPath(), applyFilterToDirectories ? fileFilter : NO_OP_PATH_FILTER)) { if (status.isDirectory()) { // Number of files collected before diving into the directory int numFilesBefore = files.size(); listFilesRecursivelyHelper(fs, files, status, fileFilter, applyFilterToDirectories, includeEmptyDirectories); // Number of files collected after diving into the directory int numFilesAfter = files.size(); if (numFilesAfter == numFilesBefore && includeEmptyDirectories) { /* * This is effectively an empty directory, which needs explicit copying. Has there any data file * in the directory, the directory would be created as a side-effect of copying the data file */ files.add(status); } } else { listFilesRecursivelyHelper(fs, files, status, fileFilter, applyFilterToDirectories, includeEmptyDirectories); } } } else if (fileFilter.accept(fileStatus.getPath())) { files.add(fileStatus); } return files; } /** * Method to list out all files, or directory if no file exists, under a specified path. 
*/ public static List<FileStatus> listMostNestedPathRecursively(FileSystem fs, Path path) throws IOException { return listMostNestedPathRecursively(fs, path, NO_OP_PATH_FILTER); } public static List<FileStatus> listMostNestedPathRecursively(FileSystem fs, Iterable<Path> paths) throws IOException { List<FileStatus> results = Lists.newArrayList(); for (Path path : paths) { results.addAll(listMostNestedPathRecursively(fs, path)); } return results; } /** * Method to list out all files, or directory if no file exists, under a specified path. * The specified {@link PathFilter} is treated as a file filter, that is it is only applied to file {@link Path}s. */ public static List<FileStatus> listMostNestedPathRecursively(FileSystem fs, Path path, PathFilter fileFilter) throws IOException { return listMostNestedPathRecursivelyHelper(fs, Lists.<FileStatus> newArrayList(), fs.getFileStatus(path), fileFilter); } private static List<FileStatus> listMostNestedPathRecursivelyHelper(FileSystem fs, List<FileStatus> files, FileStatus fileStatus, PathFilter fileFilter) throws IOException { if (fileStatus.isDirectory()) { FileStatus[] curFileStatus = fs.listStatus(fileStatus.getPath()); if (ArrayUtils.isEmpty(curFileStatus)) { files.add(fileStatus); } else { for (FileStatus status : curFileStatus) { listMostNestedPathRecursivelyHelper(fs, files, status, fileFilter); } } } else if (fileFilter.accept(fileStatus.getPath())) { files.add(fileStatus); } return files; } /** * Helper method to list out all paths under a specified path. If the {@link org.apache.hadoop.fs.FileSystem} is * unable to list the contents of a relevant directory, will log an error and skip. 
*/ public static List<FileStatus> listPathsRecursively(FileSystem fs, Path path, PathFilter fileFilter) throws IOException { return listPathsRecursivelyHelper(fs, Lists.<FileStatus> newArrayList(), fs.getFileStatus(path), fileFilter); } private static List<FileStatus> listPathsRecursivelyHelper(FileSystem fs, List<FileStatus> files, FileStatus fileStatus, PathFilter fileFilter) { if (fileFilter.accept(fileStatus.getPath())) { files.add(fileStatus); } if (fileStatus.isDirectory()) { try { for (FileStatus status : fs.listStatus(fileStatus.getPath())) { listPathsRecursivelyHelper(fs, files, status, fileFilter); } } catch (IOException ioe) { LOG.error("Could not list contents of path " + fileStatus.getPath()); } } return files; } }
/* * Copyright 2018 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.texttospeech.v1beta1.stub; import com.google.api.core.ApiFunction; import com.google.api.core.BetaApi; import com.google.api.gax.core.GaxProperties; import com.google.api.gax.core.GoogleCredentialsProvider; import com.google.api.gax.core.InstantiatingExecutorProvider; import com.google.api.gax.grpc.GaxGrpcProperties; import com.google.api.gax.grpc.GrpcTransportChannel; import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider; import com.google.api.gax.retrying.RetrySettings; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.StatusCode; import com.google.api.gax.rpc.StubSettings; import com.google.api.gax.rpc.TransportChannelProvider; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.cloud.texttospeech.v1beta1.ListVoicesRequest; import com.google.cloud.texttospeech.v1beta1.ListVoicesResponse; import com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest; import com.google.cloud.texttospeech.v1beta1.SynthesizeSpeechResponse; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import java.io.IOException; import java.util.List; import javax.annotation.Generated; import org.threeten.bp.Duration; // AUTO-GENERATED DOCUMENTATION AND CLASS /** * 
Settings class to configure an instance of {@link TextToSpeechStub}.
 *
 * <p>The default instance has everything set to sensible defaults:
 *
 * <ul>
 *   <li>The default service address (texttospeech.googleapis.com) and default port (443) are used.
 *   <li>Credentials are acquired automatically through Application Default Credentials.
 *   <li>Retries are configured for idempotent methods but not for non-idempotent methods.
 * </ul>
 *
 * <p>The builder of this class is recursive, so contained classes are themselves builders. When
 * build() is called, the tree of builders is called to create the complete settings object. For
 * example, to set the total timeout of listVoices to 30 seconds:
 *
 * <pre>
 * <code>
 * TextToSpeechStubSettings.Builder textToSpeechSettingsBuilder =
 *     TextToSpeechStubSettings.newBuilder();
 * textToSpeechSettingsBuilder.listVoicesSettings().getRetrySettings().toBuilder()
 *     .setTotalTimeout(Duration.ofSeconds(30));
 * TextToSpeechStubSettings textToSpeechSettings = textToSpeechSettingsBuilder.build();
 * </code>
 * </pre>
 */
// NOTE(review): generated by gapic-generator — prefer regenerating over hand edits.
@Generated("by gapic-generator")
@BetaApi
public class TextToSpeechStubSettings extends StubSettings<TextToSpeechStubSettings> {
  /** The default scopes of the service. */
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder().add("https://www.googleapis.com/auth/cloud-platform").build();

  private final UnaryCallSettings<ListVoicesRequest, ListVoicesResponse> listVoicesSettings;
  private final UnaryCallSettings<SynthesizeSpeechRequest, SynthesizeSpeechResponse>
      synthesizeSpeechSettings;

  /** Returns the object with the settings used for calls to listVoices. */
  public UnaryCallSettings<ListVoicesRequest, ListVoicesResponse> listVoicesSettings() {
    return listVoicesSettings;
  }

  /** Returns the object with the settings used for calls to synthesizeSpeech. */
  public UnaryCallSettings<SynthesizeSpeechRequest, SynthesizeSpeechResponse>
      synthesizeSpeechSettings() {
    return synthesizeSpeechSettings;
  }

  /** Creates the transport-specific stub; only the gRPC transport is supported. */
  @BetaApi("A restructuring of stub classes is planned, so this may break in the future")
  public TextToSpeechStub createStub() throws IOException {
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(GrpcTransportChannel.getGrpcTransportName())) {
      return GrpcTextToSpeechStub.create(this);
    } else {
      throw new UnsupportedOperationException(
          "Transport not supported: " + getTransportChannelProvider().getTransportName());
    }
  }

  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }

  /** Returns the default service endpoint. */
  public static String getDefaultEndpoint() {
    return "texttospeech.googleapis.com:443";
  }

  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }

  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder().setScopesToApply(DEFAULT_SERVICE_SCOPES);
  }

  /** Returns a builder for the default ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    return InstantiatingGrpcChannelProvider.newBuilder();
  }

  public static TransportChannelProvider defaultTransportChannelProvider() {
    return defaultGrpcTransportProviderBuilder().build();
  }

  @BetaApi("The surface for customizing headers is not stable yet and may change in the future.")
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken(
            "gapic", GaxProperties.getLibraryVersion(TextToSpeechStubSettings.class))
        .setTransportToken(
            GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion());
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }

  protected TextToSpeechStubSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);

    listVoicesSettings = settingsBuilder.listVoicesSettings().build();
    synthesizeSpeechSettings = settingsBuilder.synthesizeSpeechSettings().build();
  }

  /** Builder for TextToSpeechStubSettings. */
  public static class Builder extends StubSettings.Builder<TextToSpeechStubSettings, Builder> {
    private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;

    private final UnaryCallSettings.Builder<ListVoicesRequest, ListVoicesResponse>
        listVoicesSettings;
    private final UnaryCallSettings.Builder<SynthesizeSpeechRequest, SynthesizeSpeechResponse>
        synthesizeSpeechSettings;

    // Maps retry-category names ("idempotent"/"non_idempotent") to the status codes retried.
    private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
        RETRYABLE_CODE_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
          ImmutableMap.builder();
      definitions.put(
          "idempotent",
          ImmutableSet.copyOf(
              Lists.<StatusCode.Code>newArrayList(
                  StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE)));
      definitions.put("non_idempotent", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }

    // Default retry parameters: 100ms initial delay, 1.3x backoff, 60s max delay,
    // 20s per-RPC timeout, 10-minute total timeout.
    private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
      RetrySettings settings = null;
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelay(Duration.ofMillis(100L))
              .setRetryDelayMultiplier(1.3)
              .setMaxRetryDelay(Duration.ofMillis(60000L))
              .setInitialRpcTimeout(Duration.ofMillis(20000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeout(Duration.ofMillis(20000L))
              .setTotalTimeout(Duration.ofMillis(600000L))
              .build();
      definitions.put("default", settings);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }

    protected Builder() {
      this((ClientContext) null);
    }

    protected Builder(ClientContext clientContext) {
      super(clientContext);

      listVoicesSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();

      synthesizeSpeechSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              listVoicesSettings, synthesizeSpeechSettings);

      initDefaults(this);
    }

    private static Builder createDefault() {
      Builder builder = new Builder((ClientContext) null);
      builder.setTransportChannelProvider(defaultTransportChannelProvider());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
      builder.setEndpoint(getDefaultEndpoint());
      return initDefaults(builder);
    }

    private static Builder initDefaults(Builder builder) {

      builder
          .listVoicesSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default"));

      builder
          .synthesizeSpeechSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default"));

      return builder;
    }

    protected Builder(TextToSpeechStubSettings settings) {
      super(settings);

      listVoicesSettings = settings.listVoicesSettings.toBuilder();
      synthesizeSpeechSettings = settings.synthesizeSpeechSettings.toBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              listVoicesSettings, synthesizeSpeechSettings);
    }

    // NEXT_MAJOR_VER: remove 'throws Exception'
    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) throws Exception {
      super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
      return this;
    }

    public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
      return unaryMethodSettingsBuilders;
    }

    /** Returns the builder for the settings used for calls to listVoices. */
    public UnaryCallSettings.Builder<ListVoicesRequest, ListVoicesResponse> listVoicesSettings() {
      return listVoicesSettings;
    }

    /** Returns the builder for the settings used for calls to synthesizeSpeech. */
    public UnaryCallSettings.Builder<SynthesizeSpeechRequest, SynthesizeSpeechResponse>
        synthesizeSpeechSettings() {
      return synthesizeSpeechSettings;
    }

    @Override
    public TextToSpeechStubSettings build() throws IOException {
      return new TextToSpeechStubSettings(this);
    }
  }
}
import processing.core.*;
import processing.data.*;
import processing.opengl.*;
import oscP5.*;
import netP5.*;
import ddf.minim.*;
import org.tritonus.share.midi.*;
import org.tritonus.sampled.file.*;
import javazoom.jl.player.advanced.*;
import org.tritonus.share.*;
import ddf.minim.*;
import ddf.minim.analysis.*;
import netP5.*;
import org.tritonus.share.sampled.*;
import javazoom.jl.converter.*;
import javazoom.spi.mpeg.sampled.file.tag.*;
import org.tritonus.share.sampled.file.*;
import javazoom.spi.mpeg.sampled.convert.*;
import ddf.minim.javasound.*;
import oscP5.*;
import javazoom.spi.*;
import org.tritonus.share.sampled.mixer.*;
import javazoom.jl.decoder.*;
import processing.xml.*;
import processing.core.*;
import org.tritonus.share.sampled.convert.*;
import ddf.minim.spi.*;
import ddf.minim.effects.*;
import javazoom.spi.mpeg.sampled.file.*;
import ddf.minim.signals.*;
import javazoom.jl.player.*;

import java.applet.*;
import java.awt.Dimension;
import java.awt.Frame;
import java.awt.event.MouseEvent;
import java.awt.event.KeyEvent;
import java.awt.event.FocusEvent;
import java.awt.Image;
import java.io.*;
import java.net.*;
import java.text.*;
import java.util.*;
import java.util.zip.*;
import java.util.regex.*;

// Processing-exported sketch: an OSC-controlled audio visualizer that renders an
// FFT of the line-in signal as a grid of tiles, with "boid" positions highlighted.
public class Visualizer extends PApplet {

//
// Visualizer
// Ilias Karim
// Music 250A, CCRMA, Stanford University
//

//import processing.core.PGraphics3D;
//import processing.core.*;
//import processing.opengl.*;

// UDP port this sketch listens on for OSC control messages.
int oscPort = 57121;

// numbers of boids and attractors are hard-coded in GridRenderer :(
//float[][] boids = new float[16][2]; // doubly hard-coded in GridRenderer
//float[][] attractors = new float[9][2];

OscP5 oscP5;// = new OscP5(this, oscPort);
Minim minim = new Minim(this);
AudioSource source;
GridRenderer gridRenderer;
int select;

// Opens the OSC listener, grabs the line-in audio source and attaches the renderer
// as a sample listener so it receives audio buffers as they arrive.
public void setup() {
  oscP5 = new OscP5(this, oscPort);
  size(1024, 708);
  //minim = new Minim(this);
  source = minim.getLineIn();
  gridRenderer = new GridRenderer(source);
  source.addListener(gridRenderer);
}

public void draw() {
  gridRenderer.draw();
}

// Dispatches incoming OSC messages: /radius, /rgb, /intensity and /mode come from PD;
// /boid updates come from Python (x/y presumably normalized 0..1 — TODO confirm).
public void oscEvent(OscMessage msg) {
  String pattern = msg.addrPattern();

  //
  // parse visualization control messages from PD
  //
  if (pattern.equals("/radius")) {
    int val = msg.get(0).intValue();
    gridRenderer.r = val;
  }
  else if (pattern.equals("/rgb")) {
    // RGB components arrive as 0..128 ints; scale to 0..1 floats.
    float rVal = msg.get(0).intValue() / 128.f;
    float gVal = msg.get(1).intValue() / 128.f;
    float bVal = msg.get(2).intValue() / 128.f;
    gridRenderer.setRGB(rVal, gVal, bVal);
  }
  else if (pattern.equals("/intensity")) {
    gridRenderer.alpha = msg.get(0).floatValue();
  }
  else if (pattern.equals("/mode")) {
    gridRenderer.setMode(msg.get(0).intValue());
  }
  //
  // parse ... control messages from Python
  //
  else if (pattern.equals("/boid")) {
    int i = PApplet.parseInt(msg.get(0).intValue());
    print("\n" + i + "\n");
    gridRenderer.boids[i][0] = msg.get(1).floatValue();
    gridRenderer.boids[i][1] = msg.get(2).floatValue();
    //gridRenderer.boids = boids;
  }
  /*
  else if (pattern.equals("/attractor")) {
    attractors[msg.get(0).intValue()][0] = msg.get(1).floatValue();
    attractors[msg.get(0).intValue()][1] = msg.get(1).floatValue();
  }*/

  // debug
  //print(msg);
}

// Releases the audio source and Minim before the applet stops.
public void stop() {
  source.close();
  minim.stop();
  super.stop();
}

/// abstract class for audio visualization
// Implements Minim's AudioListener: samples() is called from the audio thread,
// draw() from the animation thread.
abstract class AudioRenderer implements AudioListener {
  float[] left;
  float[] right;
  public synchronized void samples(float[] samp) {
    left = samp;
  }
  public synchronized void samples(float[] sampL, float[] sampR) {
    left = sampL;
    right = sampR;
  }
  public abstract void setup();
  public abstract void draw();
}

// abstract class for FFT visualization
abstract class FourierRenderer extends AudioRenderer {
  FFT fft;
  float maxFFT;
  float[] leftFFT;
  float[] rightFFT;

  FourierRenderer(AudioSource source) {
    float gain = .1f;
    fft = new FFT(source.bufferSize(), source.sampleRate());
    maxFFT = source.sampleRate() / source.bufferSize() * gain;
    fft.window(FFT.HAMMING);
  }

  // Computes `bands` linear FFT averages of the most recent left-channel buffer
  // into leftFFT; no-op until the first audio buffer has arrived.
  public void calc(int bands) {
    if(left != null) {
      leftFFT = new float[bands];
      fft.linAverages(bands);
      fft.forward(left);
      for(int i = 0; i < bands; i++) leftFFT[i] = fft.getAvg(i);
    }
  }
}

// Renders the FFT as a (2r+1)x(2r+1) grid of quads; tile brightness follows the
// FFT band at the tile's distance from the center. Mode 0 morphs toward a diamond
// tiling, any other mode back to squares.
class GridRenderer extends FourierRenderer {
  int SquareMode = 1;
  int DiamondMode = 0;

  // Boid positions written by oscEvent ("/boid"); note the comment above says 16
  // but 19 slots are allocated here.
  float[][] boids = new float[19][2];

  // radius
  int r = 20;
  // "squeeze"
  float squeeze = .5f;
  // color scale
  float colorScale = 40;

  float val[];

  float factor = 1;
  float factorAlpha = 0;

  GridRenderer(AudioSource source) {
    super(source);
    //val = new float[ceil(sqrt(2) * r)];
  }

  public void setup() {
    colorMode(RGB, colorScale, colorScale, colorScale);
    //setRGB(1, 1, 1);
  }

  int mode;
  public void setMode(int myMode) {
    mode = myMode;
    print("setMode: " + mode + "\n");
  }

  // color
  // rgb is the target color; _rgb is the smoothed (lerped) color actually drawn.
  float rgb[] = { 1, 1, 1 };
  float _rgb[] = { 0, 0, 0 };
  public void setRGB(float r, float g, float b) {
    rgb[0] = r;
    rgb[1] = g;
    rgb[2] = b;
    print("set RGB: (" + r + ", " + g + ", " + b + ")\n");
  }

  float diamondTileAlpha = 0;
  float alpha = 1;
  float _alpha = 1;

  public void draw() {
    if (left != null) {
      // One FFT band per unit of distance from the grid center (max dist = r*sqrt(2)).
      val = new float[ceil(sqrt(2) * r)];
      super.calc(val.length);

      // interpolate values
      for (int i=0; i<val.length; i++) {
        val[i] = lerp(val[i], pow(leftFFT[i], squeeze), .1f);
      }

      background(0);

      float tileWidth = width / (2*r + 1);
      float tileHeight = height / (2*r + 1);

      // Ease the square<->diamond morph: mode 0 grows factor/diamondTileAlpha,
      // other modes shrink them back.
      if (mode == 0) {
        if (factor < 2) {
          factor += .04f;
        }
        else {
          factor = 2;
        }
        if (diamondTileAlpha < 1) {
          diamondTileAlpha += .02f;
        }
      }
      else {
        if (diamondTileAlpha > 0) {
          diamondTileAlpha -= .02f;
        }
        else if (factor > 1) {
          factor -= .04f;
        }
        else {
          factor = 1;
        }
      }

      _rgb[0] = lerp(_rgb[0], rgb[0], .01f);
      _rgb[1] = lerp(_rgb[1], rgb[1], .01f);
      _rgb[2] = lerp(_rgb[2], rgb[2], .01f);

      _alpha = lerp(_alpha, alpha, .1f);

      for (int x = -r; x < r + 2; x++) {
        for (int z = -r; z < r + 2; z++) {

          // Pick the FFT band by the tile's distance from the grid center.
          int index = (int)dist(x, z, 0, 0);
          if (index >= val.length)
            index = val.length - 1;

          float c = 256 * val[index];

          fill(c * _rgb[0] * _alpha, c * _rgb[1] * _alpha, c * _rgb[2] * _alpha);

          float x0 = width / 2 + (tileWidth * (x - .5f));
          float x1 = x0 + tileWidth;
          float y0 = height / 2 + (tileHeight * (z - .5f));
          float y1 = y0 + tileHeight;

          x0 -= tileWidth / 2;
          x1 -= tileWidth / 2;
          y0 -= tileHeight / 2;
          y1 -= tileHeight / 2;

          // Average distance of the tile's four corners, used for the diamond layer.
          float avg;
          avg = (dist(x, z, 0, 0) + dist(x, z + 1, 0, 0) + dist(x + 1, z, 0, 0) + dist(x + 1, z + 1, 0, 0)) / 4;
          if (avg >= val.length)
            avg = val.length - 1;
          c = 256 * val[(int)avg] * diamondTileAlpha;

          float bonus = 1;
          /*
          if (random(0, 100) > 99)
            bonus = random(1, 2);
          */
          fill(c * _rgb[0] * _alpha * bonus, c * _rgb[1] * _alpha * bonus, c * _rgb[2] * _alpha * bonus);
          //fill(1, 1, 1, 0);

          // Highlight any tile that a boid currently occupies
          // (boid coords appear to be normalized around .5 — TODO confirm sender's range).
          for (int i = 0; i < 19; i++) {
            if ((int)((boids[i][0] -.5f) * r * 2) == x && (int)((boids[i][1] - .5f) * r * 2) == z) {
              //print ((boids[i][0] * width) + " " + (boids[i][1] * height) + "\n");
              fill(256, 256, 256, 256);
              //print("EUREKA");
            }
          }

          quad(x0 + tileWidth / factor, y0,
               x0, y1 - tileHeight / factor,
               x1 - tileWidth / factor, y1,
               x1, y0 + tileHeight / factor);

          if (factor == 2) // diamond
            quad(x0 + tileWidth / factor + tileWidth / factor, y0 + tileHeight / factor,
                 x0 + tileWidth / factor, y1 - tileHeight / factor + tileHeight / factor,
                 x1 - tileWidth / factor + tileWidth / factor, y1 + tileHeight / factor,
                 x1 + tileWidth / factor, y0 + tileHeight / factor + tileHeight / factor);
        }
      }
    }
  }
}

  static public void main(String[] passedArgs) {
    String[] appletArgs = new String[] { "--full-screen", "--bgcolor=#666666", "--hide-stop", "Visualizer" };
    if (passedArgs != null) {
      PApplet.main(concat(appletArgs, passedArgs));
    } else {
      PApplet.main(appletArgs);
    }
  }
}
package org.semanticweb.elk.reasoner.saturation.rules; /* * #%L * ELK Reasoner * $Id:$ * $HeadURL:$ * %% * Copyright (C) 2011 - 2012 Department of Computer Science, University of Oxford * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import org.semanticweb.elk.reasoner.indexing.hierarchy.IndexedClassExpression; import org.semanticweb.elk.reasoner.indexing.hierarchy.IndexedObjectComplementOf; import org.semanticweb.elk.reasoner.indexing.hierarchy.IndexedObjectIntersectionOf; import org.semanticweb.elk.reasoner.indexing.hierarchy.IndexedObjectSomeValuesFrom; import org.semanticweb.elk.reasoner.saturation.conclusions.interfaces.BackwardLink; import org.semanticweb.elk.reasoner.saturation.conclusions.interfaces.ContextInitialization; import org.semanticweb.elk.reasoner.saturation.conclusions.interfaces.Contradiction; import org.semanticweb.elk.reasoner.saturation.conclusions.interfaces.DisjointSubsumer; import org.semanticweb.elk.reasoner.saturation.conclusions.interfaces.ForwardLink; import org.semanticweb.elk.reasoner.saturation.conclusions.interfaces.Propagation; import org.semanticweb.elk.reasoner.saturation.conclusions.interfaces.SubContextInitialization; import org.semanticweb.elk.reasoner.saturation.context.ContextPremises; import org.semanticweb.elk.reasoner.saturation.rules.backwardlinks.BackwardLinkChainFromBackwardLinkRule; import org.semanticweb.elk.reasoner.saturation.rules.backwardlinks.ContradictionOverBackwardLinkRule; import 
org.semanticweb.elk.reasoner.saturation.rules.backwardlinks.SubsumerBackwardLinkRule; import org.semanticweb.elk.reasoner.saturation.rules.contextinit.OwlThingContextInitRule; import org.semanticweb.elk.reasoner.saturation.rules.contextinit.RootContextInitializationRule; import org.semanticweb.elk.reasoner.saturation.rules.contradiction.ContradictionPropagationRule; import org.semanticweb.elk.reasoner.saturation.rules.disjointsubsumer.ContradicitonCompositionRule; import org.semanticweb.elk.reasoner.saturation.rules.forwardlink.BackwardLinkFromForwardLinkRule; import org.semanticweb.elk.reasoner.saturation.rules.forwardlink.NonReflexiveBackwardLinkCompositionRule; import org.semanticweb.elk.reasoner.saturation.rules.forwardlink.ReflexiveBackwardLinkCompositionRule; import org.semanticweb.elk.reasoner.saturation.rules.propagations.NonReflexivePropagationRule; import org.semanticweb.elk.reasoner.saturation.rules.propagations.ReflexivePropagationRule; import org.semanticweb.elk.reasoner.saturation.rules.subcontextinit.PropagationInitializationRule; import org.semanticweb.elk.reasoner.saturation.rules.subsumers.ContradictionFromDisjointnessRule; import org.semanticweb.elk.reasoner.saturation.rules.subsumers.ContradictionFromNegationRule; import org.semanticweb.elk.reasoner.saturation.rules.subsumers.ContradictionFromOwlNothingRule; import org.semanticweb.elk.reasoner.saturation.rules.subsumers.DisjointSubsumerFromMemberRule; import org.semanticweb.elk.reasoner.saturation.rules.subsumers.IndexedObjectComplementOfDecomposition; import org.semanticweb.elk.reasoner.saturation.rules.subsumers.IndexedObjectIntersectionOfDecomposition; import org.semanticweb.elk.reasoner.saturation.rules.subsumers.IndexedObjectSomeValuesFromDecomposition; import org.semanticweb.elk.reasoner.saturation.rules.subsumers.ObjectIntersectionFromConjunctRule; import org.semanticweb.elk.reasoner.saturation.rules.subsumers.ObjectUnionFromDisjunctRule; import 
org.semanticweb.elk.reasoner.saturation.rules.subsumers.PropagationFromExistentialFillerRule; import org.semanticweb.elk.reasoner.saturation.rules.subsumers.SubsumerDecompositionVisitor; import org.semanticweb.elk.reasoner.saturation.rules.subsumers.SuperClassFromSubClassRule; /** * A {@link RuleVisitor} wrapper for a given {@link RuleVisitor} that * additionally records the number of invocations of the methods using the given * {@link RuleCounter}. * * @author "Yevgeny Kazakov" */ public class RuleCounterVisitor implements RuleVisitor { /** * the counter used to count the number of method applications of the * visitor */ private final RuleCounter counter_; /** * the visitor whose method applications to be counted */ private final RuleVisitor visitor_; /** * Creates a new {@link RuleCounterVisitor} that executes the corresponding * methods of the given {@link RuleVisitor} and counts the number of * invocations of the corresponding methods using the given * {@link RuleCounter}. * * @param visitor * the {@link SubsumerDecompositionVisitor} used to execute the * methods * @param counter * the {@link RuleCounter} used to count the number of method * invocations */ public RuleCounterVisitor(RuleVisitor visitor, RuleCounter counter) { this.visitor_ = visitor; this.counter_ = counter; } @Override public void visit(BackwardLinkChainFromBackwardLinkRule rule, BackwardLink premise, ContextPremises premises, ConclusionProducer producer) { counter_.countBackwardLinkChainFromBackwardLinkRule++; visitor_.visit(rule, premise, premises, producer); } @Override public void visit(BackwardLinkFromForwardLinkRule rule, ForwardLink premise, ContextPremises premises, ConclusionProducer producer) { counter_.countBackwardLinkFromForwardLinkRule++; visitor_.visit(rule, premise, premises, producer); } @Override public void visit(ContradicitonCompositionRule rule, DisjointSubsumer premise, ContextPremises premises, ConclusionProducer producer) { counter_.countContradicitonCompositionRule++; 
visitor_.visit(rule, premise, premises, producer); } @Override public void visit(ContradictionFromDisjointnessRule rule, IndexedClassExpression premise, ContextPremises premises, ConclusionProducer producer) { counter_.countContradictionFromDisjointnessRule++; visitor_.visit(rule, premise, premises, producer); } @Override public void visit(ContradictionFromNegationRule rule, IndexedClassExpression premise, ContextPremises premises, ConclusionProducer producer) { counter_.countContradictionFromNegationRule++; visitor_.visit(rule, premise, premises, producer); } @Override public void visit(ContradictionFromOwlNothingRule rule, IndexedClassExpression premise, ContextPremises premises, ConclusionProducer producer) { counter_.countContradictionFromOwlNothingRule++; visitor_.visit(rule, premise, premises, producer); } @Override public void visit(ContradictionOverBackwardLinkRule rule, BackwardLink premise, ContextPremises premises, ConclusionProducer producer) { counter_.countContradictionOverBackwardLinkRule++; visitor_.visit(rule, premise, premises, producer); } @Override public void visit(ContradictionPropagationRule rule, Contradiction premise, ContextPremises premises, ConclusionProducer producer) { counter_.countContradictionPropagationRule++; visitor_.visit(rule, premise, premises, producer); } @Override public void visit(DisjointSubsumerFromMemberRule rule, IndexedClassExpression premise, ContextPremises premises, ConclusionProducer producer) { counter_.countDisjointSubsumerFromMemberRule++; visitor_.visit(rule, premise, premises, producer); } @Override public void visit(IndexedObjectComplementOfDecomposition rule, IndexedObjectComplementOf premise, ContextPremises premises, ConclusionProducer producer) { counter_.countIndexedObjectComplementOfDecomposition++; visitor_.visit(rule, premise, premises, producer); } @Override public void visit(IndexedObjectIntersectionOfDecomposition rule, IndexedObjectIntersectionOf premise, ContextPremises premises, 
ConclusionProducer producer) { counter_.countIndexedObjectIntersectionOfDecomposition++; visitor_.visit(rule, premise, premises, producer); } @Override public void visit(IndexedObjectSomeValuesFromDecomposition rule, IndexedObjectSomeValuesFrom premise, ContextPremises premises, ConclusionProducer producer) { counter_.countIndexedObjectSomeValuesFromDecomposition++; visitor_.visit(rule, premise, premises, producer); } @Override public void visit(NonReflexiveBackwardLinkCompositionRule rule, ForwardLink premise, ContextPremises premises, ConclusionProducer producer) { counter_.countNonReflexiveBackwardLinkCompositionRule++; visitor_.visit(rule, premise, premises, producer); } @Override public void visit(NonReflexivePropagationRule rule, Propagation premise, ContextPremises premises, ConclusionProducer producer) { counter_.countNonReflexivePropagationRule++; visitor_.visit(rule, premise, premises, producer); } @Override public void visit(ObjectIntersectionFromConjunctRule rule, IndexedClassExpression premise, ContextPremises premises, ConclusionProducer producer) { counter_.countObjectIntersectionFromConjunctRule++; visitor_.visit(rule, premise, premises, producer); } @Override public void visit(ObjectUnionFromDisjunctRule rule, IndexedClassExpression premise, ContextPremises premises, ConclusionProducer producer) { counter_.countObjectUnionFromDisjunctRule++; visitor_.visit(rule, premise, premises, producer); } @Override public void visit(OwlThingContextInitRule rule, ContextInitialization premise, ContextPremises premises, ConclusionProducer producer) { counter_.countOwlThingContextInitRule++; visitor_.visit(rule, premise, premises, producer); } @Override public void visit(PropagationFromExistentialFillerRule rule, IndexedClassExpression premise, ContextPremises premises, ConclusionProducer producer) { counter_.countPropagationFromExistentialFillerRule++; visitor_.visit(rule, premise, premises, producer); } @Override public void visit(PropagationInitializationRule 
rule, SubContextInitialization premise, ContextPremises premises, ConclusionProducer producer) { counter_.countPropagationInitializationRule++; visitor_.visit(rule, premise, premises, producer); } @Override public void visit(ReflexiveBackwardLinkCompositionRule rule, ForwardLink premise, ContextPremises premises, ConclusionProducer producer) { counter_.countReflexiveBackwardLinkCompositionRule++; visitor_.visit(rule, premise, premises, producer); } @Override public void visit(ReflexivePropagationRule rule, Propagation premise, ContextPremises premises, ConclusionProducer producer) { counter_.countReflexivePropagationRule++; visitor_.visit(rule, premise, premises, producer); } @Override public void visit(RootContextInitializationRule rule, ContextInitialization premise, ContextPremises premises, ConclusionProducer producer) { counter_.countRootContextInitializationRule++; visitor_.visit(rule, premise, premises, producer); } @Override public void visit(SubsumerBackwardLinkRule rule, BackwardLink premise, ContextPremises premises, ConclusionProducer producer) { counter_.countSubsumerBackwardLinkRule++; visitor_.visit(rule, premise, premises, producer); } @Override public void visit(SuperClassFromSubClassRule rule, IndexedClassExpression premise, ContextPremises premises, ConclusionProducer producer) { counter_.countSuperClassFromSubClassRule++; visitor_.visit(rule, premise, premises, producer); } }
/*
 * Copyright (c) 2005-2009 Grameen Foundation USA
 * All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 * See also http://www.apache.org/licenses/LICENSE-2.0.html for an
 * explanation of the license and how it is applied.
 */

package org.mifos.application.ppi.helpers;

import java.io.IOException;
import java.io.InputStream;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;

import org.mifos.application.ppi.business.PPIChoice;
import org.mifos.application.ppi.business.PPILikelihood;
import org.mifos.application.ppi.business.PPISurvey;
import org.mifos.application.surveys.business.Question;
import org.mifos.application.surveys.business.QuestionChoice;
import org.mifos.application.surveys.business.SurveyQuestion;
import org.mifos.application.surveys.helpers.AnswerType;
import org.mifos.config.GeneralConfig;
import org.mifos.core.ClasspathResource;
import org.mifos.framework.exceptions.ValidationException;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;

/**
 * Parses PPI (Progress out of Poverty Index) survey definitions from XML into
 * {@link PPISurvey} objects, and (deprecated) serializes a survey back to XML.
 */
public class XmlPPISurveyParser {

    /**
     * Parses the survey definition found at the given classpath URI into the
     * given survey.
     *
     * TODO: Should be private
     *
     * @param uri classpath location of the survey XML
     * @param survey survey instance to populate
     * @return the populated survey (same instance as {@code survey})
     */
    public PPISurvey parseInto(String uri, PPISurvey survey) throws URISyntaxException, IOException,
            ParserConfigurationException, SAXException {
        // try-with-resources: the stream was previously opened and never closed
        // (resource leak on every call)
        try (InputStream xml = ClasspathResource.getURI(uri).toURL().openStream()) {
            return parseInto(xml, survey);
        }
    }

    /**
     * Parses the survey definition read from the given stream. The caller
     * retains ownership of the stream.
     */
    private PPISurvey parseInto(InputStream stream, PPISurvey survey) throws ParserConfigurationException, IOException,
            SAXException {
        // NOTE(review): the default parser configuration processes DTDs; input
        // here is classpath-local so XXE exposure is limited — confirm before
        // ever feeding this parser untrusted XML.
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        DocumentBuilder builder = factory.newDocumentBuilder();
        Document document = builder.parse(stream);
        return parseInto(document, survey);
    }

    /** Populates {@code survey} from an already-parsed DOM document. */
    PPISurvey parseInto(Document document, PPISurvey survey) {
        Element docElement = document.getDocumentElement();
        parseSurveyName(survey, docElement);
        parseSurveyCountry(survey, docElement);
        parseSurveyQuestions(survey, docElement);
        parseSurveyLikelihoods(survey, docElement);
        return survey;
    }

    /** Copies the root element's {@code name} attribute onto the survey. */
    private void parseSurveyName(PPISurvey survey, Element docElement) {
        survey.setName(docElement.getAttribute("name"));
    }

    /** Maps the root element's {@code country} attribute to a {@link Country}. */
    private void parseSurveyCountry(PPISurvey survey, Element docElement) {
        survey.setCountry(Country.valueOf(docElement.getAttribute("country")));
    }

    /**
     * Parses all {@code <question>} elements. If the survey already has
     * questions, existing entries are updated in place; otherwise new
     * questions are created and appended.
     *
     * @throws IllegalStateException if a question is missing required
     *         attributes/elements or the total points exceed the configured
     *         maximum
     */
    private void parseSurveyQuestions(PPISurvey survey, Element docElement) {
        List<SurveyQuestion> surveyQuestions = survey.getQuestions();
        boolean emptyQuestionList = surveyQuestions.size() == 0;
        NodeList questionNodes = docElement.getElementsByTagName("question");
        for (int i = 0; i < questionNodes.getLength(); i++) {
            Element node = (Element) questionNodes.item(i);
            String name = null;
            String mandatory = null;
            Integer order = null;
            // Guard against a missing <text> element: previously this was a
            // raw item(0).getTextContent() that threw NullPointerException
            // before the "Malformatted xml file" check below could fire.
            Node textNode = node.getElementsByTagName("text").item(0);
            String questionText = textNode == null ? null : textNode.getTextContent();
            if (node.hasAttributes()) {
                name = node.getAttributes().getNamedItem("name").getNodeValue();
                mandatory = node.getAttributes().getNamedItem("mandatory").getNodeValue();
                order = Integer.parseInt(node.getAttributes().getNamedItem("order").getNodeValue());
            }
            if (name == null || mandatory == null || order == null || questionText == null)
                throw new IllegalStateException("Malformatted xml file");
            SurveyQuestion surveyQuestion = new SurveyQuestion();
            surveyQuestion.setSurvey(survey);
            Question question = new Question();
            if (!emptyQuestionList) {
                // NOTE(review): assumes the "order" attribute is a valid
                // 0-based index into the existing question list — confirm
                // against the XML files actually shipped.
                surveyQuestion = surveyQuestions.get(order);
                question = surveyQuestion.getQuestion();
            } else {
                surveyQuestions.add(surveyQuestion);
                surveyQuestion.setQuestion(question);
            }
            question.setShortName(name);
            question.setQuestionText(questionText);
            question.setAnswerType(AnswerType.CHOICE);
            parseQuestionChoices(node, question, emptyQuestionList);
            surveyQuestion.setMandatory(Boolean.parseBoolean(mandatory));
            surveyQuestion.setOrder(order);
        }
        try {
            verifyQuestionPoints(surveyQuestions);
        } catch (ValidationException e) {
            throw new IllegalStateException(e);
        }
        survey.setQuestions(surveyQuestions);
    }

    /**
     * Verifies that the sum over all questions of each question's highest
     * choice point value does not exceed the configured per-survey maximum.
     */
    private void verifyQuestionPoints(List<SurveyQuestion> surveyQuestions) throws ValidationException {
        int totalPoints = 0;
        for (SurveyQuestion surveyQuestion : surveyQuestions) {
            int largestChoice = 0;
            for (QuestionChoice qc : surveyQuestion.getQuestion().getChoices()) {
                PPIChoice choice = (PPIChoice) qc;
                if (choice.getPoints() > largestChoice)
                    largestChoice = choice.getPoints();
            }
            totalPoints += largestChoice;
        }
        int maxPoints = GeneralConfig.getMaxPointsPerPPISurvey();
        if (totalPoints > maxPoints)
            throw new ValidationException("Question choices amount to more than " + maxPoints + " points.");
    }

    /**
     * Parses the {@code <choice>} children of a question, updating existing
     * choices in place when the survey already had questions.
     */
    private void parseQuestionChoices(Element questionNode, Question question, boolean emptyQuestionList) {
        NodeList choices = questionNode.getElementsByTagName("choice");
        for (int i = 0; i < choices.getLength(); i++) {
            Node node = choices.item(i);
            PPIChoice choice = new PPIChoice();
            if (!emptyQuestionList)
                choice = (PPIChoice) question.getChoices().get(i);
            else
                question.addChoice(choice);
            choice.setChoiceText(node.getTextContent());
            // A missing "points" attribute used to surface as a raw
            // NullPointerException; report it the same way other malformed
            // input is reported in this class.
            Node pointsAttr = node.getAttributes().getNamedItem("points");
            if (pointsAttr == null)
                throw new IllegalStateException("Malformatted xml file");
            choice.setPoints(Integer.parseInt(pointsAttr.getNodeValue()));
        }
    }

    /**
     * Parses all {@code <likelihood>} elements and replaces the survey's
     * likelihood list with the result.
     */
    private void parseSurveyLikelihoods(PPISurvey survey, Element docElement) {
        try {
            List<PPILikelihood> likelihoodsList = new ArrayList<PPILikelihood>();
            NodeList likelihoods = docElement.getElementsByTagName("likelihood");
            for (int i = 0; i < likelihoods.getLength(); i++) {
                PPILikelihood likelihood = parseLikelihood(likelihoods.item(i), i);
                likelihood.setSurvey(survey);
                likelihoodsList.add(likelihood);
            }
            survey.setLikelihoods(likelihoodsList);
        } catch (ValidationException e) {
            throw new IllegalStateException(e);
        }
    }

    /**
     * Builds a single {@link PPILikelihood} from a {@code <likelihood>} node.
     *
     * @param order position of the node in document order, stored on the result
     */
    private PPILikelihood parseLikelihood(Node node, int order) throws ValidationException {
        int scoreFrom = Integer.parseInt(node.getAttributes().getNamedItem("scoreFrom").getNodeValue());
        int scoreTo = Integer.parseInt(node.getAttributes().getNamedItem("scoreTo").getNodeValue());
        double bottomHalfPct = Double.parseDouble(node.getAttributes().getNamedItem("bottomHalf").getNodeValue());
        double topHalfPct = Double.parseDouble(node.getAttributes().getNamedItem("topHalf").getNodeValue());
        PPILikelihood likelihood = new PPILikelihood(scoreFrom, scoreTo, bottomHalfPct, topHalfPct);
        likelihood.setOrder(order);
        return likelihood;
    }

    /** Parses a fresh survey from the given classpath URI. */
    public PPISurvey parse(String uri) throws Exception {
        return parseInto(uri, new PPISurvey());
    }

    /**
     * TODO: Do not create new PPISurvey instance here, do it in the body of
     * parseInto() TODO: This method is never used
     */
    public PPISurvey parse(InputStream stream) throws Exception {
        return parseInto(stream, new PPISurvey());
    }

    /** Parses a fresh survey from an already-parsed DOM document. */
    public PPISurvey parse(Document document) throws Exception {
        return parseInto(document, new PPISurvey());
    }

    // TODO: doesn't look like this method is used anywhere, do we need it?
    /**
     * Serializes a survey back into its XML document form (inverse of
     * {@link #parse(Document)}).
     */
    @Deprecated
    public Document buildXmlFrom(PPISurvey survey) throws Exception {
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        DocumentBuilder builder = factory.newDocumentBuilder();
        Document document = builder.newDocument();
        Element docElement = document.createElement("ppi");
        docElement.setAttribute("country", survey.getCountryAsEnum().toString());
        docElement.setAttribute("name", survey.getName());
        List<SurveyQuestion> surveyQuestions = survey.getQuestions();
        Collections.sort(surveyQuestions);
        for (SurveyQuestion surveyQuestion : surveyQuestions) {
            Element questionNode = document.createElement("question");
            questionNode.setAttribute("name", surveyQuestion.getQuestion().getShortName());
            // NOTE(review): mandatory appears to be stored numerically (1 = true)
            questionNode.setAttribute("mandatory", surveyQuestion.getMandatory() == 1 ? "true" : "false");
            questionNode.setAttribute("order", surveyQuestion.getOrder().toString());
            Element text = document.createElement("text");
            text.setTextContent(surveyQuestion.getQuestion().getQuestionText());
            questionNode.appendChild(text);
            for (QuestionChoice choice : surveyQuestion.getQuestion().getChoices()) {
                Element choiceNode = document.createElement("choice");
                choiceNode.setAttribute("points", Integer.toString(((PPIChoice) choice).getPoints()));
                choiceNode.setTextContent(choice.getChoiceText());
                questionNode.appendChild(choiceNode);
            }
            docElement.appendChild(questionNode);
        }
        document.appendChild(docElement);
        return document;
    }
}
package org.revenj.database.postgres;

import ch.epfl.labos.iu.orm.queryll2.symbolic.TypedValueVisitorException;
import org.revenj.patterns.DataSource;
import org.revenj.patterns.OlapCubeQuery;
import org.revenj.patterns.ServiceLocator;
import org.revenj.patterns.Specification;
import org.revenj.database.postgres.jinq.RevenjQueryComposer;
import org.revenj.database.postgres.jinq.jpqlquery.*;
import org.revenj.database.postgres.jinq.transform.*;

import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.*;
import java.util.function.Function;

/**
 * Base class for OLAP cube queries executed against PostgreSQL. Subclasses
 * register dimensions, facts and result converters; this class assembles and
 * runs the corresponding SELECT ... GROUP BY ... ORDER BY statement.
 *
 * @param <TSource> data source type the cube aggregates over
 */
public abstract class PostgresOlapCubeQuery<TSource extends DataSource> implements OlapCubeQuery<TSource> {

	protected final ServiceLocator locator;
	protected final PostgresReader reader;
	// Non-null only when running inside an ambient transaction; in that case
	// the connection is owned by the transaction and must not be closed here.
	protected final Connection transactionConnection;
	protected final javax.sql.DataSource dataSource;
	private final MetamodelUtil metamodel;
	private final ClassLoader loader;

	/** @return the SQL source (table/view/function) the cube selects from */
	protected abstract String getSource();

	protected final Map<String, Function<String, String>> cubeDimensions = new LinkedHashMap<>();
	protected final Map<String, Function<String, String>> cubeFacts = new LinkedHashMap<>();
	protected final Map<String, Converter> cubeConverters = new LinkedHashMap<>();

	/** Converts one column of a Postgres record into a Java value. */
	@FunctionalInterface
	public interface Converter {
		Object convert(PostgresReader reader, int context) throws IOException;
	}

	protected PostgresOlapCubeQuery(ServiceLocator locator) {
		this.locator = locator;
		this.reader = new PostgresReader(locator);
		this.loader = locator.resolve(ClassLoader.class);
		this.transactionConnection = locator.tryResolve(Connection.class).orElse(null);
		this.dataSource = transactionConnection != null ? null : locator.resolve(javax.sql.DataSource.class);
		this.metamodel = locator.resolve(MetamodelUtil.class);
	}

	@Override
	public Set<String> getDimensions() {
		return cubeDimensions.keySet();
	}

	@Override
	public Set<String> getFacts() {
		return cubeFacts.keySet();
	}

	/**
	 * Validates that the requested dimensions/facts are registered and that
	 * every custom order key refers to a used dimension or fact.
	 *
	 * @throws IllegalArgumentException on any unknown name
	 */
	private void validateInput(List<String> usedDimensions, List<String> usedFacts, Collection<String> customOrder) {
		if (usedDimensions.size() == 0 && usedFacts.size() == 0) {
			throw new IllegalArgumentException("Cube must have at least one dimension or fact.");
		}
		for (String d : usedDimensions) {
			if (!cubeDimensions.containsKey(d)) {
				throw new IllegalArgumentException("Unknown dimension: " + d + ". Use getDimensions method for available dimensions");
			}
		}
		for (String f : usedFacts) {
			if (!cubeFacts.containsKey(f)) {
				throw new IllegalArgumentException("Unknown fact: " + f + ". Use getFacts method for available facts");
			}
		}
		for (String o : customOrder) {
			if (!usedDimensions.contains(o) && !usedFacts.contains(o)) {
				throw new IllegalArgumentException("Invalid order: " + o + ". Order can be only field from used dimensions and facts.");
			}
		}
	}

	/**
	 * Returns the ambient transaction connection if present, otherwise a fresh
	 * connection from the data source (release via {@link #releaseConnection}).
	 */
	protected Connection getConnection() {
		if (transactionConnection != null) {
			return transactionConnection;
		}
		try {
			return dataSource.getConnection();
		} catch (SQLException e) {
			// keep the original cause attached (was previously dropped)
			throw new RuntimeException("Unable to resolve connection for cube query. " + e.getMessage(), e);
		}
	}

	/** Closes the connection unless it belongs to an ambient transaction. */
	protected void releaseConnection(Connection connection) throws SQLException {
		if (transactionConnection == null) {
			connection.close();
		}
	}

	private static RevenjQueryTransformConfiguration buildConfig(MetamodelUtil metamodel) {
		RevenjQueryTransformConfiguration config = new RevenjQueryTransformConfiguration();
		config.metamodel = metamodel;
		config.alternateClassLoader = null;
		config.isObjectEqualsSafe = true;
		config.isCollectionContainsSafe = true;
		return config;
	}

	/**
	 * Translates the analyzed filter lambda into a WHERE-capable query tree,
	 * or returns null if the lambda cannot be analyzed.
	 */
	private SelectFromWhere<TSource> applyTransformWithLambda(String name, LambdaInfo lambdaInfo) {
		if (lambdaInfo == null) {
			return null;
		}
		try {
			RevenjQueryTransformConfiguration config = buildConfig(metamodel);
			LambdaAnalysis lambdaAnalysis = lambdaInfo.fullyAnalyze(metamodel, loader, true, true, true, true);
			if (lambdaAnalysis == null) {
				return null;
			}
			config.checkLambdaSideEffects(lambdaAnalysis);
			SelectFromWhere<TSource> query = new SelectFromWhere<>();
			From.FromDataSource from = new From.FromDataSource();
			from.name = name;
			query.cols = ColumnExpressions.singleColumn(SimpleRowReader.READER, new FromAliasExpression(from));
			query.froms.add(from);
			WhereTransform where = new WhereTransform(config, false);
			return where.apply(lambdaAnalysis, null, query);
		} catch (QueryTransformException | TypedValueVisitorException e) {
			throw new RuntimeException(e);
		}
	}

	/** Hook allowing subclasses to rewrite the filter before SQL translation. */
	protected Specification<TSource> rewriteSpecification(Specification<TSource> specification) {
		return specification;
	}

	/**
	 * Builds the cube SQL into {@code sb}.
	 *
	 * @param sb         target buffer
	 * @param asRecord   wrap selected columns in ROW(...) so the result can be
	 *                   read as a single record column
	 * @param parameters receives generated query parameters (when filtering)
	 * @param lambdas    receives analyzed filter lambdas (when filtering)
	 */
	public void prepareSql(
			StringBuilder sb,
			boolean asRecord,
			List<String> usedDimensions,
			List<String> usedFacts,
			Collection<Map.Entry<String, Boolean>> order,
			Specification<TSource> filter,
			Integer limit,
			Integer offset,
			List<GeneratedQueryParameter> parameters,
			List<LambdaInfo> lambdas) {
		Map<String, Boolean> customOrder = new LinkedHashMap<>();
		if (order != null) {
			for (Map.Entry<String, Boolean> o : order) {
				if (o.getKey() != null) {
					customOrder.put(o.getKey(), o.getValue());
				}
			}
		}
		validateInput(usedDimensions, usedFacts, customOrder.keySet());

		String alias = "_it";
		sb.append("SELECT ");
		if (asRecord) {
			sb.append("ROW(");
		}
		for (String d : usedDimensions) {
			sb.append(cubeDimensions.get(d).apply(alias)).append(',');
		}
		for (String f : usedFacts) {
			sb.append(cubeFacts.get(f).apply(alias)).append(',');
		}
		sb.setLength(sb.length() - 1); // drop trailing comma
		if (asRecord) {
			sb.append(")");
		}
		sb.append(" FROM ").append(getSource()).append(" \"").append(alias).append("\"");
		if (filter != null) {
			LambdaInfo lambdaInfo = LambdaInfo.analyze(rewriteSpecification(filter), 0, true);
			SelectFromWhere<?> sfw = applyTransformWithLambda(alias, lambdaInfo);
			if (sfw != null && sfw.generateWhere("\"" + alias + "\"")) {
				sb.append(" WHERE ");
				sb.append(sfw.getQueryString());
				parameters.addAll(sfw.getQueryParameters());
			}
			lambdas.add(lambdaInfo);
		}
		if (!usedDimensions.isEmpty()) {
			sb.append(" GROUP BY ");
			for (String d : usedDimensions) {
				sb.append(cubeDimensions.get(d).apply(alias));
				sb.append(", ");
			}
			sb.setLength(sb.length() - 2);
			sb.append('\n');
		}
		if (!customOrder.isEmpty()) {
			sb.append(" ORDER BY ");
			for (Map.Entry<String, Boolean> o : customOrder.entrySet()) {
				if (cubeDimensions.containsKey(o.getKey())) {
					sb.append(cubeDimensions.get(o.getKey()).apply(alias));
				} else if (cubeFacts.containsKey(o.getKey())) {
					sb.append(cubeFacts.get(o.getKey()).apply(alias));
				} else {
					sb.append("\"").append(o.getKey()).append("\"");
				}
				// BUGFIX: descending order previously appended "DESC" with no
				// leading space, producing invalid SQL such as "colDESC"
				sb.append(o.getValue() ? "" : " DESC");
				sb.append(", ");
			}
			sb.setLength(sb.length() - 2);
		}
		if (limit != null) {
			sb.append(" LIMIT ").append(Integer.toString(limit));
		}
		if (offset != null) {
			sb.append(" OFFSET ").append(Integer.toString(offset));
		}
	}

	/**
	 * Looks up the registered converters for the selected dimensions and facts,
	 * in result-column order (dimensions first, then facts).
	 */
	public Converter[] prepareConverters(List<String> dimensions, List<String> facts) {
		Converter[] converters = new Converter[dimensions.size() + facts.size()];
		for (int i = 0; i < dimensions.size(); i++) {
			converters[i] = cubeConverters.get(dimensions.get(i));
		}
		for (int i = 0; i < facts.size(); i++) {
			converters[i + dimensions.size()] = cubeConverters.get(facts.get(i));
		}
		return converters;
	}

	@Override
	public List<Map<String, Object>> analyze(
			List<String> dimensions,
			List<String> facts,
			Collection<Map.Entry<String, Boolean>> order,
			Specification<TSource> filter,
			Integer limit,
			Integer offset) {
		List<String> usedDimensions = new ArrayList<>();
		List<String> usedFacts = new ArrayList<>();
		if (dimensions != null) {
			usedDimensions.addAll(dimensions);
		}
		if (facts != null) {
			usedFacts.addAll(facts);
		}
		List<GeneratedQueryParameter> parameters = filter != null ? new ArrayList<>() : null;
		List<LambdaInfo> lambdas = filter != null ? new ArrayList<>(1) : null;
		StringBuilder sb = new StringBuilder();
		prepareSql(sb, true, usedDimensions, usedFacts, order, filter, limit, offset, parameters, lambdas);
		Converter[] converters = prepareConverters(usedDimensions, usedFacts);
		Connection connection = getConnection();
		List<Map<String, Object>> result = new ArrayList<>();
		try (PreparedStatement ps = connection.prepareStatement(sb.toString())) {
			if (parameters != null && parameters.size() > 0) {
				RevenjQueryComposer.fillQueryParameters(connection, locator, ps, 0, parameters, lambdas);
			}
			// try-with-resources: the ResultSet was previously closed only on
			// the success path
			try (ResultSet rs = ps.executeQuery()) {
				String[] columnNames = new String[usedFacts.size() + usedDimensions.size()];
				for (int i = 0; i < usedDimensions.size(); i++) {
					columnNames[i] = usedDimensions.get(i);
				}
				for (int i = 0; i < usedFacts.size(); i++) {
					columnNames[usedDimensions.size() + i] = usedFacts.get(i);
				}
				while (rs.next()) {
					reader.process(rs.getString(1));
					reader.read();
					Map<String, Object> item = new LinkedHashMap<>();
					for (int i = 0; i < columnNames.length; i++) {
						item.put(columnNames[i], converters[i].convert(reader, 1));
					}
					result.add(item);
				}
			}
		} catch (SQLException | IOException ex) {
			throw new RuntimeException(ex);
		} finally {
			// BUGFIX: the connection obtained from the data source was never
			// released, leaking a pooled connection per non-transactional call
			try {
				releaseConnection(connection);
			} catch (SQLException ignored) {
				// best effort; do not mask the primary result or exception
			}
		}
		return result;
	}

	/**
	 * Executes the cube query on the supplied connection and returns the open
	 * ResultSet; the caller owns both the statement's lifetime (via the
	 * ResultSet) and the connection.
	 */
	public ResultSet stream(
			Connection connection,
			List<String> dimensions,
			List<String> facts,
			Collection<Map.Entry<String, Boolean>> order,
			Specification<TSource> filter,
			Integer limit,
			Integer offset) throws SQLException {
		List<String> usedDimensions = new ArrayList<>();
		List<String> usedFacts = new ArrayList<>();
		if (dimensions != null) {
			usedDimensions.addAll(dimensions);
		}
		if (facts != null) {
			usedFacts.addAll(facts);
		}
		List<GeneratedQueryParameter> parameters = filter != null ? new ArrayList<>() : null;
		List<LambdaInfo> lambdas = filter != null ? new ArrayList<>(1) : null;
		StringBuilder sb = new StringBuilder();
		prepareSql(sb, false, usedDimensions, usedFacts, order, filter, limit, offset, parameters, lambdas);
		PreparedStatement ps = connection.prepareStatement(sb.toString());
		if (parameters != null && parameters.size() > 0) {
			RevenjQueryComposer.fillQueryParameters(connection, locator, ps, 0, parameters, lambdas);
		}
		return ps.executeQuery();
	}
}
package ro.pub.cs.aipi.lab09.graphicuserinterface;

import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.GregorianCalendar;
import java.util.HashMap;

import javafx.application.Platform;
import javafx.collections.ObservableList;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.scene.Node;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.scene.control.Button;
import javafx.scene.control.Label;
import javafx.scene.control.Menu;
import javafx.scene.control.MenuBar;
import javafx.scene.control.MenuItem;
import javafx.scene.control.ScrollPane;
import javafx.scene.control.SelectionMode;
import javafx.scene.control.TreeItem;
import javafx.scene.control.TreeView;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.stage.Stage;

import javax.jms.JMSException;

import ro.pub.cs.aipi.lab09.communicator.PublishSubscribe;
import ro.pub.cs.aipi.lab09.general.Constants;

/**
 * Main contacts window of the chat application: shows the current user's
 * contact tree (connected + recent contacts), wires the menu/buttons, and
 * routes publish/subscribe messages to per-interlocutor
 * {@link MessageExchange} windows.
 *
 * <p>Recent contacts are persisted to a per-user file under
 * {@code Constants.TEMPORARY_DIRECTORY}; login/logout presence messages are
 * exchanged through the {@link PublishSubscribe} communicator.
 */
public class ContactsList {

    private Stage applicationStage;
    private Scene applicationScene;
    // Self-reference handed to MessageExchange windows so they can call back.
    private ContactsList contactsList;
    private PublishSubscribe communicator;
    private String currentUserName;
    // One open MessageExchange window per interlocutor user name.
    private HashMap<String, MessageExchange> currentMessagingWindows;
    // Tree roots: overall list, "connected" branch, "recent" branch.
    private TreeItem<String> currentContactsListItem, connectedContactsListItem, recentContactsListItem;
    private About aboutWindow;

    @FXML
    private MenuBar mainMenuBar;
    @FXML
    private Label currentUserNameLabel;
    @FXML
    private TreeView<String> currentContactsListTreeView;
    @FXML
    private Button communicateButton, closeButton;

    /**
     * Creates the contacts list for the given user; call {@link #start()} to
     * build and show the window.
     *
     * @param userName the logged-in user's name
     */
    public ContactsList(String userName) {
        contactsList = this;
        this.currentUserName = userName;
        currentMessagingWindows = new HashMap<>();
    }

    /**
     * Builds the scene from FXML, populates the contact tree, announces this
     * user's login to recent contacts, and wires menu items and buttons.
     * Must run on the JavaFX Application Thread.
     */
    @SuppressWarnings("unchecked")
    public void start() {
        communicator = new PublishSubscribe(currentUserName, this);
        applicationStage = new Stage();
        try {
            applicationScene = new Scene((Parent) FXMLLoader.load(getClass().getResource(Constants.CONTACTS_LIST_FXML_FILE)));
        } catch (Exception exception) {
            // NOTE(review): broad catch — FXML load failures leave
            // applicationScene null, so the lookups below would NPE.
            System.out.println("An exception has occurred: " + exception.getMessage());
            if (Constants.DEBUG) {
                exception.printStackTrace();
            }
        }
        applicationStage.setTitle(Constants.APPLICATION_TITLE);
        applicationStage.getIcons().add(new Image(getClass().getResource(Constants.ICON_FILE_NAME).toExternalForm()));
        currentUserNameLabel = (Label) applicationScene.lookup("#currentUserNameLabel");
        currentUserNameLabel.setText(currentUserName);
        ScrollPane currentContactsListScrollPane = (ScrollPane) applicationScene.lookup("#currentContactsListScrollPane");
        currentContactsListTreeView = (TreeView<String>) currentContactsListScrollPane.getContent();
        currentContactsListTreeView.getSelectionModel().setSelectionMode(SelectionMode.SINGLE);
        // Build the three-level tree: root -> connected branch + recent branch.
        Node currentContactListIcon = new ImageView(new Image(getClass().getResource(Constants.CONTACTS_LIST_ICON).toExternalForm()));
        currentContactsListItem = new TreeItem<>(Constants.CONTACTS_LIST, currentContactListIcon);
        currentContactsListItem.setExpanded(true);
        Node connectedContactListIcon = new ImageView(new Image(getClass().getResource(Constants.CONNECTED_CONTACTS_LIST_ICON).toExternalForm()));
        connectedContactsListItem = new TreeItem<>(Constants.CONNECTED_CONTACTS_LIST, connectedContactListIcon);
        currentContactsListItem.getChildren().add(connectedContactsListItem);
        Node recentContactListIcon = new ImageView(new Image(getClass().getResource(Constants.RECENT_CONTACTS_LIST_ICON).toExternalForm()));
        recentContactsListItem = new TreeItem<>(Constants.RECENT_CONTACTS_LIST, recentContactListIcon);
        currentContactsListItem.getChildren().add(recentContactsListItem);
        // Restore the persisted recent-contacts list before attaching the root.
        readFromFile();
        currentContactsListTreeView.setRoot(currentContactsListItem);
        // TO DO (exercise 6): send login messages to known users
        ObservableList<TreeItem<String>> recentContactsList = recentContactsListItem.getChildren();
        for (TreeItem<String> recentContact : recentContactsList) {
            String contactUserName = recentContact.getValue();
            try {
                communicator.publish(contactUserName, Constants.LOGIN_MESSAGE);
            } catch (JMSException | IOException exception) {
                System.out.println("An exception has occurred: " + exception.getMessage());
                if (Constants.DEBUG) {
                    exception.printStackTrace();
                }
            }
        }
        // Menu wiring: Operations -> Communicate/Close, Help -> About.
        mainMenuBar = (MenuBar) applicationScene.lookup("#mainMenuBar");
        ObservableList<Menu> mainMenuBarList = mainMenuBar.getMenus();
        Menu operationsMenu = mainMenuBarList.get(Constants.OPERATIONS_MENU_INDEX);
        Menu helpMenu = mainMenuBarList.get(Constants.HELP_MENU_INDEX);
        ObservableList<MenuItem> operationsMenuList = operationsMenu.getItems();
        MenuItem communicateMenuItem = operationsMenuList.get(Constants.COMMUNICATE_MENU_INDEX);
        EventHandler<ActionEvent> communicateEventHandler = new EventHandler<ActionEvent>() {
            @Override
            public void handle(ActionEvent event) {
                communicate();
            }
        };
        communicateMenuItem.setOnAction(communicateEventHandler);
        EventHandler<ActionEvent> closeEventHandler = new EventHandler<ActionEvent>() {
            @Override
            public void handle(ActionEvent event) {
                close();
            }
        };
        MenuItem closeMenuItem = operationsMenuList.get(Constants.CLOSE_MENU_INDEX);
        closeMenuItem.setOnAction(closeEventHandler);
        ObservableList<MenuItem> aboutMenuList = helpMenu.getItems();
        MenuItem aboutMenuItem = aboutMenuList.get(Constants.ABOUT_MENU_INDEX);
        aboutMenuItem.setOnAction(new EventHandler<ActionEvent>() {
            @Override
            public void handle(ActionEvent event) {
                about();
            }
        });
        // Buttons reuse the same handlers as the menu items.
        communicateButton = (Button) applicationScene.lookup("#communicateButton");
        communicateButton.setOnAction(communicateEventHandler);
        closeButton = (Button) applicationScene.lookup("#closeButton");
        closeButton.setOnAction(closeEventHandler);
        applicationStage.setScene(applicationScene);
        applicationStage.show();
    }

    /** Shows the (already built) contacts window. */
    public void show() {
        applicationStage.show();
    }

    /** Hides the contacts window without releasing any resources. */
    public void hide() {
        applicationStage.hide();
    }

    /**
     * Registers an open messaging window for the given interlocutor and adds
     * them to the recent-contacts branch (persisting it) if not already there.
     *
     * @param interlocutorUserName the other user's name
     * @param messageExchange      the window handling that conversation
     */
    public void handleCurrentMessagingWindows(String interlocutorUserName, MessageExchange messageExchange) {
        if (!currentMessagingWindows.containsKey(interlocutorUserName)) {
            currentMessagingWindows.put(interlocutorUserName, messageExchange);
        }
        ObservableList<TreeItem<String>> recentContactsList = recentContactsListItem.getChildren();
        boolean found = false;
        if (recentContactsList != null) {
            for (TreeItem<String> recentContact : recentContactsList) {
                String contactUserName = recentContact.getValue();
                if (contactUserName.equals(interlocutorUserName)) {
                    found = true;
                }
            }
        }
        if (!found) {
            recentContactsListItem.getChildren().add(new TreeItem<>(interlocutorUserName));
            writeToFile();
        }
    }

    /**
     * Dispatches an incoming (or locally initiated) message.
     *
     * <p>LOGIN/LOGOUT presence messages only update the connected-contacts
     * branch and return. Otherwise a MessageExchange window is created or
     * reused for the interlocutor (a null interlocutor means the local user
     * initiated a new conversation), and non-null content is appended to that
     * window's history.
     *
     * @param interlocutorUserName sender/recipient, or null for a fresh local window
     * @param messageContent       message text, or null when only opening a window
     * @param messageDateTime      timestamp for history display; may be null
     */
    public void handleMessage(String interlocutorUserName, String messageContent, GregorianCalendar messageDateTime) {
        MessageExchange messageExchange;
        // TO DO (exercise 6): analyze login messages and update
        // connectedContactsListItem
        if (messageContent != null && messageContent.equals(Constants.LOGIN_MESSAGE)) {
            ObservableList<TreeItem<String>> connectedContactsList = connectedContactsListItem.getChildren();
            boolean found = false;
            if (connectedContactsList != null) {
                for (TreeItem<String> connectedContact : connectedContactsList) {
                    String contactUserName = connectedContact.getValue();
                    if (contactUserName.equals(interlocutorUserName)) {
                        found = true;
                    }
                }
            }
            if (!found) {
                connectedContactsListItem.getChildren().add(new TreeItem<>(interlocutorUserName));
            }
            return;
        }
        // TO DO (exercise 6): analyze logout messages and update
        // connectedContactsListItem
        if (messageContent != null && messageContent.equals(Constants.LOGOUT_MESSAGE)) {
            ObservableList<TreeItem<String>> connectedContactsList = connectedContactsListItem.getChildren();
            int position = -1;
            if (connectedContactsList != null) {
                int currentPosition = 0;
                for (TreeItem<String> connectedContact : connectedContactsList) {
                    String contactUserName = connectedContact.getValue();
                    if (contactUserName.equals(interlocutorUserName)) {
                        position = currentPosition;
                    }
                    currentPosition++;
                }
            }
            if (position != -1) {
                connectedContactsListItem.getChildren().remove(position);
            }
            return;
        }
        if (interlocutorUserName == null) {
            // Local user opened a conversation without picking a contact.
            messageExchange = new MessageExchange(currentUserName, communicator, contactsList);
            messageExchange.start();
        } else if (!currentMessagingWindows.containsKey(interlocutorUserName)) {
            messageExchange = new MessageExchange(currentUserName, interlocutorUserName, communicator, contactsList);
            messageExchange.start();
            handleCurrentMessagingWindows(interlocutorUserName, messageExchange);
        } else {
            messageExchange = currentMessagingWindows.get(interlocutorUserName);
            messageExchange.show();
        }
        if (messageContent != null) {
            messageExchange.handleConversationHistory(messageContent, messageDateTime);
        }
    }

    /**
     * "Communicate" action: opens a conversation with the selected contact, or
     * an unaddressed conversation window when a branch label is selected.
     */
    private void communicate() {
        TreeItem<String> currentSelectionTreeItem = currentContactsListTreeView.getSelectionModel().getSelectedItem();
        String currentSelection = null;
        if (currentSelectionTreeItem != null) {
            currentSelection = currentSelectionTreeItem.getValue();
        }
        if (currentSelection != null && isUserName(currentSelection)) {
            handleMessage(currentSelection, null, null);
        } else {
            handleMessage(null, null, null);
        }
    }

    /**
     * "Close" action: broadcasts LOGOUT to recent contacts, closes the
     * communicator, and exits the JavaFX platform.
     */
    private void close() {
        // TO DO (exercise 6): send logout messages to known users
        ObservableList<TreeItem<String>> recentContactsList = recentContactsListItem.getChildren();
        for (TreeItem<String> recentContact : recentContactsList) {
            String contactUserName = recentContact.getValue();
            try {
                communicator.publish(contactUserName, Constants.LOGOUT_MESSAGE);
            } catch (JMSException | IOException exception) {
                System.out.println("An exception has occurred: " + exception.getMessage());
                if (Constants.DEBUG) {
                    exception.printStackTrace();
                }
            }
        }
        communicator.close();
        Platform.exit();
    }

    /** Shows the About window, creating it lazily on first use. */
    private void about() {
        if (aboutWindow == null) {
            aboutWindow = new About();
            aboutWindow.start();
        } else {
            aboutWindow.show();
        }
    }

    /**
     * Loads the per-user recent-contacts file (one user name per line) into
     * the recent-contacts branch, creating the file first if absent.
     */
    private void readFromFile() {
        createFile();
        try (RandomAccessFile file = new RandomAccessFile(Constants.TEMPORARY_DIRECTORY + currentUserName, "rw")) {
            String contactUserName;
            while ((contactUserName = file.readLine()) != null) {
                recentContactsListItem.getChildren().add(new TreeItem<>(contactUserName));
            }
            // NOTE(review): redundant — try-with-resources already closes 'file'.
            file.close();
        } catch (IOException exception) {
            System.out.println("An exception has occurred: " + exception);
            if (Constants.DEBUG) {
                exception.printStackTrace();
            }
        }
    }

    /**
     * Rewrites the per-user recent-contacts file from the current tree state
     * (delete-then-recreate, one name per line).
     */
    private void writeToFile() {
        deleteFile();
        try (RandomAccessFile file = new RandomAccessFile(Constants.TEMPORARY_DIRECTORY + currentUserName, "rw")) {
            ObservableList<TreeItem<String>> recentContactsList = recentContactsListItem.getChildren();
            for (TreeItem<String> recentContact : recentContactsList) {
                String contactUserName = recentContact.getValue();
                file.writeBytes(contactUserName + Constants.NEW_LINE);
            }
            // NOTE(review): redundant — try-with-resources already closes 'file'.
            file.close();
        } catch (IOException exception) {
            System.out.println("An exception has occurred: " + exception);
            if (Constants.DEBUG) {
                exception.printStackTrace();
            }
        }
    }

    /** Creates the per-user recent-contacts file if it does not exist yet. */
    private void createFile() {
        File file = new File(Constants.TEMPORARY_DIRECTORY + currentUserName);
        if (!file.exists()) {
            try {
                file.createNewFile();
            } catch (IOException exception) {
                System.out.println("An exception has occurred: " + exception.getMessage());
                if (Constants.DEBUG) {
                    exception.printStackTrace();
                }
            }
        }
    }

    /** Deletes the per-user recent-contacts file if it exists. */
    private void deleteFile() {
        File file = new File(Constants.TEMPORARY_DIRECTORY + currentUserName);
        if (file.exists()) {
            file.delete();
        }
    }

    /**
     * Returns true when the selection is an actual contact name, i.e. neither
     * the tree root nor one of its branch labels (connected/recent).
     */
    private boolean isUserName(String currentSelection) {
        if (currentSelection.equals(currentContactsListTreeView.getRoot().getValue())) {
            return false;
        }
        for (TreeItem<String> currentContactsListTreeItem : currentContactsListTreeView.getRoot().getChildren()) {
            if (currentSelection.equals(currentContactsListTreeItem.getValue())) {
                return false;
            }
        }
        return true;
    }
}
package org.artifactory.storage.db.aql.sql.builder.query.sql.type;

import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.artifactory.aql.AqlException;
import org.artifactory.aql.model.AqlDomainEnum;
import org.artifactory.aql.model.AqlFieldEnum;
import org.artifactory.aql.model.AqlOperatorEnum;
import org.artifactory.aql.model.AqlTableFieldsEnum;
import org.artifactory.aql.model.DomainSensitiveField;
import org.artifactory.storage.db.aql.sql.builder.links.TableLink;
import org.artifactory.storage.db.aql.sql.builder.links.TableLinkBrowser;
import org.artifactory.storage.db.aql.sql.builder.links.TableLinkRelation;
import org.artifactory.storage.db.aql.sql.builder.query.aql.*;
import org.artifactory.storage.db.aql.sql.builder.query.sql.SqlTable;
import org.artifactory.storage.db.aql.sql.model.AqlFieldExtensionEnum;
import org.artifactory.storage.db.aql.sql.model.SqlTableEnum;
import org.artifactory.util.Pair;

import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

import static org.artifactory.storage.db.aql.sql.builder.query.sql.type.AqlTableGraph.tablesLinksMap;
import static org.artifactory.storage.db.aql.sql.model.AqlFieldExtensionEnum.getExtensionFor;

/**
 * This is actually the class that contains all the code that converts the AqlQuery to sqlQuery.
 *
 * @author Gidi Shabat
 */
public abstract class BasicSqlGenerator {

    /**
     * For each pair of tables, the shortest join route between them
     * (from-table -> to-table -> ordered list of link relations).
     * Built once in the constructor from the static table graph.
     */
    public final Map<SqlTableEnum, Map<SqlTableEnum, List<TableLinkRelation>>> tableRouteMap;

    /**
     * Maps a result field to the table that holds it, together with the
     * sub-domain table chain needed to join it back to the main table.
     */
    Function<DomainSensitiveField, DomainSensitiveTable> toTables = new Function<DomainSensitiveField, DomainSensitiveTable>() {
        @Nullable
        @Override
        public DomainSensitiveTable apply(@Nullable DomainSensitiveField input) {
            if (input == null) {
                return null;
            }
            AqlFieldExtensionEnum extension = getExtensionFor(input.getField());
            List<SqlTableEnum> tables = generateTableListFromSubDomainAndField(input.getSubDomains());
            SqlTable table = tablesLinksMap.get(extension.table).getTable();
            return new DomainSensitiveTable(table, tables);
        }
    };

    /**
     * Extracts the first table referenced by a criteria (or null when the
     * criteria has no table), again paired with its sub-domain chain.
     */
    Function<AqlQueryElement, DomainSensitiveTable> firstTableFromCriteria = new Function<AqlQueryElement, DomainSensitiveTable>() {
        @Nullable
        @Override
        public DomainSensitiveTable apply(@Nullable AqlQueryElement input) {
            SqlTable table = input != null ? ((Criteria) input).getTable1() : null;
            if (table != null) {
                List<SqlTableEnum> tables = generateTableListFromSubDomainAndField(((Criteria) input).getSubDomains());
                return new DomainSensitiveTable(table, tables);
            }
            return null;
        }
    };

    /** Filter that keeps only non-null tables (criterias may yield null). */
    Predicate<DomainSensitiveTable> notNull = new Predicate<DomainSensitiveTable>() {
        @Override
        public boolean apply(@Nullable DomainSensitiveTable input) {
            return input != null;
        }
    };

    /** Filter that keeps only {@link Criteria} elements of the AQL query. */
    Predicate<AqlQueryElement> criteriasOnly = new Predicate<AqlQueryElement>() {
        @Override
        public boolean apply(@Nullable AqlQueryElement input) {
            return input instanceof Criteria;
        }
    };

    /** Projects a domain-sensitive table onto its table enum (null-safe). */
    Function<DomainSensitiveTable, SqlTableEnum> toTableEnum = new Function<DomainSensitiveTable, SqlTableEnum>() {
        @Nullable
        @Override
        public SqlTableEnum apply(@Nullable DomainSensitiveTable input) {
            return input != null ? input.getTable().getTable() : null;
        }
    };

    /**
     * The constructor scans the table schema and creates a map that contains the shortest route between two tables.
     */
    protected BasicSqlGenerator() {
        Map<SqlTableEnum, Map<SqlTableEnum, List<TableLinkRelation>>> routeMap = Maps.newHashMap();
        for (TableLink from : tablesLinksMap.values()) {
            for (TableLink to : tablesLinksMap.values()) {
                List<TableLinkRelation> route = findShortestPathBetween(from, to);
                Map<SqlTableEnum, List<TableLinkRelation>> toRouteMap = routeMap.get(from.getTableEnum());
                if (toRouteMap == null) {
                    toRouteMap = Maps.newHashMap();
                    routeMap.put(from.getTableEnum(), toRouteMap);
                }
                toRouteMap.put(to.getTableEnum(), route);
            }
        }
        tableRouteMap = routeMap;
    }

    /**
     * The method generates the result (select) part of the SQL query:
     * a comma-separated list of {@code alias.column} pairs.
     */
    public String results(AqlQuery aqlQuery) {
        StringBuilder result = new StringBuilder();
        result.append(" ");
        Iterator<DomainSensitiveField> iterator = aqlQuery.getResultFields().iterator();
        while (iterator.hasNext()) {
            DomainSensitiveField nextField = iterator.next();
            AqlFieldEnum fieldEnum = nextField.getField();
            AqlFieldExtensionEnum next = getExtensionFor(fieldEnum);
            SqlTable table = tablesLinksMap.get(next.table).getTable();
            result.append(table.getAlias()).append(next.tableField);
            // Separate fields with commas; trail the last one with a space.
            if (iterator.hasNext()) {
                result.append(",");
            } else {
                result.append(" ");
            }
        }
        return result.toString();
    }

    /**
     * This is one of the most important and complicated parts in the Aql mechanism.
     * Its task is to create the tables declaration part in the SQL query.
     * The method does this with the help of "sub domains": each field in the result fields and in the criteria
     * contains a list of domains that represent the route to the main domain, so basically, in order to bind one field
     * to the other we can trace the sub domains and bind each field to the "Main Table".
     * The problem with tracing the sub domains is that there is no injective match between the tables and the domains,
     * therefore we use the tablesLinksMap that contains the shortest route between two tables and helps us ensure
     * that in "threaded form" we will bind all the tables needed from the
     * "Field table" to the "Main table".
     *
     * @param aqlQuery the parsed AQL query
     * @return the SQL from/join clause
     */
    public String tables(AqlQuery aqlQuery) {
        Set<SqlTable> usedTables = Sets.newHashSet();
        StringBuilder join = new StringBuilder();
        join.append(" ");
        // Get all Result tables
        Iterable<DomainSensitiveTable> resultTables = Iterables.transform(aqlQuery.getResultFields(), toTables);
        // Find all the criterias
        Iterable<AqlQueryElement> filter = Iterables.filter(aqlQuery.getAqlElements(), criteriasOnly);
        // Get the tables from the criterias
        Iterable<DomainSensitiveTable> criteriasTables = Iterables.transform(filter, firstTableFromCriteria);
        // Concatenate the resultTables and the criteriasTables
        Iterable<DomainSensitiveTable> allTables = Iterables.concat(resultTables, criteriasTables);
        // Resolve Join type (inner join or left outer join) for better performance
        AqlJoinTypeEnum joinTypeEnum = resolveJoinType(allTables);
        // Clean null tables if exists
        allTables = Iterables.filter(allTables, notNull);
        SqlTable mainTable = tablesLinksMap.get(getMainTable()).getTable();
        // Join the main table as first table (not join)
        joinTable(mainTable, null, null, null, usedTables, join, true, joinTypeEnum);
        for (DomainSensitiveTable table : allTables) {
            TableLink to;
            // Resolve the first table : which is always the "Main Table"
            SqlTableEnum fromTableEnum = table.getTables().get(0);
            // Find the route to the target ("to") table and add a join for each table in the route
            TableLink from = tablesLinksMap.get(fromTableEnum);
            for (int i = 1; i < table.getTables().size(); i++) {
                SqlTableEnum toTableEnum = table.getTables().get(i);
                to = tablesLinksMap.get(toTableEnum);
                List<TableLinkRelation> relations = tableRouteMap.get(from.getTableEnum()).get(to.getTableEnum());
                generateJoinTables(relations, usedTables, join, joinTypeEnum);
                from = to;
            }
            // Finally add a join to the field table
            to = tablesLinksMap.get(table.getTable().getTable());
            List<TableLinkRelation> relations = tableRouteMap.get(from.getTableEnum()).get(to.getTableEnum());
            generateJoinTables(relations, usedTables, join, joinTypeEnum, table.getTable());
        }
        return join.toString() + " ";
    }

    /**
     * The method creates the where part of the SQL query.
     * It scans all the criteria and parenthesis elements in the AQL query
     * and transforms those elements into SQL syntax.
     *
     * @param aqlQuery the parsed AQL query
     * @return the condition string paired with its positional parameters
     * @throws AqlException on invalid criteria
     */
    public Pair<String, List<Object>> conditions(AqlQuery aqlQuery) throws AqlException {
        StringBuilder condition = new StringBuilder();
        List<Object> params = Lists.newArrayList();
        for (AqlQueryElement aqlQueryElement : aqlQuery.getAqlElements()) {
            if (aqlQueryElement instanceof ComplexPropertyCriteria ||
                    aqlQueryElement instanceof SimpleCriteria ||
                    aqlQueryElement instanceof SimplePropertyCriteria) {
                Criteria criteria = (Criteria) aqlQueryElement;
                // toSql appends its bind values to params as a side effect.
                condition.append(criteria.toSql(params));
            }
            if (aqlQueryElement instanceof OperatorQueryElement) {
                AqlOperatorEnum operatorEnum = ((OperatorQueryElement) aqlQueryElement).getOperatorEnum();
                condition.append(" ").append(operatorEnum.name());
            }
            if (aqlQueryElement instanceof OpenParenthesisAqlElement) {
                condition.append("(");
            }
            if (aqlQueryElement instanceof CloseParenthesisAqlElement) {
                condition.append(")");
            }
        }
        // Fixed: use the diamond instead of a raw Pair (unchecked warning).
        return new Pair<>(condition.toString() + " ", params);
    }

    /**
     * Finds the shortest relation path between two tables, first honoring the
     * subclass exclusion list and, if no route exists, retrying without it.
     */
    private List<TableLinkRelation> findShortestPathBetween(TableLink from, TableLink to) {
        List<TableLinkRelation> relations = TableLinkBrowser.create().findPathTo(from, to, getExclude());
        if (relations == null) {
            ArrayList<TableLink> excludes = Lists.newArrayList();
            relations = TableLinkBrowser.create().findPathTo(from, to, excludes);
        }
        relations = overrideRoute(relations);
        return relations;
    }

    /** Tables the subclass wants excluded from path finding. */
    protected abstract List<TableLink> getExclude();

    /** Hook allowing subclasses to replace a computed route; default is identity. */
    protected List<TableLinkRelation> overrideRoute(List<TableLinkRelation> route) {
        return route;
    }

    /**
     * Appends a join clause for every relation in the route, skipping tables
     * that were already declared.
     */
    protected void generateJoinTables(List<TableLinkRelation> relations, Set<SqlTable> usedTables, StringBuilder join,
            AqlJoinTypeEnum joinTypeEnum) {
        if (relations == null) {
            return;
        }
        for (TableLinkRelation relation : relations) {
            AqlTableFieldsEnum fromField = relation.getFromField();
            SqlTable fromTable = relation.getFromTable().getTable();
            AqlTableFieldsEnum toField = relation.getToFiled();
            SqlTable toTable = relation.getToTable().getTable();
            joinTable(toTable, toField, fromTable, fromField, usedTables, join, false, joinTypeEnum);
        }
    }

    /**
     * Same as the four-argument overload, but substitutes {@code sqlTable}
     * (an aliased instance of the same table) for the route's target table,
     * so the final join lands on the field's concrete alias.
     */
    protected void generateJoinTables(List<TableLinkRelation> relations, Set<SqlTable> usedTables, StringBuilder join,
            AqlJoinTypeEnum joinTypeEnum, SqlTable sqlTable) {
        if (relations == null) {
            return;
        }
        for (TableLinkRelation relation : relations) {
            AqlTableFieldsEnum fromField = relation.getFromField();
            SqlTable fromTable = relation.getFromTable().getTable();
            AqlTableFieldsEnum toField = relation.getToFiled();
            SqlTable toTable = relation.getToTable().getTable();
            toTable = toTable.getTable() == sqlTable.getTable() ? sqlTable : toTable;
            joinTable(toTable, toField, fromTable, fromField, usedTables, join, false, joinTypeEnum);
        }
    }

    /**
     * Declares a single table in the from/join clause unless it was already
     * declared. The first table is emitted bare ("from t alias"); subsequent
     * tables are emitted as "<join type> t alias on t.col = prev.col".
     */
    protected void joinTable(SqlTable table, AqlTableFieldsEnum tableJoinField, SqlTable onTable,
            AqlTableFieldsEnum onJoinFiled, Set<SqlTable> declaredTables, StringBuilder join, boolean first,
            AqlJoinTypeEnum joinTypeEnum) {
        if (!declaredTables.contains(table)) {
            if (first) {
                join.append(table.getTableName()).append(" ").append(table.getAliasDeclaration());
            } else {
                join.append(" ").append(joinTypeEnum.signature).append(" ").append(table.getTableName()).append(
                        " ").append(table.getAliasDeclaration());
                join.append(" on ").append(table.getAlias()).append(tableJoinField).
                        append(" = ").append(onTable.getAlias()).append(onJoinFiled);
            }
        }
        declaredTables.add(table);
    }

    /**
     * Generates the order-by clause, or null when the query has no sort.
     */
    public String sort(AqlQuery aqlQuery) {
        SortDetails sortDetails = aqlQuery.getSort();
        if (sortDetails == null || sortDetails.getFields().isEmpty()) {
            return null;
        }
        StringBuilder stringBuilder = new StringBuilder();
        List<AqlFieldEnum> fields = sortDetails.getFields();
        Iterator<AqlFieldEnum> iterator = fields.iterator();
        while (iterator.hasNext()) {
            AqlFieldEnum sortField = iterator.next();
            AqlFieldExtensionEnum extension = getExtensionFor(sortField);
            SqlTable table = tablesLinksMap.get(extension.table).getTable();
            stringBuilder.append(table.getAlias()).append(extension.tableField);
            stringBuilder.append(" ").append(sortDetails.getSortType().getSqlName());
            if (iterator.hasNext()) {
                stringBuilder.append(",");
            } else {
                stringBuilder.append(" ");
            }
        }
        return stringBuilder.toString();
    }

    /**
     * Query performance optimisation:
     * In case of single table join such as multiple properties table join
     * without the usage of any other table we can use inner join for better performance.
     *
     * @param allTables all tables referenced by results and criterias (may contain nulls)
     * @return inner join when exactly one distinct table is used, else left outer join
     */
    private AqlJoinTypeEnum resolveJoinType(Iterable<DomainSensitiveTable> allTables) {
        Iterable<SqlTableEnum> tables = Iterables.transform(allTables, toTableEnum);
        HashSet<SqlTableEnum> tableEnums = Sets.newHashSet();
        for (SqlTableEnum table : tables) {
            if (table != null) {
                tableEnums.add(table);
            }
        }
        if (tableEnums.size() == 1) {
            return AqlJoinTypeEnum.innerJoin;
        } else {
            return AqlJoinTypeEnum.leftOuterJoin;
        }
    }

    /** The domain's anchor table that every field is joined back to. */
    protected abstract SqlTableEnum getMainTable();

    /**
     * Converts a field's sub-domain chain into the table chain used for join
     * routing; the last sub-domain is dropped when there is more than one,
     * because the field's own table is joined separately.
     */
    private List<SqlTableEnum> generateTableListFromSubDomainAndField(List<AqlDomainEnum> subDomains) {
        List<SqlTableEnum> result = Lists.newArrayList();
        if (subDomains.size() > 1) {
            for (int i = 0; i < subDomains.size() - 1; i++) {
                result.add(domainToTable(subDomains.get(i)));
            }
        } else {
            result.add(domainToTable(subDomains.get(0)));
        }
        return result;
    }

    /** Maps an AQL domain onto its backing SQL table; null for unknown domains. */
    private SqlTableEnum domainToTable(AqlDomainEnum domainEnum) {
        switch (domainEnum) {
            case archives:
                return SqlTableEnum.archive_names;
            case items:
                return SqlTableEnum.nodes;
            case properties:
                return SqlTableEnum.node_props;
            case statistics:
                return SqlTableEnum.stats;
            case builds:
                return SqlTableEnum.builds;
            case buildProperties:
                return SqlTableEnum.build_props;
            case artifacts:
                return SqlTableEnum.build_artifacts;
            case dependencies:
                return SqlTableEnum.build_dependencies;
            case modules:
                return SqlTableEnum.build_modules;
            case moduleProperties:
                return SqlTableEnum.module_props;
        }
        return null;
    }
}
// Copyright 2014 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.analysis.actions; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Joiner; import com.google.common.base.Objects; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.devtools.build.lib.actions.ActionExecutionContext; import com.google.devtools.build.lib.actions.ActionOwner; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.concurrent.ThreadSafety.Immutable; import com.google.devtools.build.lib.syntax.SkylarkDict; import com.google.devtools.build.lib.util.Fingerprint; import com.google.devtools.build.lib.util.ResourceFileLoader; import com.google.devtools.build.lib.util.StringUtilities; import com.google.devtools.build.lib.vfs.FileSystemUtils; import com.google.devtools.build.lib.vfs.Path; import com.google.protobuf.ByteString; import java.io.IOException; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.util.Collection; import java.util.List; /** * Action to expand a template and write the expanded content to a file. 
*/ @Immutable // if all substitutions are immutable public final class TemplateExpansionAction extends AbstractFileWriteAction { private static final String GUID = "786c1fe0-dca8-407a-b108-e1ecd6d1bc7f"; /** * A pair of a string to be substituted and a string to substitute it with. * For simplicity, these are called key and value. All implementations must * be immutable, and always return the identical key. The returned values * must be the same, though they need not be the same object. * * <p>It should be assumed that the {@link #getKey} invocation is cheap, and * that the {@link #getValue} invocation is expensive. */ @Immutable // if the keys and values in the passed in lists and maps are all immutable public abstract static class Substitution { private Substitution() { } public abstract String getKey(); public abstract String getValue(); /** * Returns an immutable Substitution instance for the given key and value. */ public static Substitution of(final String key, final String value) { return new Substitution() { @Override public String getKey() { return key; } @Override public String getValue() { return value; } }; } /** * Returns an immutable Substitution instance for the key and list of values. The * values will be joined by spaces before substitution. */ public static Substitution ofSpaceSeparatedList( final String key, final ImmutableList<?> value) { return new Substitution() { @Override public String getKey() { return key; } @Override public String getValue() { return Joiner.on(" ").join(value); } }; } /** * Returns an immutable Substitution instance for the key and map of values. Corresponding * values in the map will be joined with "=", and pairs will be joined by spaces before * substitution. * * <p>For example, the map <(a,1), (b,2), (c,3)> will become "a=1 b=2 c=3". 
*/
  public static Substitution ofSpaceSeparatedMap(
      final String key, final ImmutableMap<?, ?> value) {
    return new Substitution() {
      @Override
      public String getKey() {
        return key;
      }

      @Override
      public String getValue() {
        // Renders the map entries as "k1=v1 k2=v2 ..." (space-separated key=value pairs).
        return Joiner.on(" ").withKeyValueSeparator("=").join(value);
      }
    };
  }

  @Override
  public boolean equals(Object object) {
    if (this == object) {
      return true;
    }
    if (object instanceof Substitution) {
      Substitution substitution = (Substitution) object;
      // Equality is on the (key, value) pair; note getValue() may be expensive for
      // computed substitutions.
      return substitution.getKey().equals(this.getKey())
          && substitution.getValue().equals(this.getValue());
    }
    return false;
  }

  @Override
  public int hashCode() {
    // Must stay consistent with equals(): same (key, value) => same hash.
    return Objects.hashCode(getKey(), getValue());
  }

  @Override
  public String toString() {
    return "Substitution(" + getKey() + " -> " + getValue() + ")";
  }
  }

  /**
   * A substitution with a fixed key, and a computed value. The computed value
   * must not change over the lifetime of an instance, though the {@link
   * #getValue} method may return different String objects.
   *
   * <p>It should be assumed that the {@link #getKey} invocation is cheap, and
   * that the {@link #getValue} invocation is expensive.
   */
  public abstract static class ComputedSubstitution extends Substitution {
    private final String key;

    public ComputedSubstitution(String key) {
      this.key = key;
    }

    @Override
    public String getKey() {
      return key;
    }
  }

  /**
   * A template that contains text content, or alternatively throws an {@link
   * IOException}.
   */
  @Immutable // all subclasses are immutable
  public abstract static class Template {
    private static final Charset DEFAULT_CHARSET = StandardCharsets.UTF_8;

    /**
     * We only allow subclasses in this file.
     */
    private Template() {
    }

    /**
     * Returns the text content of the template.
     */
    protected abstract String getContent() throws IOException;

    /**
     * Returns a string that is used for the action key. This must change if
     * the getContent method returns something different, but is not allowed to
     * throw an exception.
     */
    protected abstract String getKey();

    /**
     * Loads a template from the given resource. The resource is looked up
     * relative to the given class. If the resource cannot be loaded, the returned
     * template throws an {@link IOException} when {@link #getContent} is
     * called. This makes it safe to use this method in a constant initializer.
     */
    public static Template forResource(final Class<?> relativeToClass, final String templateName) {
      try {
        String content = ResourceFileLoader.loadResource(relativeToClass, templateName);
        return forString(content);
      } catch (final IOException e) {
        // Defer the failure to getContent() so this factory never throws; the
        // action key encodes the error so the failure is not cached as success.
        return new Template() {
          @Override
          protected String getContent() throws IOException {
            throw new IOException("failed to load resource file '" + templateName
                + "' due to I/O error: " + e.getMessage(), e);
          }

          @Override
          protected String getKey() {
            return "ERROR: " + e.getMessage();
          }
        };
      }
    }

    /**
     * Returns a template for the given text string.
     */
    public static Template forString(final String templateText) {
      return new Template() {
        @Override
        protected String getContent() {
          return templateText;
        }

        @Override
        protected String getKey() {
          // The literal text fully determines the content, so it doubles as the key.
          return templateText;
        }
      };
    }

    /**
     * Returns a template that loads the given artifact. It is important that
     * the artifact is also an input for the action, or this won't work.
     * Therefore this method is private, and you should use the corresponding
     * {@link TemplateExpansionAction} constructor.
     */
    private static Template forArtifact(final Artifact templateArtifact) {
      return new Template() {
        @Override
        protected String getContent() throws IOException {
          Path templatePath = templateArtifact.getPath();
          try {
            return FileSystemUtils.readContent(templatePath, DEFAULT_CHARSET);
          } catch (IOException e) {
            throw new IOException("failed to load template file '" + templatePath.getPathString()
                + "' due to I/O error: " + e.getMessage(), e);
          }
        }

        @Override
        protected String getKey() {
          // This isn't strictly necessary, because the action inputs are automatically considered.
          return "ARTIFACT: " + templateArtifact.getExecPathString();
        }
      };
    }
  }

  // Template to expand plus the ordered substitutions applied to it.
  private final Template template;
  private final ImmutableList<Substitution> substitutions;

  /**
   * Creates a new TemplateExpansionAction instance.
   *
   * @param owner the action owner.
   * @param inputs the Artifacts that this Action depends on
   * @param output the Artifact that will be created by executing this Action.
   * @param template the template that will be expanded by this Action.
   * @param substitutions the substitutions that will be applied to the
   *   template. All substitutions will be applied in order.
   * @param makeExecutable iff true will change the output file to be
   *   executable.
   */
  private TemplateExpansionAction(ActionOwner owner,
      Collection<Artifact> inputs,
      Artifact output,
      Template template,
      List<Substitution> substitutions,
      boolean makeExecutable) {
    super(owner, inputs, output, makeExecutable);
    this.template = template;
    // Defensive copy: the substitution order is part of the action's semantics.
    this.substitutions = ImmutableList.copyOf(substitutions);
  }

  /**
   * Creates a new TemplateExpansionAction instance for an artifact template.
   *
   * @param owner the action owner.
   * @param templateArtifact the Artifact that will be read as the text template
   *   file
   * @param output the Artifact that will be created by executing this Action.
   * @param substitutions the substitutions that will be applied to the
   *   template. All substitutions will be applied in order.
   * @param makeExecutable iff true will change the output file to be
   *   executable.
   */
  public TemplateExpansionAction(ActionOwner owner,
      Artifact templateArtifact,
      Artifact output,
      List<Substitution> substitutions,
      boolean makeExecutable) {
    // The template artifact is registered as an input so its content participates
    // in dependency checking (see Template.forArtifact).
    this(owner, ImmutableList.of(templateArtifact), output,
        Template.forArtifact(templateArtifact), substitutions, makeExecutable);
  }

  /**
   * Creates a new TemplateExpansionAction instance without inputs.
   *
   * @param owner the action owner.
   * @param output the Artifact that will be created by executing this Action.
   * @param template the template
   * @param substitutions the substitutions that will be applied to the
   *   template. All substitutions will be applied in order.
   * @param makeExecutable iff true will change the output file to be
   *   executable.
   */
  public TemplateExpansionAction(ActionOwner owner,
      Artifact output,
      Template template,
      List<Substitution> substitutions,
      boolean makeExecutable) {
    this(owner, Artifact.NO_ARTIFACTS, output, template, substitutions, makeExecutable);
  }

  /**
   * Expands the template by applying all substitutions, in order, as literal
   * (non-regex) replacements.
   *
   * @param template the raw template text
   * @return the expanded text.
   */
  private String expandTemplate(String template) {
    for (Substitution entry : substitutions) {
      template = StringUtilities.replaceAllLiteral(template, entry.getKey(), entry.getValue());
    }
    return template;
  }

  @VisibleForTesting
  public String getFileContents() throws IOException {
    return expandTemplate(template.getContent());
  }

  @Override
  public String getSkylarkContent() throws IOException {
    return getFileContents();
  }

  @Override
  public DeterministicWriter newDeterministicWriter(ActionExecutionContext ctx) throws IOException {
    return new ByteStringDeterministicWriter(
        ByteString.copyFrom(getFileContents().getBytes(Template.DEFAULT_CHARSET)));
  }

  @Override
  protected String computeKey() {
    // The key must change whenever the output could change: template identity,
    // executable bit, and every (key, value) substitution pair are all hashed.
    Fingerprint f = new Fingerprint();
    f.addString(GUID);
    f.addString(String.valueOf(makeExecutable));
    f.addString(template.getKey());
    f.addInt(substitutions.size());
    for (Substitution entry : substitutions) {
      f.addString(entry.getKey());
      f.addString(entry.getValue());
    }
    return f.hexDigestAndReset();
  }

  @Override
  public String getMnemonic() {
    return "TemplateExpand";
  }

  @Override
  protected String getRawProgressMessage() {
    return "Expanding template " + Iterables.getOnlyElement(getOutputs()).prettyPrint();
  }

  public List<Substitution> getSubstitutions() {
    return substitutions;
  }

  @Override
  public SkylarkDict<String, String> getSkylarkSubstitutions() {
    ImmutableMap.Builder<String, String> builder = ImmutableMap.builder();
    for (Substitution entry : substitutions) {
      builder.put(entry.getKey(), entry.getValue());
    }
    return SkylarkDict.copyOf(null, builder.build());
  }
}
/* * Copyright (c) 2016, Adam Brusselback * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/
package com.gosimple.jpgagent.job;

import com.gosimple.jpgagent.*;
import com.gosimple.jpgagent.annotation.AnnotationUtil;
import com.gosimple.jpgagent.database.Database;
import com.gosimple.jpgagent.email.EmailUtil;
import com.gosimple.jpgagent.job.step.*;
import com.gosimple.jpgagent.thread.CancellableRunnable;
import com.gosimple.jpgagent.thread.ExecutionUtil;

import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Future;

/**
 * One scheduled run of a pgAgent job: submits the job's steps to the shared
 * executor (serializing around steps that cannot run in parallel), waits for
 * completion, records the outcome in the job log, and optionally sends a
 * notification email based on annotation settings parsed from the job comment.
 */
public class Job implements CancellableRunnable
{
    private final int job_id;
    private int job_log_id;
    private String job_name;
    private String job_comment;
    // Final outcome; stays null until decided (null is used as "not yet set" below).
    private JobStatus job_status;
    private List<JobStep> job_step_list;
    // Tracks the Future of every submitted step so it can be awaited or cancelled.
    private final Map<JobStep, Future> future_map = new HashMap<>();
    // Wall-clock start in millis; basis for the JOB_TIMEOUT check.
    private Long start_time;

    /*
     * Annotation settings
     */
    // Timeout setting to abort job if running longer than this value.
    private Long job_timeout = null;
    // List of status to send an email on
    private final List<JobStatus> email_on = new ArrayList<>();
    // Email to list
    private String[] email_to = null;
    // Email subject
    private String email_subject = null;
    // Email body
    private String email_body = null;

    public Job(final int job_id, final String job_name, final String job_comment, final int job_log_id)
    {
        Config.INSTANCE.logger.debug("Job: {} - Instantiating Job begin.", job_id);
        this.job_id = job_id;
        this.job_name = job_name;
        this.job_comment = job_comment;
        this.job_log_id = job_log_id;
        // Annotation values (timeout, email settings) come from the job comment.
        processAnnotations();
        Config.INSTANCE.logger.debug("Job: {} - Instantiating Job complete.", job_id);
    }

    /**
     * Runs the job: submits all steps, waits for them, derives the final
     * JobStatus, releases the job from this agent, finishes the log record,
     * and sends the configured notification email if the status matches.
     */
    public void run()
    {
        try
        {
            Config.INSTANCE.logger.info("Job: {} - Started.", this.job_id);
            this.start_time = System.currentTimeMillis();
            boolean failed_step = false;
            try
            {
                for (JobStep job_step : job_step_list)
                {
                    if (!job_step.canRunInParallel())
                    {
                        // Block until all steps submitted before are done.
                        waitOnRunningJobSteps();
                    }
                    // Submit task.
                    future_map.put(job_step, ExecutionUtil.INSTANCE.submitTask(job_step));
                }
                // Block until all JobSteps are done.
                waitOnRunningJobSteps();
                // A step failure only fails the job when the step's on-error policy says so.
                for (JobStep job_step : job_step_list)
                {
                    if (StepStatus.FAIL.equals(job_step.getStepStatus()) && OnError.FAIL.equals(job_step.getOnError()))
                    {
                        failed_step = true;
                    }
                }
            }
            catch (InterruptedException e)
            {
                // Interrupted while waiting (e.g. job timeout) => job was aborted.
                job_status = JobStatus.ABORTED;
            }
            // Precedence: ABORTED (set above) > IGNORE (no steps) > FAIL > SUCCEED.
            if (job_status == null && job_step_list.isEmpty())
            {
                job_status = JobStatus.IGNORE;
            }
            else if (job_status == null && failed_step)
            {
                job_status = JobStatus.FAIL;
            }
            else if (job_status == null)
            {
                job_status = JobStatus.SUCCEED;
            }
        }
        catch (Exception e)
        {
            job_status = JobStatus.FAIL;
            Config.INSTANCE.logger.error("Job: {} - Job has failed.", this.job_id);
            Config.INSTANCE.logger.error("Job: {} - Message: {}", this.job_id, e.getMessage());
            for(StackTraceElement stackTrace : e.getStackTrace())
            {
                Config.INSTANCE.logger.error("Job: {} - Stack Trace: {}", this.job_id, stackTrace.toString());
            }
        }
        clearJobAgent();
        // Update the log record with the result
        JobLog.finishLog(job_log_id, job_status);
        if(email_on.contains(job_status))
        {
            // Token replacement
            // NOTE(review): assumes EMAIL_SUBJECT/EMAIL_BODY annotations are set whenever
            // EMAIL_ON is — otherwise these dereferences would NPE; confirm with callers.
            email_subject = email_subject.replaceAll(Config.INSTANCE.status_token, job_status.name());
            email_body = email_body.replaceAll(Config.INSTANCE.status_token, job_status.name());
            email_subject = email_subject.replaceAll(Config.INSTANCE.job_name_token, job_name);
            email_body = email_body.replaceAll(Config.INSTANCE.job_name_token, job_name);
            // Send email
            EmailUtil.sendEmailFromNoReply(email_to, email_subject, email_body);
        }
        Config.INSTANCE.logger.info("Job: {} - Complete.", this.job_id);
    }

    /**
     * Releases this job from the agent in the database so it can be picked up
     * again; on failure, schedules a cleanup pass to repair the state.
     */
    private void clearJobAgent()
    {
        final String update_job_sql = Config.INSTANCE.sql.getProperty("sql.job.clear_job_agent");
        try (final PreparedStatement update_job_statement = Database.INSTANCE.getMainConnection().prepareStatement(update_job_sql))
        {
            update_job_statement.setInt(1, job_id);
            update_job_statement.execute();
        }
        catch (SQLException e)
        {
            Config.INSTANCE.logger.error("Job: {} - There was an error clearing the job agent from the job.", this.job_id);
            Config.INSTANCE.logger.error("Job: {} - Message: {}", this.job_id, e.getMessage());
            Config.INSTANCE.logger.error("Running cleanup next poll to fix this issue.");
            JPGAgent.runCleanup();
        }
    }

    /**
     * Assign any values from annotations.
     */
    private void processAnnotations()
    {
        try
        {
            Map<String, String> annotations = AnnotationUtil.parseAnnotations(job_comment);
            if (annotations.containsKey(JobAnnotations.JOB_TIMEOUT.name()))
            {
                job_timeout = AnnotationUtil.parseValue(JobAnnotations.JOB_TIMEOUT, annotations.get(JobAnnotations.JOB_TIMEOUT.name()), Long.class);
            }
            if (annotations.containsKey(JobAnnotations.EMAIL_ON.name()))
            {
                // Semicolon-separated list of JobStatus names.
                for (String email_on_string : AnnotationUtil.parseValue(JobAnnotations.EMAIL_ON, annotations.get(JobAnnotations.EMAIL_ON.name()), String.class).split(";"))
                {
                    email_on.add(JobStatus.valueOf(email_on_string));
                }
            }
            if (annotations.containsKey(JobAnnotations.EMAIL_TO.name()))
            {
                email_to = AnnotationUtil.parseValue(JobAnnotations.EMAIL_TO, annotations.get(JobAnnotations.EMAIL_TO.name()), String.class).split(";");
            }
            if (annotations.containsKey(JobAnnotations.EMAIL_SUBJECT.name()))
            {
                email_subject = AnnotationUtil.parseValue(JobAnnotations.EMAIL_SUBJECT, annotations.get(JobAnnotations.EMAIL_SUBJECT.name()), String.class);
            }
            if (annotations.containsKey(JobAnnotations.EMAIL_BODY.name()))
            {
                email_body = AnnotationUtil.parseValue(JobAnnotations.EMAIL_BODY, annotations.get(JobAnnotations.EMAIL_BODY.name()), String.class);
            }
        }
        catch (Exception e)
        {
            // Bad annotations are logged and ignored; the job still runs with defaults.
            Config.INSTANCE.logger.error("Job: {} - An issue with the annotations has stopped them from being processed.", this.job_id);
            Config.INSTANCE.logger.error("Error: {}", e.getMessage());
        }
    }

    /**
     * Waits on job steps that are running and responds to timeouts.
     * Polls every 200ms; cancels timed-out steps and, on job timeout,
     * cancels everything and interrupts this thread so Thread.sleep
     * raises InterruptedException to the caller.
     * @throws InterruptedException
     */
    private void waitOnRunningJobSteps() throws InterruptedException
    {
        while(submittedJobStepsRunning())
        {
            submittedJobStepTimeout();
            if(isTimedOut())
            {
                cancelTask();
                Thread.currentThread().interrupt();
            }
            Thread.sleep(200);
        }
    }

    /**
     * Check if the job steps already submitted to run are complete.
     * @return true while any submitted step's Future is still pending
     */
    private boolean submittedJobStepsRunning()
    {
        boolean jobsteps_running = false;
        for (Future<?> future : future_map.values())
        {
            if (!future.isDone())
            {
                jobsteps_running = true;
                break;
            }
        }
        return jobsteps_running;
    }

    /**
     * Cancels JobSteps which have timed out prior to finishing.
     */
    private void submittedJobStepTimeout()
    {
        for (JobStep job_step : future_map.keySet())
        {
            final Future<?> future = future_map.get(job_step);
            if(job_step.isTimedOut() && !future.isDone())
            {
                future.cancel(true);
            }
        }
    }

    /**
     * Returns if the job is timed out or not.
     * @return true once elapsed wall-clock time exceeds the JOB_TIMEOUT
     *         annotation value; always false when no timeout is configured
     */
    public boolean isTimedOut()
    {
        if(null != job_timeout && null != start_time)
        {
            return System.currentTimeMillis() - start_time > job_timeout;
        }
        else
        {
            return false;
        }
    }

    /**
     * Should stop any long running process the thread was doing to exit gracefully as quickly as possible.
     */
    @Override
    public void cancelTask()
    {
        for (Future<?> future : future_map.values())
        {
            if (!future.isDone())
            {
                future.cancel(true);
            }
        }
    }

    public int getJobId()
    {
        return job_id;
    }

    public int getJobLogId()
    {
        return job_log_id;
    }

    public String getJobName()
    {
        return job_name;
    }

    public void setJobStepList(List<JobStep> job_step_list)
    {
        this.job_step_list = job_step_list;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.tinkerpop.gremlin.process.computer;

import org.apache.commons.configuration.Configuration;
import org.apache.tinkerpop.gremlin.structure.Graph;
import org.apache.tinkerpop.gremlin.structure.Vertex;

import java.lang.reflect.Constructor;
import java.util.Collections;
import java.util.Optional;
import java.util.Set;

/**
 * A {@link VertexProgram} represents one component of a distributed graph computation. Each vertex in the graph
 * (logically) executes the {@link VertexProgram} instance in parallel. The collective behavior yields
 * the computational result. In practice, a "worker" (i.e. task, thread, etc.) is responsible for executing the
 * VertexProgram against each vertex that it has in its vertex set (a subset of the full graph vertex set).
 * At minimum there is one "worker" for each vertex, though this is impractical in practice and {@link GraphComputer}
 * implementations that leverage such a design are not expected to perform well due to the excess object creation.
 * Any local state/fields in a VertexProgram is static to the vertices within the same worker set.
 * It is not safe to assume that the VertexProgram's "worker" state will remain stable between iterations.
 * Hence, the existence of {@link VertexProgram#workerIterationStart} and {@link VertexProgram#workerIterationEnd}.
 *
 * @author Marko A. Rodriguez (http://markorodriguez.com)
 * @author Matthias Broecheler (me@matthiasb.com)
 */
public interface VertexProgram<M> extends Cloneable {

    public static final String VERTEX_PROGRAM = "gremlin.vertexProgram";

    /**
     * When it is necessary to store the state of the VertexProgram, this method is called.
     * This is typically required when the VertexProgram needs to be serialized to another machine.
     * Note that what is stored is simply the instance/configuration state, not any processed data.
     * The default implementation provided simply stores the VertexProgram class name for reflective reconstruction.
     * It is typically a good idea to call VertexProgram.super.storeState().
     *
     * @param configuration the configuration to store the state of the VertexProgram in.
     */
    public default void storeState(final Configuration configuration) {
        configuration.setProperty(VERTEX_PROGRAM, this.getClass().getName());
    }

    /**
     * When it is necessary to load the state of the VertexProgram, this method is called.
     * This is typically required when the VertexProgram needs to be serialized to another machine.
     * Note that what is loaded is simply the instance state, not any processed data.
     *
     * @param graph         the graph that the VertexProgram will run against
     * @param configuration the configuration to load the state of the VertexProgram from.
     */
    public default void loadState(final Graph graph, final Configuration configuration) {

    }

    /**
     * The method is called at the beginning of the computation.
     * The method is global to the {@link GraphComputer} and as such, is not called for each vertex.
     * During this stage, the {@link Memory} should be initialized to its "start state."
     *
     * @param memory The global memory of the GraphComputer
     */
    public void setup(final Memory memory);

    /**
     * This method denotes the main body of the computation and is executed on each vertex in the graph.
     * This method is logically executed in parallel on all vertices in the graph.
     * When the {@link Memory} is read, it is according to the aggregated state yielded in the previous iteration.
     * When the {@link Memory} is written, the data will be aggregated at the end of the iteration for reading in the next iteration.
     *
     * @param vertex    the {@link Vertex} to execute the {@link VertexProgram} on
     * @param messenger the messenger that moves data between vertices
     * @param memory    the shared state between all vertices in the computation
     */
    public void execute(final Vertex vertex, final Messenger<M> messenger, final Memory memory);

    /**
     * The method is called at the end of each iteration to determine if the computation is complete.
     * The method is global to the {@link GraphComputer} and as such, is not called for each {@link Vertex}.
     * The {@link Memory} maintains the aggregated data from the last execute() iteration.
     *
     * @param memory The global memory of the {@link GraphComputer}
     * @return whether or not to halt the computation
     */
    public boolean terminate(final Memory memory);

    /**
     * This method is called at the start of each iteration of each "computational chunk."
     * The set of vertices in the graph are typically not processed with full parallelism.
     * The vertex set is split into subsets and a worker is assigned to call the {@link VertexProgram#execute} method.
     * The default implementation is a no-op.
     *
     * @param memory The memory at the start of the iteration.
     */
    public default void workerIterationStart(final Memory memory) {

    }

    /**
     * This method is called at the end of each iteration of each "computational chunk."
     * The set of vertices in the graph are typically not processed with full parallelism.
     * The vertex set is split into subsets and a worker is assigned to call the {@link VertexProgram#execute} method.
     * The default implementation is a no-op.
     *
     * @param memory The memory at the end of the iteration.
     */
    public default void workerIterationEnd(final Memory memory) {

    }

    /**
     * The {@link org.apache.tinkerpop.gremlin.structure.Element} properties that will be mutated during the computation.
     * All properties in the graph are readable, but only the keys specified here are writable.
     * The default is an empty set.
     *
     * @return the set of element keys that will be mutated during the vertex program's execution
     */
    public default Set<VertexComputeKey> getVertexComputeKeys() {
        return Collections.emptySet();
    }

    /**
     * The {@link Memory} keys that will be used during the computation.
     * These are the only keys that can be read or written throughout the life of the {@link GraphComputer}.
     * The default is an empty set.
     *
     * @return the set of memory keys that will be read/written
     */
    public default Set<MemoryComputeKey> getMemoryComputeKeys() {
        return Collections.emptySet();
    }

    /**
     * Combine the messages in route to a particular vertex. Useful to reduce the amount of data transmitted over the wire.
     * For example, instead of sending two objects that will ultimately be merged at the vertex destination, merge/combine into one and send that object.
     * If no message combiner is provided, then no messages will be combined.
     * Furthermore, it is not guaranteed that all messages in route to the vertex will be combined and thus, combiner-state should not be used.
     * The result of the vertex program algorithm should be the same regardless of whether message combining is executed or not.
     *
     * @return An optional denoting whether or not there is a message combiner associated with the vertex program.
     */
    public default Optional<MessageCombiner<M>> getMessageCombiner() {
        return Optional.empty();
    }

    /**
     * This method returns all the {@link MessageScope} possibilities for a particular iteration of the vertex program.
     * The returned messages scopes are the scopes that will be used to send messages during the stated iteration.
     * It is not a requirement that all stated messages scopes be used, just that it is possible that they be used during the iteration.
     *
     * @param memory an immutable form of the {@link Memory}
     * @return all possible message scopes during said vertex program iteration
     */
    public Set<MessageScope> getMessageScopes(final Memory memory);

    /**
     * The set of {@link MapReduce} jobs that are associated with the {@link VertexProgram}.
     * This is not necessarily the exhaustive list over the life of the {@link GraphComputer}.
     * If MapReduce jobs are declared by GraphComputer.mapReduce(), they are not contained in this set.
     * The default is an empty set.
     *
     * @return the set of {@link MapReduce} jobs associated with this {@link VertexProgram}
     */
    public default Set<MapReduce> getMapReducers() {
        return Collections.emptySet();
    }

    /**
     * When multiple workers on a single machine need VertexProgram instances, it is possible to use clone.
     * This will provide a speedier way of generating instances, over the {@link VertexProgram#storeState} and {@link VertexProgram#loadState} model.
     * The default implementation simply returns the object as it assumes that the VertexProgram instance is a stateless singleton.
     *
     * @return A clone of the VertexProgram object
     */
    @SuppressWarnings("CloneDoesntDeclareCloneNotSupportedException")
    public VertexProgram<M> clone();

    public GraphComputer.ResultGraph getPreferredResultGraph();

    public GraphComputer.Persist getPreferredPersist();

    /**
     * A helper method to construct a {@link VertexProgram} given the content of the supplied configuration.
     * The class of the VertexProgram is read from the {@link VertexProgram#VERTEX_PROGRAM} static configuration key.
     * Once the VertexProgram is constructed, {@link VertexProgram#loadState} method is called with the provided graph and configuration.
     *
     * @param graph         The graph that the vertex program will execute against
     * @param configuration A configuration with requisite information to build a vertex program
     * @param <V>           The vertex program type
     * @return the newly constructed vertex program
     */
    public static <V extends VertexProgram> V createVertexProgram(final Graph graph, final Configuration configuration) {
        try {
            final Class<V> vertexProgramClass = (Class) Class.forName(configuration.getString(VERTEX_PROGRAM));
            // Reflectively invoke the (possibly non-public) no-arg constructor.
            final Constructor<V> constructor = vertexProgramClass.getDeclaredConstructor();
            constructor.setAccessible(true);
            final V vertexProgram = constructor.newInstance();
            vertexProgram.loadState(graph, configuration);
            return vertexProgram;
        } catch (final Exception e) {
            throw new IllegalStateException(e.getMessage(), e);
        }
    }

    public interface Builder {

        /**
         * This method should only be used by the underlying compute engine. For VertexProgram configurations, please
         * use specific fluent methods off the builder.
         */
        public Builder configure(final Object... keyValues);

        public <P extends VertexProgram> P create(final Graph graph);

    }

    public default Features getFeatures() {
        return new Features() {
        };
    }

    public interface Features {
        public default boolean requiresGlobalMessageScopes() {
            return false;
        }

        public default boolean requiresLocalMessageScopes() {
            return false;
        }

        public default boolean requiresVertexAddition() {
            return false;
        }

        public default boolean requiresVertexRemoval() {
            return false;
        }

        public default boolean requiresVertexPropertyAddition() {
            return false;
        }

        public default boolean requiresVertexPropertyRemoval() {
            return false;
        }

        public default boolean requiresEdgeAddition() {
            return false;
        }

        public default boolean requiresEdgeRemoval() {
            return false;
        }

        public default boolean requiresEdgePropertyAddition() {
            return false;
        }

        public default boolean requiresEdgePropertyRemoval() {
            return false;
        }
    }
}
package me.brainmix.itemapi.api.controllers;

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import me.brainmix.itemapi.api.ItemRegister;

import org.bukkit.Bukkit;
import org.bukkit.Material;
import org.bukkit.block.Block;
import org.bukkit.block.BlockFace;
import org.bukkit.entity.Item;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.Listener;
import org.bukkit.event.block.Action;
import org.bukkit.event.entity.EntityDamageByEntityEvent;
import org.bukkit.event.inventory.InventoryClickEvent;
import org.bukkit.event.player.*;
import org.bukkit.inventory.ItemStack;

import me.brainmix.itemapi.api.CustomItem;
import me.brainmix.itemapi.api.ItemOptions;
import me.brainmix.itemapi.api.events.*;
import me.brainmix.itemapi.api.events.ItemLeftClickEntityEvent.DamageData;
import me.brainmix.itemapi.api.interfaces.Clickable;
import me.brainmix.itemapi.api.interfaces.EntityClickable;
import me.brainmix.itemapi.api.utils.Actionbar;
import me.brainmix.itemapi.api.utils.Interactables;
import me.brainmix.itemapi.api.utils.ItemUtils;

/**
 * Bukkit {@link Listener} that translates raw Bukkit events (interact, drop,
 * inventory click, damage, pickup, despawn) into the ItemAPI custom-item
 * events for every registered {@link CustomItem}.
 */
public class ItemManager extends AbstractItemManager implements Listener {

    // Ticks to wait after a left-click interact for a possible
    // EntityDamageByEntityEvent before firing the plain left-click event.
    public static int WAIT_FOR_DAMAGEEVENT_TICKS = 2;
    // Millisecond window after a right-click-at-entity during which plain
    // right-click handling is suppressed (see clickConsumer / handleClickableItem).
    public static int BETWEEN_INTERACT_AT_ENTITY = 3;
    // Max ms between right-clicks to count as "holding" right-click.
    public static int RIGHT_CLICK_HOLD_MS = 250;
    // Ticks after a hold before checking for a right-click release.
    public static int RIGHT_CLICK_RELEASE_TICKS = 7;

    private Set<CustomItem> items = new HashSet<>();
    private Set<CustomItem> isHandleAfter = new HashSet<>();
    // Player -> timestamp of the last right-click-at-entity that was handled.
    private Map<Player, Long> rightClicked = new HashMap<>();
    // Players whose left-click interact is pending a possible entity-damage event.
    private Set<Player> leftClicked = new HashSet<>();
    // Player -> timestamp of the previous right-click (for hold/release detection).
    private Map<Player, Long> lastRightClick = new HashMap<>();

    // Fires the custom left-click event and mirrors its cancel state onto the
    // Bukkit interact event. (Continues on the following source line.)
    private InteractConsumer leftClickConsumer = (ev, i) -> {
        ItemLeftClickEvent e = new ItemLeftClickEvent(ev.getPlayer(), ev.getPlayer().getItemInHand(),
                i.getDelayManager().getTimeLeft(ev.getPlayer()), ev.getAction(), ev.getClickedBlock(), ev.getBlockFace());
        getRegister().getEventManager().callEvent(i, e);
if(e.isCancelled()) ev.setCancelled(true); if(i.getDelayManager().getTimeLeft(ev.getPlayer()) == -1 && getRegister().getEventManager().hasWithoutDelay(i.getId(), ItemLeftClickEvent.class)) handleAfter(i, ev.getPlayer(), ev.getPlayer().getItemInHand(), i.getOptions()); }; private InteractConsumer clickConsumer = (ev, i) -> { if((getRegister().getEventManager().hasWith(i.getId(), ItemRightClickEntityEvent.class) || getRegister().getEventManager().hasWith(i.getId(), ItemRightClickPlayerEvent.class)) && rightClicked.containsKey(ev.getPlayer()) && System.currentTimeMillis() - rightClicked.get(ev.getPlayer()) <= BETWEEN_INTERACT_AT_ENTITY) return; ItemClickEvent e = new ItemClickEvent(ev.getPlayer(), ev.getPlayer().getItemInHand(), i.getDelayManager().getTimeLeft(ev.getPlayer()), ev.getAction(), ev.getClickedBlock(), ev.getBlockFace()); getRegister().getEventManager().callEvent(i, e); if(e.isCancelled()) e.setCancelled(true); if(i.getDelayManager().getTimeLeft(ev.getPlayer()) == -1 && getRegister().getEventManager().hasWithoutDelay(i.getId(), ItemClickEvent.class)) handleAfter(i, ev.getPlayer(), ev.getPlayer().getItemInHand(), i.getOptions()); }; public ItemManager(ItemRegister register, Set<CustomItem> items) { super(register); this.items = items; Bukkit.getPluginManager().registerEvents(this, register.getPlugin()); } public void setItems(Set<CustomItem> items) { this.items = items; } @EventHandler public void onInteract(PlayerInteractEvent event) { Player player = event.getPlayer(); ItemStack clickedItem = player.getItemInHand(); items.forEach(item -> { if(item.compare(clickedItem) && item instanceof Clickable) handleClickableItem(event, item); }); } @EventHandler public void onDrop(PlayerDropItemEvent event) { items.forEach(item -> { if(item.compare(event.getItemDrop().getItemStack())){ if(!item.getOptions().isDrop()) event.setCancelled(true); ItemDropEvent e = new ItemDropEvent(event.getPlayer(), event.getItemDrop().getItemStack(), 
item.getDelayManager().getTimeLeft(event.getPlayer()), event.getItemDrop()); getRegister().getEventManager().callEvent(item, e); if(e.isCancelled()) event.setCancelled(true); } }); Bukkit.getOnlinePlayers().stream().map(getRegister().getUserManager()::getUser).forEach(u -> { if(u.isFreezed(event.getItemDrop().getItemStack())) event.setCancelled(true); }); } @EventHandler public void onMove(InventoryClickEvent event) { items.forEach(item -> { if((item.compare(event.getCurrentItem()) || item.compare(event.getCursor())) && !item.getOptions().isMove()) event.setCancelled(true); }); Bukkit.getOnlinePlayers().stream().map(getRegister().getUserManager()::getUser).forEach(u -> { if(u.isFreezed(event.getCursor()) || u.isFreezed(event.getCurrentItem())) event.setCancelled(true); }); } @EventHandler public void onPlayerItemHeld(PlayerItemHeldEvent event) { items.forEach(item -> { if(item.compare(event.getPlayer().getInventory().getItem(event.getPreviousSlot()))) { if(item.getDelayManager().hasDelay(event.getPlayer())) { Actionbar.send(event.getPlayer(), ""); } } }); } @EventHandler public void onEntityDamage(EntityDamageByEntityEvent event) { if(!(event.getDamager() instanceof Player)) return; Player damager = (Player) event.getDamager(); items.forEach(item -> { if(damager.getItemInHand() != null && item.compare(damager.getItemInHand()) && item instanceof EntityClickable) { if(item.getOptions().isDisabled()) return; int delay = item.getDelayManager().getTimeLeft(damager); if(item.getOptions().isCancellDefaults()) event.setCancelled(true); ItemLeftClickEntityEvent e = new ItemLeftClickEntityEvent(damager, damager.getItemInHand(), delay, event.getEntity(), event.getCause()); boolean called = getRegister().getEventManager().callEvent(item, e); if(event.getEntity() instanceof Player) { ItemLeftClickPlayerEvent e1 = new ItemLeftClickPlayerEvent(damager, damager.getItemInHand(), delay, (Player) event.getEntity(), event.getCause()); boolean called1 = 
getRegister().getEventManager().callEvent(item, e1); } if(leftClicked.contains(damager)) leftClicked.remove(damager); DamageData damageData = e.getDamageData(); if(damageData != null) { if(damageData.getModifier() != null) event.setDamage(damageData.getModifier(), damageData.getDamage()); else event.setDamage(damageData.getDamage()); } if(called) event.setCancelled(e.isCancelled()); } }); } @EventHandler public void onEntityInteract(PlayerInteractAtEntityEvent event) { Player player = event.getPlayer(); items.forEach(item -> { if(player.getItemInHand() != null && item.compare(player.getItemInHand()) && item instanceof EntityClickable) { if(item.getOptions().isDisabled()) return; int delay = item.getDelayManager().getTimeLeft(player); if(item.getOptions().isCancellDefaults()) event.setCancelled(true); ItemRightClickEntityEvent e = new ItemRightClickEntityEvent(player, player.getItemInHand(), delay, event.getRightClicked(), event.getClickedPosition()); boolean called = getRegister().getEventManager().callEvent(item, e); if(called) rightClicked.put(player, System.currentTimeMillis()); if(e.isCancelled()) event.setCancelled(true); } }); } @EventHandler public void onPlayerPickupItem(PlayerPickupItemEvent event) { Item itemEntity = event.getItem(); Player player = event.getPlayer(); items.forEach(item -> { if(item.compare(itemEntity.getItemStack())) { int delay = item.getDelayManager().getTimeLeft(player); if(item.getOptions().isCancellDefaults()) event.setCancelled(true); ItemPickupEvent e = new ItemPickupEvent(player, itemEntity.getItemStack(), delay, itemEntity, event.getRemaining()); boolean called = getRegister().getEventManager().callEvent(item, e); if(e.isCancelled()) event.setCancelled(true); } }); } @EventHandler public void onItemDespawn(org.bukkit.event.entity.ItemDespawnEvent event) { Item itemEntity = event.getEntity(); items.forEach(item -> { if(item.compare(itemEntity.getItemStack())) { if(item.getOptions().isCancellDefaults()) event.setCancelled(true); 
if(!item.getOptions().isDespawnable())
                event.setCancelled(true);
            ItemDespawnEvent e = new ItemDespawnEvent(itemEntity.getItemStack(), itemEntity, event.getLocation());
            getRegister().getEventManager().callEvent(item, e);
            if(e.isCancelled())
                event.setCancelled(true);
        }
    });
}

/**
 * Central dispatcher for PlayerInteractEvent on a registered custom item.
 * Translates the raw Bukkit interaction into the item's custom events
 * (click, left/right click, block click, hold/release variants), applying
 * the item's {@link ItemOptions} (disabled flag, default cancellation,
 * vanilla-block interaction pass-through) and the per-player use delay.
 *
 * @param event the raw Bukkit interact event being translated
 * @param item  the registered custom item matching the held stack
 */
private void handleClickableItem(PlayerInteractEvent event, CustomItem item) {
    Player player = event.getPlayer();
    ItemStack clickedItem = player.getItemInHand();
    Action action = event.getAction();
    Block block = event.getClickedBlock();
    BlockFace face = event.getBlockFace();
    ItemOptions options = item.getOptions();
    String id = item.getId();
    // A fresh interaction clears the "after-use effects already applied"
    // marker so handleAfter can run again for this interaction.
    if(isHandleAfter.contains(item))
        isHandleAfter.remove(item);
    if(options.isDisabled())
        return;
    int delay = item.getDelayManager().getTimeLeft(player);
    // Let vanilla interactable blocks (doors, chests, ...) win when the item
    // is configured to coexist with them.
    if(options.isInteractWithOthers() && action == Action.RIGHT_CLICK_BLOCK && Interactables.get().contains(block.getType()))
        return;
    if(options.isCancellDefaults())
        event.setCancelled(true);
    // Generic click event (any non-physical action).
    if(action != Action.PHYSICAL && getRegister().getEventManager().hasWith(id, ItemClickEvent.class)) {
        if(item instanceof EntityClickable && getRegister().getEventManager().hasWith(id, ItemLeftClickEntityEvent.class) && (action == Action.LEFT_CLICK_BLOCK || action == Action.LEFT_CLICK_AIR)) {
            // Defer: if an entity-damage event claims this click within
            // WAIT_FOR_DAMAGEEVENT_TICKS, the damage handler removes the
            // player from leftClicked and this consumer does nothing.
            leftClicked.add(player);
            Bukkit.getScheduler().scheduleSyncDelayedTask(getRegister().getPlugin(), () -> {
                if(leftClicked.contains(player)) {
                    clickConsumer.handle(event, item);
                    leftClicked.remove(player);
                }
            }, WAIT_FOR_DAMAGEEVENT_TICKS);
        } else {
            clickConsumer.handle(event, item);
        }
    }
    if((action == Action.RIGHT_CLICK_AIR || action == Action.RIGHT_CLICK_BLOCK) && (getRegister().getEventManager().hasWith(id, ItemRightClickEvent.class) || getRegister().getEventManager().hasWith(id, ItemRightClickHoldEvent.class))) {
        /* Avoid handling ItemRightClickEvent if ItemRightClickEntityEvent gets called */
        long current = System.currentTimeMillis();
        if(getRegister().getEventManager().hasWith(id, ItemRightClickEntityEvent.class) && rightClicked.containsKey(player) && System.currentTimeMillis() - rightClicked.get(player) <= BETWEEN_INTERACT_AT_ENTITY)
            return;
        // Time since the previous right click decides whether this counts as
        // "holding" right click (Minecraft re-fires interact while held).
        if(!lastRightClick.containsKey(player))
            lastRightClick.put(player, current);
        long lastClick = current - lastRightClick.get(player);
        lastRightClick.put(player, current);
        ItemRightClickEvent e = new ItemRightClickEvent(player, clickedItem, delay, action,event.getClickedBlock(), event.getBlockFace(), lastClick, lastClick != 0 && lastClick <= RIGHT_CLICK_HOLD_MS);
        getRegister().getEventManager().callEvent(item, e);
        if(e.isCancelled())
            event.setCancelled(true);
        if(lastClick != 0 && lastClick <= (long) RIGHT_CLICK_HOLD_MS) {
            ItemRightClickHoldEvent rightClickHoldEvent = new ItemRightClickHoldEvent(player, clickedItem, delay, action, event.getClickedBlock(), event.getBlockFace(), lastClick, true);
            getRegister().getEventManager().callEvent(item, rightClickHoldEvent);
            if(rightClickHoldEvent.isCancelled())
                event.setCancelled(true);
            if(getRegister().getEventManager().hasWith(item.getId(), ItemRightClickReleaseEvent.class)) {
                // Poll later: if no further right click arrived within the
                // hold window, the player released the button.
                Bukkit.getScheduler().scheduleSyncDelayedTask(getRegister().getPlugin(), () -> {
                    if(System.currentTimeMillis() - lastRightClick.getOrDefault(player, (long) 0) > RIGHT_CLICK_HOLD_MS) {
                        ItemRightClickReleaseEvent itemRightClickReleaseEvent = new ItemRightClickReleaseEvent(player, clickedItem, delay, action, event.getClickedBlock(), event.getBlockFace(), System.currentTimeMillis() - lastRightClick.getOrDefault(player, (long) 0), false);
                        getRegister().getEventManager().callEvent(item, itemRightClickReleaseEvent);
                        if(itemRightClickReleaseEvent.isCancelled())
                            event.setCancelled(true);
                    }
                }, RIGHT_CLICK_RELEASE_TICKS);
            }
        }
        if(delay == -1 && getRegister().getEventManager().hasWithoutDelay(id, ItemRightClickEvent.class))
            handleAfter(item, player, clickedItem, options);
    }
    if((action == Action.LEFT_CLICK_BLOCK || action == Action.LEFT_CLICK_AIR) && getRegister().getEventManager().hasWith(id, ItemLeftClickEvent.class)) {
        /* If the player already left-clicked an entity, the plain left-click
         * must not fire as well. PlayerInteractEvent arrives before the
         * entity-damage event, so defer the handling a few ticks and only run
         * it if no damage event claimed the click in the meantime. */
        if(item instanceof EntityClickable && getRegister().getEventManager().hasWith(id, ItemLeftClickEntityEvent.class)) {
            leftClicked.add(player);
            Bukkit.getScheduler().scheduleSyncDelayedTask(getRegister().getPlugin(), () -> {
                if(leftClicked.contains(player)) {
                    leftClickConsumer.handle(event, item);
                    leftClicked.remove(player);
                }
            }, WAIT_FOR_DAMAGEEVENT_TICKS);
        } else {
            leftClickConsumer.handle(event, item);
        }
    }
    // Block-targeted click variants below; each fires its own event and may
    // trigger the after-use effects when the item has no active delay.
    if((action == Action.RIGHT_CLICK_BLOCK || action == Action.LEFT_CLICK_BLOCK) && getRegister().getEventManager().hasWith(id, ItemClickBlockEvent.class)) {
        ItemClickBlockEvent e = new ItemClickBlockEvent(player, clickedItem, delay, action, block, face);
        getRegister().getEventManager().callEvent(item, e);
        if(e.isCancelled())
            event.setCancelled(true);
        if(delay == -1 && getRegister().getEventManager().hasWithoutDelay(id, ItemClickBlockEvent.class))
            handleAfter(item, player, clickedItem, options);
    }
    if(action == Action.LEFT_CLICK_BLOCK && getRegister().getEventManager().hasWith(id, ItemLeftClickBlockEvent.class)) {
        ItemLeftClickBlockEvent e = new ItemLeftClickBlockEvent(player, clickedItem, delay, action, block, face);
        getRegister().getEventManager().callEvent(item, e);
        if(e.isCancelled())
            event.setCancelled(true);
        if(delay == -1 && getRegister().getEventManager().hasWithoutDelay(id, ItemLeftClickBlockEvent.class))
            handleAfter(item, player, clickedItem, options);
    }
    if(action == Action.RIGHT_CLICK_BLOCK && getRegister().getEventManager().hasWith(id, ItemRightClickBlockEvent.class)) {
        ItemRightClickBlockEvent e = new ItemRightClickBlockEvent(player, clickedItem, delay, action, block, face);
        getRegister().getEventManager().callEvent(item, e);
        if(e.isCancelled())
            event.setCancelled(true);
        if(delay == -1 && getRegister().getEventManager().hasWithoutDelay(id, ItemRightClickBlockEvent.class))
            handleAfter(item, player, clickedItem, options);
    }
}

/**
 * Applies the item's configured after-use effects exactly once per
 * interaction: click sound, replacement item, stack-size reduction, and the
 * automatic use delay.
 *
 * @param item        the custom item that was used
 * @param player      the player who used it
 * @param clickedItem the stack that was in the player's hand at click time
 * @param options     the item's configured options
 */
private void handleAfter(CustomItem item, Player player, ItemStack clickedItem, ItemOptions options) {
    // Guard: the several click branches above may all reach here for one
    // interaction; run the effects only once.
    if(isHandleAfter.contains(item))
        return;
    isHandleAfter.add(item);
    if(options.getClickSound() != null) {
        options.getClickSound().play(player);
    }
    if(options.getItemAfterUse() != null) {
        if(item.compare(player.getItemInHand())) {
            // Clear the hand now and place the replacement one tick later.
            player.setItemInHand(null);
            Bukkit.getScheduler().scheduleSyncDelayedTask(getRegister().getPlugin(), () -> player.setItemInHand(options.getItemAfterUse()), 1);
        } else {
            ItemUtils.replaceItem(player, clickedItem, options.getItemAfterUse());
        }
    }
    if(options.getRemoveAfterUse() != 0) {
        int amount = clickedItem.getAmount();
        amount -= options.getRemoveAfterUse();
        if(amount <= 0) {
            // Stack fully consumed.
            player.setItemInHand(new ItemStack(Material.AIR));
        } else {
            clickedItem.setAmount(amount);
            player.setItemInHand(clickedItem);
        }
    }
    if(options.getAutoItemDelay() != null) {
        item.getDelayManager().startDelay(player, options.getAutoItemDelay());
    }
}

/** Internal callback type used to defer interact handling by a few ticks. */
private interface InteractConsumer {
    void handle(PlayerInteractEvent event, CustomItem item);
}
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

package org.elasticsearch.client.migration;

import org.elasticsearch.xcontent.ConstructingObjectParser;
import org.elasticsearch.xcontent.ObjectParser;
import org.elasticsearch.xcontent.ParseField;
import org.elasticsearch.xcontent.XContentParser;

import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Objects;

/**
 * Information about which system features need to be upgraded before the next
 * major version.
 */
public class GetFeatureUpgradeStatusResponse {

    private static final ParseField FEATURE_UPGRADE_STATUSES = new ParseField("features");
    private static final ParseField UPGRADE_STATUS = new ParseField("migration_status");

    private final List<FeatureUpgradeStatus> featureUpgradeStatuses;
    private final String upgradeStatus;

    @SuppressWarnings("unchecked")
    private static final ConstructingObjectParser<GetFeatureUpgradeStatusResponse, Void> PARSER = new ConstructingObjectParser<>(
        "get_feature_upgrade_response",
        true,
        (a, ctx) -> new GetFeatureUpgradeStatusResponse((List<FeatureUpgradeStatus>) a[0], (String) a[1])
    );

    static {
        // "features" may be absent in the response, hence optional.
        PARSER.declareObjectArray(ConstructingObjectParser.optionalConstructorArg(), FeatureUpgradeStatus::parse,
            FEATURE_UPGRADE_STATUSES);
        PARSER.declareField(ConstructingObjectParser.constructorArg(), (p, c) -> p.text(), UPGRADE_STATUS,
            ObjectParser.ValueType.STRING);
    }

    /**
     * Constructor for the response object
     * @param featureUpgradeStatuses A list of feature, their upgrade statuses, and other relevant information for upgrading;
     *                               may be null, in which case an empty list is used
     * @param upgradeStatus Does this feature need to be upgraded or not?
     */
    public GetFeatureUpgradeStatusResponse(List<FeatureUpgradeStatus> featureUpgradeStatuses, String upgradeStatus) {
        // Normalize a missing feature list so getters never return null.
        this.featureUpgradeStatuses = featureUpgradeStatuses != null ? featureUpgradeStatuses : Collections.emptyList();
        this.upgradeStatus = upgradeStatus;
    }

    /**
     * Parses a response object from XContent.
     * @param parser the parser positioned at the response body
     * @return the parsed response
     * @throws IOException on parse failure
     */
    public static GetFeatureUpgradeStatusResponse parse(XContentParser parser) throws IOException {
        return PARSER.apply(parser, null);
    }

    /** @return the per-feature upgrade statuses (never null) */
    public List<FeatureUpgradeStatus> getFeatureUpgradeStatuses() {
        return featureUpgradeStatuses;
    }

    /** @return the overall migration status string */
    public String getUpgradeStatus() {
        return upgradeStatus;
    }

    // equals/hashCode added for consistency with the nested value classes,
    // which already define value equality.
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        GetFeatureUpgradeStatusResponse that = (GetFeatureUpgradeStatusResponse) o;
        return Objects.equals(featureUpgradeStatuses, that.featureUpgradeStatuses)
            && Objects.equals(upgradeStatus, that.upgradeStatus);
    }

    @Override
    public int hashCode() {
        return Objects.hash(featureUpgradeStatuses, upgradeStatus);
    }

    /**
     * This class represents a particular feature and whether it needs to be upgraded.
     */
    public static class FeatureUpgradeStatus {
        private final String featureName;
        private final String minimumIndexVersion;
        private final String upgradeStatus;
        private final List<IndexVersion> indexVersions;

        private static final ParseField FEATURE_NAME = new ParseField("feature_name");
        private static final ParseField MINIMUM_INDEX_VERSION = new ParseField("minimum_index_version");
        private static final ParseField UPGRADE_STATUS = new ParseField("migration_status");
        private static final ParseField INDEX_VERSIONS = new ParseField("indices");

        @SuppressWarnings("unchecked")
        private static final ConstructingObjectParser<FeatureUpgradeStatus, Void> PARSER = new ConstructingObjectParser<>(
            "feature_upgrade_status",
            true,
            (a, ctx) -> new FeatureUpgradeStatus((String) a[0], (String) a[1], (String) a[2], (List<IndexVersion>) a[3])
        );

        static {
            PARSER.declareField(ConstructingObjectParser.constructorArg(), (p, c) -> p.text(), FEATURE_NAME,
                ObjectParser.ValueType.STRING);
            PARSER.declareField(
                ConstructingObjectParser.constructorArg(),
                (p, c) -> p.text(),
                MINIMUM_INDEX_VERSION,
                ObjectParser.ValueType.STRING
            );
            PARSER.declareField(
                ConstructingObjectParser.constructorArg(),
                (p, c) -> p.text(),
                UPGRADE_STATUS,
                ObjectParser.ValueType.STRING
            );
            PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), IndexVersion::parse, INDEX_VERSIONS);
        }

        /**
         * A feature upgrade status object
         * @param featureName Name of the feature
         * @param minimumIndexVersion The earliest version of Elasticsearch used to create one of this feature's system indices
         * @param upgradeStatus Whether this feature needs to be upgraded
         * @param indexVersions A list of individual indices and which version of Elasticsearch created them
         */
        public FeatureUpgradeStatus(
            String featureName,
            String minimumIndexVersion,
            String upgradeStatus,
            List<IndexVersion> indexVersions
        ) {
            this.featureName = featureName;
            this.minimumIndexVersion = minimumIndexVersion;
            this.upgradeStatus = upgradeStatus;
            this.indexVersions = indexVersions;
        }

        /**
         * Parses a feature status entry from XContent.
         * @param parser the parser positioned at the entry
         * @param ctx unused parse context (Void)
         */
        public static FeatureUpgradeStatus parse(XContentParser parser, Void ctx) {
            // Forward ctx for consistency with IndexVersion.parse; it is Void
            // and therefore always null, so behavior is unchanged.
            return PARSER.apply(parser, ctx);
        }

        public String getFeatureName() {
            return featureName;
        }

        public String getMinimumIndexVersion() {
            return minimumIndexVersion;
        }

        public String getUpgradeStatus() {
            return upgradeStatus;
        }

        public List<IndexVersion> getIndexVersions() {
            return indexVersions;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            FeatureUpgradeStatus that = (FeatureUpgradeStatus) o;
            return Objects.equals(featureName, that.featureName)
                && Objects.equals(minimumIndexVersion, that.minimumIndexVersion)
                && Objects.equals(upgradeStatus, that.upgradeStatus)
                && Objects.equals(indexVersions, that.indexVersions);
        }

        @Override
        public int hashCode() {
            return Objects.hash(featureName, minimumIndexVersion, upgradeStatus, indexVersions);
        }
    }

    /**
     * A class representing an index and the version of Elasticsearch that created it.
     */
    public static class IndexVersion {
        private final String indexName;
        private final String version;

        /**
         * Constructor
         * @param indexName Name of a concrete index
         * @param version Version of Elasticsearch used to create the index
         */
        public IndexVersion(String indexName, String version) {
            this.indexName = indexName;
            this.version = version;
        }

        private static final ParseField INDEX_NAME = new ParseField("index");
        private static final ParseField VERSION = new ParseField("version");

        private static final ConstructingObjectParser<IndexVersion, Void> PARSER = new ConstructingObjectParser<>(
            "index_version",
            true,
            (a, ctx) -> new IndexVersion((String) a[0], (String) a[1])
        );

        static {
            PARSER.declareField(ConstructingObjectParser.constructorArg(), (p, c) -> p.text(), INDEX_NAME,
                ObjectParser.ValueType.STRING);
            PARSER.declareField(ConstructingObjectParser.constructorArg(), (p, c) -> p.text(), VERSION,
                ObjectParser.ValueType.STRING);
        }

        /**
         * Parses an index/version pair from XContent.
         * @param parser the parser positioned at the entry
         * @param ctx unused parse context (Void)
         */
        public static IndexVersion parse(XContentParser parser, Void ctx) {
            return PARSER.apply(parser, ctx);
        }

        public String getIndexName() {
            return indexName;
        }

        public String getVersion() {
            return version;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            IndexVersion that = (IndexVersion) o;
            return Objects.equals(indexName, that.indexName) && Objects.equals(version, that.version);
        }

        @Override
        public int hashCode() {
            return Objects.hash(indexName, version);
        }
    }
}