code
stringlengths
3
1.05M
repo_name
stringlengths
4
116
path
stringlengths
4
991
language
stringclasses
9 values
license
stringclasses
15 values
size
int32
3
1.05M
// // Program Name - S2_Box.cpp // Series: GetOnToC++ Step: 2 // // Purpose: This program demonstrates declaration of Member Variables of Box // class, creation of Box Object and calculating volume of the Box. // // Compile: g++ S2_Box.cpp -o S2_Box // Execute: ./S2_Box // // Created by Narayan Mahadevan on 18/08/13. // Copyright (c) 2013 MakeTechEz. All rights reserved. // #include <iostream> using namespace std; class Box { public: double mLength; // Length of a box double mWidth; // Width of a box double mHeight; // Height of a box }; /* * This function calculates the Volume of the Box * Input Param: box Object of user defined data type Box * return: volume of the Box * Note: Here the scope of box Parameters is local to this function * and hence it is call-by-value and the box object will have its own * memory */ double volumeOfBox(Box box) { double boxCarVol = 0.0; // Box Car Volume Computation using Arithmatic Operations boxCarVol = box.mHeight * box.mWidth * box.mLength; // Returns the Box Car Volume return boxCarVol; } int main( ) { // Creating box1 and box2 as Object of Box Class Box box1; // Declare instance box1 of type Box Box box2; // Declare instance box2 of type Box // Since memory is allocated, box1 and box2 will have definite // memory size. // Calculating the size of box1 Object cout << "Size of box1 Object = " << sizeof(box1) << endl; // Calculating the size of box2 Object cout << "Size of box2 Object = " << sizeof(box2) << endl; double volume = 0.0; // Store the volume of a box here // box 1 specification. Assigning value to member variables box1.mHeight = 4.0; box1.mLength = 6.0; box1.mWidth = 8.0; // box 2 specification box2.mHeight = 10.0; box2.mLength = 12.0; box2.mWidth = 14.0; // volume of box 1 volume = volumeOfBox(box1); cout << "Volume of Box1 : " << volume <<endl; // volume of box 2 volume = volumeOfBox(box2); cout << "Volume of Box2 : " << volume <<endl; return 0; }
NarayanMahadevan/MakeTechEz
GetOnToC++/Step_2/S2_Box.cpp
C++
apache-2.0
2,448
/* * Copyright © 2002-2011 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using System; using System.Collections; using System.Collections.Generic; using System.IO; using System.Reflection; using System.Runtime.Serialization; using Solenoid.Expressions.Parser; using Solenoid.Expressions.Parser.antlr; using Solenoid.Expressions.Parser.antlr.collections; using Solenoid.Expressions.Support; using Solenoid.Expressions.Support.Reflection.Dynamic; using Solenoid.Expressions.Support.Util; using StringUtils = Solenoid.Expressions.Support.Util.StringUtils; namespace Solenoid.Expressions { /// <summary> /// Container object for the parsed expression. /// </summary> /// <remarks> /// <p> /// Preparing this object once and reusing it many times for expression /// evaluation can result in significant performance improvements, as /// expression parsing and reflection lookups are only performed once. /// </p> /// </remarks> /// <author>Aleksandar Seovic</author> [Serializable] public class Expression : BaseNode { /// <summary> /// Contains a list of reserved variable names. /// You must not use any variable names with the reserved prefix! 
/// </summary> public static class ReservedVariableNames { /// <summary> /// Variable Names using this prefix are reserved for internal framework use /// </summary> public const string ReservedPrefix = "____spring_"; /// <summary> /// variable name of the currently processed object factory, if any /// </summary> internal static readonly string CurrentObjectFactory = ReservedPrefix + "CurrentObjectFactory"; } private class AstNodeCreator : ASTNodeCreator { private readonly SafeConstructor _ctor; private readonly string _name; public AstNodeCreator(ConstructorInfo ctor) { _ctor = new SafeConstructor(ctor); _name = ctor.DeclaringType.FullName; } public override AST Create() { return (AST) _ctor.Invoke(new object[0]); } public override string ASTNodeTypeName { get { return _name; } } } private class SpringAstFactory : ASTFactory { private static readonly Type _basenodeType; private static readonly Hashtable _typename2Creator; static SpringAstFactory() { _basenodeType = typeof (SerializableNode); _typename2Creator = new Hashtable(); foreach (var type in typeof (SpringAstFactory).Assembly.GetTypes()) { if (_basenodeType.IsAssignableFrom(type)) { var ctor = type.GetConstructor(new Type[0]); if (ctor != null) { var creator = new AstNodeCreator(ctor); _typename2Creator[creator.ASTNodeTypeName] = creator; } } } _typename2Creator[_basenodeType.FullName] = Creator; } public SpringAstFactory() : base(_basenodeType) { defaultASTNodeTypeObject_ = _basenodeType; typename2creator_ = _typename2Creator; } } private class SolenoidExpressionParser : ExpressionParser { public SolenoidExpressionParser(TokenStream lexer) : base(lexer) { astFactory = new SpringAstFactory(); initialize(); } } static Expression() { // Ensure antlr is loaded (fixes GAC issues)! var antlrAss = typeof (LLkParser).Assembly; } /// <summary> /// Initializes a new instance of the <see cref="Expression" /> class /// by parsing specified expression string. 
/// </summary> /// <param name="expression">Expression to parse.</param> public static IExpression Parse(string expression) { if (StringUtils.HasText(expression)) { var lexer = new ExpressionLexer(new StringReader(expression)); ExpressionParser parser = new SolenoidExpressionParser(lexer); try { parser.expr(); } catch (TokenStreamRecognitionException ex) { throw new SyntaxErrorException(ex.recog.Message, ex.recog.Line, ex.recog.Column, expression); } return (IExpression) parser.getAST(); } return new Expression(); } /// <summary> /// Registers lambda expression under the specified <paramref name="functionName" />. /// </summary> /// <param name="functionName">Function name to register expression as.</param> /// <param name="lambdaExpression">Lambda expression to register.</param> /// <param name="variables">Variables dictionary that the function will be registered in.</param> public static void RegisterFunction(string functionName, string lambdaExpression, IDictionary variables) { AssertUtils.ArgumentHasText(functionName, "functionName"); AssertUtils.ArgumentHasText(lambdaExpression, "lambdaExpression"); var lexer = new ExpressionLexer(new StringReader(lambdaExpression)); ExpressionParser parser = new SolenoidExpressionParser(lexer); try { parser.lambda(); } catch (TokenStreamRecognitionException ex) { throw new SyntaxErrorException(ex.recog.Message, ex.recog.Line, ex.recog.Column, lambdaExpression); } variables[functionName] = parser.getAST(); } /// <summary> /// Initializes a new instance of the <see cref="Expression" /> class /// by parsing specified primary expression string. 
/// </summary> /// <param name="expression">Primary expression to parse.</param> internal static IExpression ParsePrimary(string expression) { if (StringUtils.HasText(expression)) { var lexer = new ExpressionLexer(new StringReader(expression)); ExpressionParser parser = new SolenoidExpressionParser(lexer); try { parser.primaryExpression(); } catch (TokenStreamRecognitionException ex) { throw new SyntaxErrorException(ex.recog.Message, ex.recog.Line, ex.recog.Column, expression); } return (IExpression) parser.getAST(); } return new Expression(); } /// <summary> /// Initializes a new instance of the <see cref="Expression" /> class /// by parsing specified property expression string. /// </summary> /// <param name="expression">Property expression to parse.</param> internal static IExpression ParseProperty(string expression) { if (StringUtils.HasText(expression)) { var lexer = new ExpressionLexer(new StringReader(expression)); ExpressionParser parser = new SolenoidExpressionParser(lexer); try { parser.property(); } catch (TokenStreamRecognitionException ex) { throw new SyntaxErrorException(ex.recog.Message, ex.recog.Line, ex.recog.Column, expression); } return (IExpression) parser.getAST(); } return new Expression(); } /// <summary> /// Initializes a new instance of the <see cref="Expression" /> class. /// </summary> public Expression() { } /// <summary> /// Create a new instance from SerializationInfo /// </summary> protected Expression(SerializationInfo info, StreamingContext context) : base(info, context) { } /// <summary> /// Evaluates this expression for the specified root object and returns /// value of the last node. 
/// </summary> /// <param name="context">Context to evaluate expressions against.</param> /// <param name="evalContext">Current expression evaluation context.</param> /// <returns>Value of the last node.</returns> protected override object Get(object context, EvaluationContext evalContext) { var result = context; if (getNumberOfChildren() > 0) { var node = getFirstChild(); while (node != null) { result = GetValue(((BaseNode) node), result, evalContext); node = node.getNextSibling(); } } return result; } /// <summary> /// Evaluates this expression for the specified root object and sets /// value of the last node. /// </summary> /// <param name="context">Context to evaluate expressions against.</param> /// <param name="evalContext">Current expression evaluation context.</param> /// <param name="newValue">Value to set last node to.</param> /// <exception cref="NotSupportedException">If navigation expression is empty.</exception> protected override void Set(object context, EvaluationContext evalContext, object newValue) { var target = context; if (getNumberOfChildren() > 0) { var node = getFirstChild(); for (var i = 0; i < getNumberOfChildren() - 1; i++) { try { target = GetValue(((BaseNode) node), target, evalContext); node = node.getNextSibling(); } catch (NotReadablePropertyException e) { throw new NotWritablePropertyException( "Cannot read the value of '" + node.getText() + "' property in the expression.", e); } } SetValue(((BaseNode) node), target, evalContext, newValue); } else { throw new NotSupportedException("You cannot set the value for an empty expression."); } } /// <summary> /// Evaluates this expression for the specified root object and returns /// <see cref="PropertyInfo" /> of the last node, if possible. 
/// </summary> /// <param name="context">Context to evaluate expression against.</param> /// <param name="variables">Expression variables map.</param> /// <returns>Value of the last node.</returns> internal PropertyInfo GetPropertyInfo(object context, IDictionary<string, object> variables) { if (getNumberOfChildren() > 0) { var target = context; var node = getFirstChild(); for (var i = 0; i < getNumberOfChildren() - 1; i++) { target = ((IExpression) node).GetValue(target, variables); node = node.getNextSibling(); } var fieldNode = node as PropertyOrFieldNode; if (fieldNode != null) { return (PropertyInfo) fieldNode.GetMemberInfo(target); } var indexerNode = node as IndexerNode; if (indexerNode != null) { return indexerNode.GetPropertyInfo(target, variables); } throw new FatalReflectionException( "Cannot obtain PropertyInfo from an expression that does not resolve to a property or an indexer."); } throw new FatalReflectionException("Cannot obtain PropertyInfo for empty property name."); } } }
jakesays/Solenoid-Expressions
SolenoidExpressions/Expression.cs
C#
apache-2.0
11,002
/* * Copyright 2018 Esri * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.esri.arcgisruntime.sample.generateofflinemapwithlocalbasemap; import android.graphics.Color; import android.os.Bundle; import android.util.Log; import android.widget.Button; import android.widget.Toast; import androidx.appcompat.app.AppCompatActivity; import com.esri.arcgisruntime.ArcGISRuntimeEnvironment; import com.esri.arcgisruntime.concurrent.Job; import com.esri.arcgisruntime.concurrent.ListenableFuture; import com.esri.arcgisruntime.geometry.Envelope; import com.esri.arcgisruntime.geometry.Point; import com.esri.arcgisruntime.loadable.LoadStatus; import com.esri.arcgisruntime.mapping.ArcGISMap; import com.esri.arcgisruntime.mapping.view.Graphic; import com.esri.arcgisruntime.mapping.view.GraphicsOverlay; import com.esri.arcgisruntime.mapping.view.MapView; import com.esri.arcgisruntime.portal.Portal; import com.esri.arcgisruntime.portal.PortalItem; import com.esri.arcgisruntime.symbology.SimpleLineSymbol; import com.esri.arcgisruntime.tasks.offlinemap.GenerateOfflineMapJob; import com.esri.arcgisruntime.tasks.offlinemap.GenerateOfflineMapParameters; import com.esri.arcgisruntime.tasks.offlinemap.GenerateOfflineMapResult; import com.esri.arcgisruntime.tasks.offlinemap.OfflineMapTask; import java.io.File; import java.util.concurrent.ExecutionException; public class MainActivity extends AppCompatActivity implements ProgressDialogFragment.OnProgressDialogDismissListener, 
LocalBasemapAlertDialogFragment.OnClickListener { private static final String TAG = MainActivity.class.getSimpleName(); private MapView mMapView; private Button mTakeMapOfflineButton; private GraphicsOverlay mGraphicsOverlay; private Graphic mDownloadArea; private GenerateOfflineMapJob mGenerateOfflineMapJob; private GenerateOfflineMapParameters mGenerateOfflineMapParameters; private String mLocalBasemapDirectory; private OfflineMapTask mOfflineMapTask; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); // authentication with an API key or named user is required // to access basemaps and other location services ArcGISRuntimeEnvironment.setApiKey(BuildConfig.API_KEY); // get a reference to the map view mMapView = findViewById(R.id.mapView); // access button to take the map offline and disable it until a download area has been defined mTakeMapOfflineButton = findViewById(R.id.takeMapOfflineButton); mTakeMapOfflineButton.setEnabled(false); // create a portal item with the itemId of the web map Portal portal = new Portal(getString(R.string.portal_url), false); PortalItem portalItem = new PortalItem(portal, getString(R.string.item_id)); // create a map with the portal item and set to the map view ArcGISMap map = new ArcGISMap(portalItem); map.addDoneLoadingListener(() -> { if (map.getLoadStatus() == LoadStatus.LOADED) { // limit the map scale to the largest layer scale map.setMaxScale(map.getOperationalLayers().get(6).getMaxScale()); map.setMinScale(map.getOperationalLayers().get(6).getMinScale()); // enable the take map offline button only after the map is loaded mTakeMapOfflineButton.setEnabled(true); } else { String error = "Map failed to load: " + map.getLoadError().getMessage(); Toast.makeText(this, error, Toast.LENGTH_LONG).show(); Log.e(TAG, error); } }); mMapView.setMap(map); // create a graphics overlay for the map view mGraphicsOverlay = new GraphicsOverlay(); 
mMapView.getGraphicsOverlays().add(mGraphicsOverlay); // create a graphic to show a box around the extent we want to download mDownloadArea = new Graphic(); mGraphicsOverlay.getGraphics().add(mDownloadArea); SimpleLineSymbol simpleLineSymbol = new SimpleLineSymbol(SimpleLineSymbol.Style.SOLID, Color.RED, 2); mDownloadArea.setSymbol(simpleLineSymbol); // update the download area box whenever the viewpoint changes mMapView.addViewpointChangedListener(viewpointChangedEvent -> { updateDownloadArea(); }); // when the button is clicked, start the offline map task job mTakeMapOfflineButton.setOnClickListener(v -> { updateDownloadArea(); // specify the extent, min scale, and max scale as parameters double minScale = mMapView.getMapScale(); double maxScale = mMapView.getMap().getMaxScale(); // minScale must always be larger than maxScale if (minScale <= maxScale) { minScale = maxScale + 1; } // create an offline map task with the map mOfflineMapTask = new OfflineMapTask(mMapView.getMap()); // create default generate offline map parameters ListenableFuture<GenerateOfflineMapParameters> generateOfflineMapParametersFuture = mOfflineMapTask .createDefaultGenerateOfflineMapParametersAsync(mDownloadArea.getGeometry(), minScale, maxScale); generateOfflineMapParametersFuture.addDoneListener(() -> { try { mGenerateOfflineMapParameters = generateOfflineMapParametersFuture.get(); // set the path to the references basemap directory mGenerateOfflineMapParameters.setReferenceBasemapFilename(getString(R.string.naperville_tpkx)); mGenerateOfflineMapParameters.setReferenceBasemapDirectory(getExternalFilesDir(null).getAbsolutePath()); // name of local basemap file as supplied by the map's author String localBasemapFileName = mGenerateOfflineMapParameters.getReferenceBasemapFilename(); // check if the offline map parameters include reference to a basemap file if (!localBasemapFileName.isEmpty()) { // search for the given file name in the app's scoped storage File localBasemapFile = 
searchForFile(getExternalFilesDir(null), localBasemapFileName); // if a file of the given name was found if (localBasemapFile != null) { // get the file's directory mLocalBasemapDirectory = localBasemapFile.getParent(); showLocalBasemapAlertDialog(localBasemapFileName); Log.i(TAG, "Local basemap file found in: " + mLocalBasemapDirectory); } else { String error = "Local basemap file " + localBasemapFileName + " not found!"; Toast.makeText(this, error, Toast.LENGTH_LONG).show(); Log.e(TAG, error); } } else { String message = "The map's author has not specified a local basemap"; Toast.makeText(this, message, Toast.LENGTH_LONG).show(); Log.i(TAG, message); } } catch (ExecutionException | InterruptedException e) { String error = "Error creating generate offline map parameters: " + e.getMessage(); Toast.makeText(this, error, Toast.LENGTH_LONG).show(); Log.e(TAG, error); } }); }); } /** * Function to update the download area on map viewpoint change * or on button click. */ private void updateDownloadArea() { // upper left corner of the area to take offline android.graphics.Point minScreenPoint = new android.graphics.Point(200, 200); // lower right corner of the downloaded area android.graphics.Point maxScreenPoint = new android.graphics.Point(mMapView.getWidth() - 200, mMapView.getHeight() - 200); // convert screen points to map points Point minPoint = mMapView.screenToLocation(minScreenPoint); Point maxPoint = mMapView.screenToLocation(maxScreenPoint); // use the points to define and return an envelope if (minPoint != null && maxPoint != null) { Envelope envelope = new Envelope(minPoint, maxPoint); mDownloadArea.setGeometry(envelope); } } /** * Use the generate offline map job to generate an offline map. 
*/ private void generateOfflineMap() { // cancel previous job request if (mGenerateOfflineMapJob != null) { mGenerateOfflineMapJob.cancel(); } mTakeMapOfflineButton.setEnabled(false); // delete any offline map already in the cache String tempDirectoryPath = getCacheDir() + File.separator + "offlineMap"; deleteDirectory(new File(tempDirectoryPath)); // create an offline map job with the download directory path and parameters and start the job mGenerateOfflineMapJob = mOfflineMapTask.generateOfflineMap(mGenerateOfflineMapParameters, tempDirectoryPath); // replace the current map with the result offline map when the job finishes mGenerateOfflineMapJob.addJobDoneListener(() -> { if (mGenerateOfflineMapJob.getStatus() == Job.Status.SUCCEEDED) { GenerateOfflineMapResult result = mGenerateOfflineMapJob.getResult(); mMapView.setMap(result.getOfflineMap()); mGraphicsOverlay.getGraphics().clear(); findProgressDialogFragment().dismiss(); Toast.makeText(this, "Now displaying offline map.", Toast.LENGTH_LONG).show(); } else { String error = "Error in generate offline map job: " + mGenerateOfflineMapJob.getError().getAdditionalMessage(); Toast.makeText(this, error, Toast.LENGTH_LONG).show(); Log.e(TAG, error); } }); // start the job mGenerateOfflineMapJob.start(); if (findProgressDialogFragment() == null) { ProgressDialogFragment progressDialogFragment = ProgressDialogFragment.newInstance( getString(R.string.generate_offline_map_job_title), getString(R.string.taking_map_offline_message), getString(R.string.cancel) ); progressDialogFragment.show(getSupportFragmentManager(), ProgressDialogFragment.class.getSimpleName()); } // show the job's progress with the progress dialog mGenerateOfflineMapJob.addProgressChangedListener(() -> { if (findProgressDialogFragment() != null) { findProgressDialogFragment().setProgress(mGenerateOfflineMapJob.getProgress()); } }); } /** * Creates an alert notifying the user that a local basemap has been found on the device and asks whether the user * 
wishes to use that basemap, rather than download one with the rest of the generate offline map job. */ private void showLocalBasemapAlertDialog(String localBasemapFileName) { if (getSupportFragmentManager().findFragmentByTag(LocalBasemapAlertDialogFragment.class.getSimpleName()) == null) { LocalBasemapAlertDialogFragment localBasemapAlertFragment = LocalBasemapAlertDialogFragment.newInstance( getString(R.string.local_basemap_found), getString(R.string.local_basemap_found_message, localBasemapFileName), getString(R.string.yes), getString(R.string.no) ); localBasemapAlertFragment .show(getSupportFragmentManager(), LocalBasemapAlertDialogFragment.class.getSimpleName()); } } /** * Callback from the local basemap alert dialog. Sets the reference basemap directory and calls generateOfflineMap(). */ @Override public void onPositiveClick() { // set the directory of the local base map to the parameters mGenerateOfflineMapParameters.setReferenceBasemapDirectory(mLocalBasemapDirectory); // call generate offline map with parameters which now contain a reference basemap directory generateOfflineMap(); } /** * Callback from the local basemap alert dialog. Leaves the reference basemap directory empty and calls * generateOfflineMap(). */ @Override public void onNegativeClick() { // call generate offline map with parameters which contain an empty string for reference basemap directory generateOfflineMap(); } /** * Find and return the progress dialog fragment. * @return the progress dialog fragment. 
*/ private ProgressDialogFragment findProgressDialogFragment() { return (ProgressDialogFragment) getSupportFragmentManager() .findFragmentByTag(ProgressDialogFragment.class.getSimpleName()); } @Override public void onProgressDialogDismiss() { if (mGenerateOfflineMapJob != null) { mGenerateOfflineMapJob.cancel(); } } @Override protected void onPause() { mMapView.pause(); super.onPause(); } @Override protected void onResume() { super.onResume(); mMapView.resume(); } @Override protected void onDestroy() { mMapView.dispose(); super.onDestroy(); } /** * Recursively search the given file for the given file name. * * @param file to search in * @param fileName to search for * @return the file being searched for or, of not found, null */ private static File searchForFile(File file, String fileName) { if (file.isDirectory()) { File[] arr = file.listFiles(); for (File f : arr) { File found = searchForFile(f, fileName); if (found != null) return found; } } else { if (file.getName().equals(fileName)) { return file; } } return null; } /** * Recursively deletes all files in the given directory. * @param file to delete */ private static void deleteDirectory(File file) { if (file.isDirectory()) for (File subFile : file.listFiles()) { deleteDirectory(subFile); } if (!file.delete()) { Log.e(TAG, "Failed to delete file: " + file.getPath()); } } }
Esri/arcgis-runtime-samples-android
java/generate-offline-map-with-local-basemap/src/main/java/com/esri/arcgisruntime/sample/generateofflinemapwithlocalbasemap/MainActivity.java
Java
apache-2.0
14,060
package ua.com.lsd25.controller.handler.register; import org.apache.log4j.Logger; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.ExceptionHandler; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestControllerAdvice; import ua.com.lsd25.controller.rest.ServerResponse; /** * @author vzagnitko */ @RestControllerAdvice public class UserRegisterAlreadyRegisterHandlingController { private static final Logger LOG = Logger.getLogger(UserRegisterAlreadyRegisterHandlingController.class); @ResponseStatus(HttpStatus.CONFLICT) @ExceptionHandler(UserAlreadyRegisterException.class) public ResponseEntity<ServerResponse> validationHandler(UserAlreadyRegisterException exc) { LOG.error(exc); int status = HttpStatus.CONFLICT.value(); return ResponseEntity.status(status).body(new ServerResponse(exc.getLocalizedMessage(), status)); } }
vzagnitko/BluetoothMusicServer
src/main/java/ua/com/lsd25/controller/handler/register/UserRegisterAlreadyRegisterHandlingController.java
Java
apache-2.0
1,032
<?php /** * Application level Controller * * This file is application-wide controller file. You can put all * application-wide controller-related methods here. * * PHP 5 * * CakePHP(tm) : Rapid Development Framework (http://cakephp.org) * Copyright 2005-2012, Cake Software Foundation, Inc. (http://cakefoundation.org) * * Licensed under The MIT License * Redistributions of files must retain the above copyright notice. * * @copyright Copyright 2005-2012, Cake Software Foundation, Inc. (http://cakefoundation.org) * @link http://cakephp.org CakePHP(tm) Project * @package app.Controller * @since CakePHP(tm) v 0.2.9 * @license MIT License (http://www.opensource.org/licenses/mit-license.php) */ App::uses('Controller', 'Controller','File','Utility'); /** * Application Controller * * Add your application-wide methods in the class below, your controllers * will inherit them. * * @package app.Controller * @link http://book.cakephp.org/2.0/en/controllers.html#the-app-controller */ class AppController extends Controller { public $components = array('Auth', 'Session', 'RequestHandler','Readtsv'); public $uses = array('User','Category','Test','Question','Answer','Comment'); public function beforeFilter() { $this->disableCache(); $this->Auth->authenticate = array( 'Form' => array( 'userModel' => 'User', 'fields' => array('username' => 'Username', 'password' => 'Password'), 'scope' => array('Status' => 1), ) ); $this->Auth->loginAction = array('controller' => 'users', 'action' => 'login'); $this->Auth->loginError = 'ユーザー名又はパスワードが間違った。'; $this->Auth->authError = 'このページを表示するために、ログインしてください。'; $this->set('categories', $this->Category->getCategories()); $this->set('userType', $this->Auth->user('UserType')); } function _loggedIn(){ $logged_in = FALSE; if($this->Auth->user()){ if( $this->Auth->user('nv_del_flg') == 0){ $logged_in = TRUE; } } return $logged_in; } //Lay gia tri username hien tai function _usersUsername(){ $users_username=NULL; if($this->Auth->user()){ // 
debug($this->Auth->user()); $users_username['FullName'] = $this->Auth->user('FullName'); $users_username['UserId'] = $this->Auth->user('UserId'); $users_username['Username'] = $this->Auth->user('Username'); } return $users_username; } function _usersType(){ $UserType = NULL; if($this->Auth->user()){ $UserType = $this->Auth->user('UserType'); }; return $UserType; } function beforeRender() { $this->set('pageTitle', $this->pageTitle); $this->set('logged_in',$this->_loggedIn()); $this->set('users_username',$this->_usersUsername()); $this->set('user_type',$this->_usersType()); } }
vinhle91/elearning
app/Controller/AppController.php
PHP
apache-2.0
2,958
/* * Copyright (c) 2010-2014 Evolveum and contributors * * This work is dual-licensed under the Apache License 2.0 * and European Union Public License. See LICENSE file for details. */ package com.evolveum.midpoint.repo.sql; /** * @author mederly */ public class SerializationRelatedException extends RuntimeException { public SerializationRelatedException(String message) { super(message); } public SerializationRelatedException(Throwable ex) { super(ex); } }
bshp/midPoint
repo/repo-sql-impl/src/main/java/com/evolveum/midpoint/repo/sql/SerializationRelatedException.java
Java
apache-2.0
504
// [[[[INFO>
// Copyright 2015 Raging Bool (http://ragingbool.org, https://github.com/RagingBool)
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// For more information check https://github.com/RagingBool/RagingBool.Carcosa
// ]]]]

namespace RagingBool.Carcosa.Core
{
    /// <summary>
    /// Marker message carrying only the timestamp inherited from
    /// <see cref="TimedMessage"/>; it adds no state or behavior of its own.
    /// NOTE(review): presumably signals the start of a run/sequence — the
    /// consumer is not visible here; confirm against the message dispatcher.
    /// </summary>
    internal sealed class StartMessage : TimedMessage
    {
        /// <param name="time">Timestamp forwarded unchanged to the base message.</param>
        public StartMessage(double time)
            : base(time)
        {
        }
    }
}
RagingBool/RagingBool.Carcosa
projects/RagingBool.Carcosa.Core_cs/StartMessage.cs
C#
apache-2.0
916
/* * Copyright (c) 2017 Michael Krotscheck * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy * of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * limitations under the License. * */ package net.krotscheck.kangaroo.common.cors; import com.google.common.net.HttpHeaders; import net.krotscheck.kangaroo.test.jersey.ContainerTest; import org.glassfish.jersey.internal.inject.InjectionManager; import org.glassfish.jersey.internal.inject.Injections; import org.glassfish.jersey.server.ResourceConfig; import org.junit.Test; import javax.ws.rs.GET; import javax.ws.rs.HttpMethod; import javax.ws.rs.OPTIONS; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.container.ContainerResponseFilter; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.MultivaluedHashMap; import javax.ws.rs.core.MultivaluedMap; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import static org.hamcrest.core.IsCollectionContaining.hasItems; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; /** * Unit tests for the CORS filter. * * @author Michael Krotscheck */ public final class CORSFilterTest extends ContainerTest { /** * Create an application. * * @return The application to test. 
*/ @Override protected ResourceConfig createApplication() { ResourceConfig config = new ResourceConfig(); config.register(CORSFilter.class); config.register(new AllowedHeaders(new String[]{ "One", "Two", "Three", "" })); config.register(new ExposedHeaders(new String[]{ "One", "Two", "Three", "" })); // Inject a set of basic HTTP methods. config.register(new AllowedMethods(new String[]{ HttpMethod.GET, HttpMethod.OPTIONS, "" })); config.register(new TestCORSValidator.Binder()); config.register(MockService.class); return config; } /** * Assert that we can inject values using this binder. * * @throws Exception An authenticator exception. */ @Test public void testBinder() throws Exception { InjectionManager injector = Injections.createInjectionManager(); injector.register(new TestCORSValidator.Binder()); injector.register(new CORSFilter.Binder()); List<ContainerResponseFilter> crfList = injector.getAllInstances(ContainerResponseFilter.class); assertEquals(1, crfList.size()); // assert singleton. ContainerResponseFilter crf2 = injector.getInstance(ContainerResponseFilter.class); assertSame(crf2, crfList.get(0)); injector.shutdown(); } /** * Validate that the full set of expected headers (and values) are in the * received headers. * * @param expected List of expected headers & values. * @param received List of received headers. */ private void validateContainsHeaders( final MultivaluedMap<String, Object> expected, final MultivaluedMap<String, Object> received) { expected.forEach((key, values) -> { assertTrue(received.containsKey(key)); assertThat(received.get(key), hasItems(values.toArray())); }); } /** * If the Origin header is not present terminate this set of steps. The * request is outside the scope of this specification. 
*/ @Test public void testRegularWithNoOrigin() { MultivaluedMap<String, Object> reqHeaders = new MultivaluedHashMap<>(); reqHeaders.add("One", "One"); reqHeaders.add("Two", "Two"); reqHeaders.add("Three", "Three"); Response r = this.target("/") .request() .headers(reqHeaders) .build("GET") .invoke(); MultivaluedMap<String, Object> expHeaders = new MultivaluedHashMap<>(); expHeaders.add(HttpHeaders.VARY, HttpHeaders.ORIGIN); List<String> omittedHeaders = new ArrayList<>(); omittedHeaders.addAll(Arrays.asList( HttpHeaders.ACCESS_CONTROL_ALLOW_CREDENTIALS, HttpHeaders.ACCESS_CONTROL_ALLOW_HEADERS, HttpHeaders.ACCESS_CONTROL_ALLOW_METHODS, HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN, HttpHeaders.ACCESS_CONTROL_EXPOSE_HEADERS, HttpHeaders.ACCESS_CONTROL_MAX_AGE)); assertEquals(200, r.getStatus()); validateContainsHeaders(expHeaders, r.getHeaders()); validateOmitHeaders(omittedHeaders, r.getHeaders()); } /** * Validate the omitted headers. * * @param omittedHeaders The headers we do not want to see in a response. * @param received The headers received. */ private void validateOmitHeaders( final List<String> omittedHeaders, final MultivaluedMap<String, Object> received) { long foundHeaders = received.keySet().stream() .filter(omittedHeaders::contains) .count(); assertEquals(0, foundHeaders); } /** * If the value of the Origin header is not a case-sensitive match for any * of the values in list of origins, do not set any additional headers and * terminate this set of steps. 
*/ @Test public void testRegularWithUnregisteredOrigin() { MultivaluedMap<String, Object> reqHeaders = new MultivaluedHashMap<>(); reqHeaders.add(HttpHeaders.ORIGIN, "http://invalid.example.com"); reqHeaders.add("One", "One"); reqHeaders.add("Two", "Two"); reqHeaders.add("Three", "Three"); Response r = this.target("/") .request() .headers(reqHeaders) .build("GET") .invoke(); MultivaluedMap<String, Object> expHeaders = new MultivaluedHashMap<>(); expHeaders.add(HttpHeaders.VARY, HttpHeaders.ORIGIN); List<String> omittedHeaders = new ArrayList<>(); omittedHeaders.addAll(Arrays.asList( HttpHeaders.ACCESS_CONTROL_ALLOW_CREDENTIALS, HttpHeaders.ACCESS_CONTROL_ALLOW_HEADERS, HttpHeaders.ACCESS_CONTROL_ALLOW_METHODS, HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN, HttpHeaders.ACCESS_CONTROL_EXPOSE_HEADERS, HttpHeaders.ACCESS_CONTROL_MAX_AGE)); assertEquals(200, r.getStatus()); validateContainsHeaders(expHeaders, r.getHeaders()); validateOmitHeaders(omittedHeaders, r.getHeaders()); } /** * If the HTTP method is not one of the permitted methods, do not set any * additional headers and terminate this set of steps. */ @Test public void testRegularWithInvalidMethod() { MultivaluedMap<String, Object> reqHeaders = new MultivaluedHashMap<>(); reqHeaders.add(HttpHeaders.ORIGIN, "http://valid.example.com"); reqHeaders.add("One", "One"); reqHeaders.add("Two", "Two"); reqHeaders.add("Three", "Three"); Response r = this.target("/") .request() .headers(reqHeaders) .build("POST") // Not registered in the test app. 
.invoke(); MultivaluedMap<String, Object> expHeaders = new MultivaluedHashMap<>(); expHeaders.add(HttpHeaders.VARY, HttpHeaders.ORIGIN); List<String> omittedHeaders = new ArrayList<>(); omittedHeaders.addAll(Arrays.asList( HttpHeaders.ACCESS_CONTROL_ALLOW_CREDENTIALS, HttpHeaders.ACCESS_CONTROL_ALLOW_HEADERS, HttpHeaders.ACCESS_CONTROL_ALLOW_METHODS, HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN, HttpHeaders.ACCESS_CONTROL_EXPOSE_HEADERS, HttpHeaders.ACCESS_CONTROL_MAX_AGE)); assertEquals(200, r.getStatus()); validateContainsHeaders(expHeaders, r.getHeaders()); validateOmitHeaders(omittedHeaders, r.getHeaders()); } /** * If the list of exposed headers is not empty add one or more * Access-Control-Expose-Headers headers, with as values the header field * names given in the list of exposed headers. */ @Test public void testRegularWithValidOrigin() { MultivaluedMap<String, Object> reqHeaders = new MultivaluedHashMap<>(); reqHeaders.add(HttpHeaders.ORIGIN, "http://valid.example.com"); reqHeaders.add("One", "One"); reqHeaders.add("Two", "Two"); reqHeaders.add("Three", "Three"); Response r = this.target("/") .request() .headers(reqHeaders) .build("GET") .invoke(); MultivaluedMap<String, Object> expHeaders = new MultivaluedHashMap<>(); expHeaders.add(HttpHeaders.VARY, HttpHeaders.ORIGIN); expHeaders.add(HttpHeaders.ACCESS_CONTROL_EXPOSE_HEADERS, "one"); expHeaders.add(HttpHeaders.ACCESS_CONTROL_EXPOSE_HEADERS, "two"); expHeaders.add(HttpHeaders.ACCESS_CONTROL_EXPOSE_HEADERS, "three"); List<String> omittedHeaders = new ArrayList<>(); omittedHeaders.addAll(Arrays.asList( HttpHeaders.ACCESS_CONTROL_ALLOW_CREDENTIALS, HttpHeaders.ACCESS_CONTROL_ALLOW_HEADERS, HttpHeaders.ACCESS_CONTROL_ALLOW_METHODS, HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN, HttpHeaders.ACCESS_CONTROL_MAX_AGE)); assertEquals(200, r.getStatus()); validateContainsHeaders(expHeaders, r.getHeaders()); validateOmitHeaders(omittedHeaders, r.getHeaders()); } /** * If the list of exposed headers is not empty add one or more 
* Access-Control-Expose-Headers headers, with as values the header field * names given in the list of exposed headers. */ @Test public void testRegularWithValidOriginAndUnregisteredHeaders() { MultivaluedMap<String, Object> reqHeaders = new MultivaluedHashMap<>(); reqHeaders.add(HttpHeaders.ORIGIN, "http://valid.example.com"); reqHeaders.add("One", "One"); reqHeaders.add("Three", "Three"); reqHeaders.add("Four", "Four"); reqHeaders.add("Five", "Five"); Response r = this.target("/") .request() .headers(reqHeaders) .build("GET") .invoke(); MultivaluedMap<String, Object> expHeaders = new MultivaluedHashMap<>(); expHeaders.add(HttpHeaders.VARY, HttpHeaders.ORIGIN); expHeaders.add(HttpHeaders.ACCESS_CONTROL_EXPOSE_HEADERS, "one"); expHeaders.add(HttpHeaders.ACCESS_CONTROL_EXPOSE_HEADERS, "three"); List<String> omittedHeaders = new ArrayList<>(); omittedHeaders.addAll(Arrays.asList( HttpHeaders.ACCESS_CONTROL_ALLOW_CREDENTIALS, HttpHeaders.ACCESS_CONTROL_ALLOW_HEADERS, HttpHeaders.ACCESS_CONTROL_ALLOW_METHODS, HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN, HttpHeaders.ACCESS_CONTROL_MAX_AGE)); assertEquals(200, r.getStatus()); validateContainsHeaders(expHeaders, r.getHeaders()); validateOmitHeaders(omittedHeaders, r.getHeaders()); } /** * If the Origin header is not present terminate this set of steps. The * request is outside the scope of this specification. 
*/ @Test public void testPreflightWithNoOrigin() { MultivaluedMap<String, Object> reqHeaders = new MultivaluedHashMap<>(); reqHeaders.addAll(HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, "GET"); reqHeaders.addAll(HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS, "One", "Two", "Three"); Response r = this.target("/any") .request() .headers(reqHeaders) .build("OPTIONS") .invoke(); MultivaluedMap<String, Object> expHeaders = new MultivaluedHashMap<>(); expHeaders.add(HttpHeaders.VARY, HttpHeaders.ORIGIN); List<String> omittedHeaders = new ArrayList<>(); omittedHeaders.addAll(Arrays.asList( HttpHeaders.ACCESS_CONTROL_ALLOW_CREDENTIALS, HttpHeaders.ACCESS_CONTROL_ALLOW_HEADERS, HttpHeaders.ACCESS_CONTROL_ALLOW_METHODS, HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN, HttpHeaders.ACCESS_CONTROL_EXPOSE_HEADERS, HttpHeaders.ACCESS_CONTROL_MAX_AGE)); assertEquals(404, r.getStatus()); validateContainsHeaders(expHeaders, r.getHeaders()); validateOmitHeaders(omittedHeaders, r.getHeaders()); } /** * If the Origin header is not present terminate this set of steps. The * request is outside the scope of this specification. 
*/ @Test public void testPreflightWithInvalidOrigin() { MultivaluedMap<String, Object> reqHeaders = new MultivaluedHashMap<>(); reqHeaders.addAll(HttpHeaders.ORIGIN, "http://invalid.example.com"); reqHeaders.addAll(HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, "GET"); reqHeaders.addAll(HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS, "One", "Two", "Three"); Response r = this.target("/any") .request() .headers(reqHeaders) .build("OPTIONS") .invoke(); MultivaluedMap<String, Object> expHeaders = new MultivaluedHashMap<>(); expHeaders.add(HttpHeaders.VARY, HttpHeaders.ORIGIN); List<String> omittedHeaders = new ArrayList<>(); omittedHeaders.addAll(Arrays.asList( HttpHeaders.ACCESS_CONTROL_ALLOW_CREDENTIALS, HttpHeaders.ACCESS_CONTROL_ALLOW_HEADERS, HttpHeaders.ACCESS_CONTROL_ALLOW_METHODS, HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN, HttpHeaders.ACCESS_CONTROL_EXPOSE_HEADERS, HttpHeaders.ACCESS_CONTROL_MAX_AGE)); assertEquals(404, r.getStatus()); validateContainsHeaders(expHeaders, r.getHeaders()); validateOmitHeaders(omittedHeaders, r.getHeaders()); } /** * If the Origin header is not present terminate this set of steps. The * request is outside the scope of this specification. 
*/ @Test public void testPreflightWithNoMethod() { MultivaluedMap<String, Object> reqHeaders = new MultivaluedHashMap<>(); reqHeaders.addAll(HttpHeaders.ORIGIN, "http://valid.example.com"); reqHeaders.addAll(HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS, "One", "Two", "Three"); Response r = this.target("/any") .request() .headers(reqHeaders) .build("OPTIONS") .invoke(); MultivaluedMap<String, Object> expHeaders = new MultivaluedHashMap<>(); expHeaders.add(HttpHeaders.VARY, HttpHeaders.ORIGIN); List<String> omittedHeaders = new ArrayList<>(); omittedHeaders.addAll(Arrays.asList( HttpHeaders.ACCESS_CONTROL_ALLOW_CREDENTIALS, HttpHeaders.ACCESS_CONTROL_ALLOW_HEADERS, HttpHeaders.ACCESS_CONTROL_ALLOW_METHODS, HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN, HttpHeaders.ACCESS_CONTROL_EXPOSE_HEADERS, HttpHeaders.ACCESS_CONTROL_MAX_AGE)); assertEquals(200, r.getStatus()); validateContainsHeaders(expHeaders, r.getHeaders()); validateOmitHeaders(omittedHeaders, r.getHeaders()); } /** * If the Origin header is not present terminate this set of steps. The * request is outside the scope of this specification. 
*/ @Test public void testPreflightWithInvalidMethod() { MultivaluedMap<String, Object> reqHeaders = new MultivaluedHashMap<>(); reqHeaders.addAll(HttpHeaders.ORIGIN, "http://valid.example.com"); reqHeaders.addAll(HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, "POST"); reqHeaders.addAll(HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS, "One", "Two", "Three"); Response r = this.target("/any") .request() .headers(reqHeaders) .build("OPTIONS") .invoke(); MultivaluedMap<String, Object> expHeaders = new MultivaluedHashMap<>(); expHeaders.add(HttpHeaders.VARY, HttpHeaders.ORIGIN); List<String> omittedHeaders = new ArrayList<>(); omittedHeaders.addAll(Arrays.asList( HttpHeaders.ACCESS_CONTROL_ALLOW_CREDENTIALS, HttpHeaders.ACCESS_CONTROL_ALLOW_HEADERS, HttpHeaders.ACCESS_CONTROL_ALLOW_METHODS, HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN, HttpHeaders.ACCESS_CONTROL_EXPOSE_HEADERS, HttpHeaders.ACCESS_CONTROL_MAX_AGE)); assertEquals(200, r.getStatus()); validateContainsHeaders(expHeaders, r.getHeaders()); validateOmitHeaders(omittedHeaders, r.getHeaders()); } /** * If the requested headers are not in the list of exposed headers, do * not return them. 
*/ @Test public void testPreflightWithInvalidHeaders() { MultivaluedMap<String, Object> reqHeaders = new MultivaluedHashMap<>(); reqHeaders.addAll(HttpHeaders.ORIGIN, "http://valid.example.com"); reqHeaders.addAll(HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, "GET"); reqHeaders.addAll(HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS, "Four", "Five", "Six"); Response r = this.target("/any") .request() .headers(reqHeaders) .build("OPTIONS") .invoke(); MultivaluedMap<String, Object> expHeaders = new MultivaluedHashMap<>(); expHeaders.add(HttpHeaders.VARY, HttpHeaders.ORIGIN); expHeaders.add(HttpHeaders.VARY, HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS); expHeaders.add(HttpHeaders.VARY, HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD); expHeaders.add(HttpHeaders.ACCESS_CONTROL_ALLOW_METHODS, "GET"); expHeaders.add(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN, "http://valid.example.com"); expHeaders.add(HttpHeaders.ACCESS_CONTROL_ALLOW_CREDENTIALS, "true"); expHeaders.add(HttpHeaders.ACCESS_CONTROL_MAX_AGE, "300"); List<String> omittedHeaders = new ArrayList<>(); omittedHeaders.addAll(Arrays.asList( HttpHeaders.ACCESS_CONTROL_ALLOW_HEADERS, HttpHeaders.ACCESS_CONTROL_EXPOSE_HEADERS)); assertEquals(200, r.getStatus()); validateContainsHeaders(expHeaders, r.getHeaders()); validateOmitHeaders(omittedHeaders, r.getHeaders()); } /** * Test a valid preflight request. 
*/ @Test public void testValidPreflight() { MultivaluedMap<String, Object> reqHeaders = new MultivaluedHashMap<>(); reqHeaders.addAll(HttpHeaders.ORIGIN, "http://valid.example.com"); reqHeaders.addAll(HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, "GET"); reqHeaders.addAll(HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS, "One", "Two", "Three"); Response r = this.target("/any") .request() .headers(reqHeaders) .build("OPTIONS") .invoke(); MultivaluedMap<String, Object> expHeaders = new MultivaluedHashMap<>(); expHeaders.add(HttpHeaders.VARY, HttpHeaders.ORIGIN); expHeaders.add(HttpHeaders.VARY, HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS); expHeaders.add(HttpHeaders.VARY, HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD); expHeaders.add(HttpHeaders.ACCESS_CONTROL_ALLOW_METHODS, "GET"); expHeaders.add(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN, "http://valid.example.com"); expHeaders.add(HttpHeaders.ACCESS_CONTROL_ALLOW_CREDENTIALS, "true"); expHeaders.add(HttpHeaders.ACCESS_CONTROL_ALLOW_HEADERS, "one"); expHeaders.add(HttpHeaders.ACCESS_CONTROL_ALLOW_HEADERS, "two"); expHeaders.add(HttpHeaders.ACCESS_CONTROL_ALLOW_HEADERS, "three"); expHeaders.add(HttpHeaders.ACCESS_CONTROL_MAX_AGE, "300"); List<String> omittedHeaders = new ArrayList<>(); omittedHeaders.addAll(Arrays.asList( HttpHeaders.ACCESS_CONTROL_EXPOSE_HEADERS)); assertEquals(200, r.getStatus()); validateContainsHeaders(expHeaders, r.getHeaders()); validateOmitHeaders(omittedHeaders, r.getHeaders()); } /** * Test a valid preflight request against a resource that has an options * handler. 
*/ @Test public void testValidPreflightExistingResource() { MultivaluedMap<String, Object> reqHeaders = new MultivaluedHashMap<>(); reqHeaders.addAll(HttpHeaders.ORIGIN, "http://valid.example.com"); reqHeaders.addAll(HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, "GET"); reqHeaders.addAll(HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS, "One", "Two", "Three"); Response r = this.target("/") .request() .headers(reqHeaders) .build("OPTIONS") .invoke(); MultivaluedMap<String, Object> expHeaders = new MultivaluedHashMap<>(); expHeaders.add(HttpHeaders.VARY, HttpHeaders.ORIGIN); expHeaders.add(HttpHeaders.VARY, HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS); expHeaders.add(HttpHeaders.VARY, HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD); expHeaders.add(HttpHeaders.ACCESS_CONTROL_ALLOW_METHODS, "GET"); expHeaders.add(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN, "http://valid.example.com"); expHeaders.add(HttpHeaders.ACCESS_CONTROL_ALLOW_CREDENTIALS, "true"); expHeaders.add(HttpHeaders.ACCESS_CONTROL_ALLOW_HEADERS, "one"); expHeaders.add(HttpHeaders.ACCESS_CONTROL_ALLOW_HEADERS, "two"); expHeaders.add(HttpHeaders.ACCESS_CONTROL_ALLOW_HEADERS, "three"); expHeaders.add(HttpHeaders.ACCESS_CONTROL_MAX_AGE, "300"); expHeaders.add("Test", "One"); List<String> omittedHeaders = new ArrayList<>(); omittedHeaders.addAll(Arrays.asList( HttpHeaders.ACCESS_CONTROL_EXPOSE_HEADERS)); assertEquals(200, r.getStatus()); validateContainsHeaders(expHeaders, r.getHeaders()); validateOmitHeaders(omittedHeaders, r.getHeaders()); } /** * A simple endpoint that returns the system status. * * @author Michael Krotscheck */ @Path("/") public static final class MockService { /** * Always returns. * * @return HTTP Response object with some test headers. */ @OPTIONS public Response handleOptions() { return Response .status(Status.OK) .header("Test", "One") .build(); } /** * Always returns. * * @return HTTP Response object with some test headers. 
*/ @GET @Produces(MediaType.APPLICATION_JSON) public Response handleGet() { return Response .status(Status.OK) .header("One", "One") .header("Two", "Two") .header("Three", "Three") .header("Four", "Four") .build(); } /** * Always returns. * * @return HTTP Response object with some test headers. */ @POST @Produces(MediaType.APPLICATION_JSON) public Response handlePost() { return Response .status(Status.OK) .header("One", "One") .header("Two", "Two") .header("Three", "Three") .header("Four", "Four") .build(); } } }
kangaroo-server/kangaroo
kangaroo-common/src/test/java/net/krotscheck/kangaroo/common/cors/CORSFilterTest.java
Java
apache-2.0
25,315
# Copyright (c) 2015 Intel Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import mock
import six

from sahara.plugins.cdh.v5_11_0 import edp_engine
from sahara.plugins.cdh.v5_11_0 import versionhandler
from sahara.tests.unit import base


class VersionHandlerTestCase(base.SaharaTestCase):
    """Unit tests for the CDH 5.11.0 plugin ``VersionHandler``.

    Every collaborator (deploy module, cloudera_utils, plugin_utils,
    conductor, context) is patched with ``mock``, so these tests only
    verify that the handler delegates to the right call with the right
    arguments — no real cluster interaction happens.
    """

    # Dotted module prefixes used to build @mock.patch targets below.
    plugin_path = "sahara.plugins.cdh.v5_11_0."
    cloudera_utils_path = plugin_path + "cloudera_utils.ClouderaUtilsV5110."
    plugin_utils_path = plugin_path + "plugin_utils.PluginUtilsV5110."

    def setUp(self):
        """Create a fresh VersionHandler for each test."""
        super(VersionHandlerTestCase, self).setUp()
        self.vh = versionhandler.VersionHandler()

    def test_get_node_processes(self):
        """Every node process name reported by the handler is a str."""
        processes = self.vh.get_node_processes()
        for k, v in six.iteritems(processes):
            for p in v:
                self.assertIsInstance(p, str)

    # NOTE: @mock.patch decorators are applied bottom-up, so the mock
    # arguments arrive in reverse declaration order.
    @mock.patch("sahara.conductor.API.cluster_update")
    @mock.patch("sahara.context.ctx")
    @mock.patch(plugin_path + "deploy.configure_cluster")
    @mock.patch(cloudera_utils_path + "get_cloudera_manager_info",
                return_value={"fake_cm_info": "fake"})
    def test_config_cluster(self, get_cm_info, configure_cluster, ctx,
                            cluster_update):
        """configure_cluster delegates to deploy and stores CM info."""
        cluster = mock.Mock()
        self.vh.configure_cluster(cluster)
        configure_cluster.assert_called_once_with(cluster)
        cluster_update.assert_called_once_with(
            ctx(), cluster,
            {'info': {"fake_cm_info": "fake"}})

    @mock.patch(plugin_path + "deploy.start_cluster")
    def test_start_cluster(self, start_cluster):
        """start_cluster delegates to deploy, then records cluster info."""
        cluster = mock.Mock()
        self.vh._set_cluster_info = mock.Mock()
        self.vh.start_cluster(cluster)
        start_cluster.assert_called_once_with(cluster)
        self.vh._set_cluster_info.assert_called_once_with(cluster)

    @mock.patch(plugin_path + "deploy.decommission_cluster")
    def test_decommission_nodes(self, decommission_cluster):
        """decommission_nodes passes cluster and instances straight through."""
        cluster = mock.Mock()
        instances = mock.Mock()
        self.vh.decommission_nodes(cluster, instances)
        decommission_cluster.assert_called_once_with(cluster, instances)

    @mock.patch(plugin_path + "deploy.scale_cluster")
    def test_scale_cluster(self, scale_cluster):
        """scale_cluster passes cluster and instances straight through."""
        cluster = mock.Mock()
        instances = mock.Mock()
        self.vh.scale_cluster(cluster, instances)
        scale_cluster.assert_called_once_with(cluster, instances)

    @mock.patch("sahara.conductor.API.cluster_update")
    @mock.patch("sahara.context.ctx")
    @mock.patch(cloudera_utils_path + "get_cloudera_manager_info",
                return_value={})
    @mock.patch(plugin_utils_path + "get_hue")
    def test_set_cluster_info(self, get_hue, get_cloudera_manager_info,
                              ctx, cluster_update):
        """_set_cluster_info publishes the Hue dashboard URL (port 8888)."""
        hue = mock.Mock()
        hue.get_ip_or_dns_name.return_value = "1.2.3.4"
        get_hue.return_value = hue
        cluster = mock.Mock()
        self.vh._set_cluster_info(cluster)
        info = {'info': {'Hue Dashboard': {'Web UI': 'http://1.2.3.4:8888'}}}
        cluster_update.assert_called_once_with(ctx(), cluster, info)

    @mock.patch("sahara.plugins.utils.get_instance")
    @mock.patch("sahara.plugins.utils.get_config_value_or_default")
    @mock.patch("sahara.service.edp.job_utils.get_plugin")
    def test_get_edp_engine(self, get_plugin, get_config_value_or_default,
                            get_instance):
        """EDP engine selection: Java->Oozie, Spark->Spark, else None."""
        cluster = mock.Mock()
        job_type = 'Java'
        ret = self.vh.get_edp_engine(cluster, job_type)
        self.assertIsInstance(ret, edp_engine.EdpOozieEngine)

        job_type = 'Spark'
        ret = self.vh.get_edp_engine(cluster, job_type)
        self.assertIsInstance(ret, edp_engine.EdpSparkEngine)

        job_type = 'unsupported'
        ret = self.vh.get_edp_engine(cluster, job_type)
        self.assertIsNone(ret)

    def test_get_edp_job_types(self):
        """Job types are the Oozie types followed by the Spark types."""
        ret = self.vh.get_edp_job_types()
        expect = edp_engine.EdpOozieEngine.get_supported_job_types() + \
            edp_engine.EdpSparkEngine.get_supported_job_types()
        self.assertEqual(expect, ret)

    @mock.patch(plugin_path +
                "edp_engine.EdpOozieEngine.get_possible_job_config",
                return_value={'job_config': {}})
    def test_edp_config_hints(self, get_possible_job_config):
        """Config hints are delegated to EdpOozieEngine unchanged."""
        job_type = mock.Mock()
        ret = self.vh.get_edp_config_hints(job_type)
        get_possible_job_config.assert_called_once_with(job_type)
        self.assertEqual(ret, {'job_config': {}})

    @mock.patch(plugin_path + "deploy.get_open_ports", return_value=[1234])
    def test_get_open_ports(self, get_open_ports):
        """Open ports are delegated to deploy.get_open_ports unchanged."""
        node_group = mock.Mock()
        ret = self.vh.get_open_ports(node_group)
        get_open_ports.assert_called_once_with(node_group)
        self.assertEqual(ret, [1234])

    @mock.patch(plugin_utils_path + "recommend_configs")
    def test_recommend_configs(self, recommend_configs):
        """recommend_configs forwards cluster, plugin configs and scaling."""
        cluster = mock.Mock()
        scaling = mock.Mock()
        self.vh.get_plugin_configs = mock.Mock()
        self.vh.recommend_configs(cluster, scaling)
        recommend_configs.assert_called_once_with(cluster,
                                                  self.vh.get_plugin_configs(),
                                                  scaling)
shakamunyi/sahara
sahara/tests/unit/plugins/cdh/v5_11_0/test_versionhandler.py
Python
apache-2.0
5,857
package com.jobhive.sakimonkey.data.response;

import java.util.Date;

import com.fasterxml.jackson.annotation.JsonFormat;

import com.jobhive.sakimonkey.Defaults;

/**
 * Read-only response object describing one entry on the email whitelist.
 * Populated by Jackson during JSON deserialization (fields have no setters).
 *
 * @author Hussachai
 */
public class WhitelistEntry {

    /**
     * the email that is whitelisted
     */
    private String email;

    /**
     * a description of why the email was whitelisted
     */
    private String detail;

    /**
     * when the email was added to the whitelist
     */
    // Serialized/deserialized as a string using the project-wide date-time
    // pattern and time zone from Defaults.
    @JsonFormat(shape = JsonFormat.Shape.STRING,
            pattern = Defaults.DATETIME_FORMAT, timezone = Defaults.TIME_ZONE)
    private Date createdAt;

    /**
     * @return the whitelisted email address
     */
    public String getEmail() {
        return email;
    }

    /**
     * @return a description of why the email was whitelisted
     */
    public String getDetail() {
        return detail;
    }

    /**
     * @return when the email was added to the whitelist
     */
    public Date getCreatedAt() {
        return createdAt;
    }

}
JobHive/saki-monkey
src/main/java/com/jobhive/sakimonkey/data/response/WhitelistEntry.java
Java
apache-2.0
868
/* * Copyright 1999-2018 Alibaba Group Holding Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.alibaba.nacos.api.naming.pojo; import java.util.List; /** * ListView. * * @author nkorange */ public class ListView<T> { private List<T> data; private int count; public List<T> getData() { return data; } public void setData(List<T> data) { this.data = data; } public int getCount() { return count; } public void setCount(int count) { this.count = count; } @Override public String toString() { return "ListView{" + "data=" + data + ", count=" + count + '}'; } }
alibaba/nacos
api/src/main/java/com/alibaba/nacos/api/naming/pojo/ListView.java
Java
apache-2.0
1,223
// Auto-generated UI5 icon module: selects the "attachment-video" icon path
// data variant matching the active theme family.
sap.ui.define(['sap/ui/webc/common/thirdparty/base/config/Theme', './v5/attachment-video', './v4/attachment-video'], function (Theme, attachmentVideo$2, attachmentVideo$1) {
	'use strict';

	// Horizon-family themes get attachmentVideo$1 (loaded from ./v4),
	// all other themes get attachmentVideo$2 (loaded from ./v5).
	// NOTE(review): the $1/$2 suffixes are generator artifacts and the
	// v4/v5 pairing looks inverted relative to the directory names —
	// confirm against the icon build tooling before relying on it.
	const pathData = Theme.isThemeFamily("sap_horizon") ? attachmentVideo$1 : attachmentVideo$2;
	var attachmentVideo = { pathData };

	return attachmentVideo;

});
SAP/openui5
src/sap.ui.webc.common/src/sap/ui/webc/common/thirdparty/icons/attachment-video.js
JavaScript
apache-2.0
351
/** * @license Copyright 2016 Google Inc. All Rights Reserved. * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ 'use strict'; const defaultConfigPath = './default-config.js'; const defaultConfig = require('./default-config.js'); const constants = require('./constants.js'); const i18n = require('./../lib/i18n/i18n.js'); const isDeepEqual = require('lodash.isequal'); const log = require('lighthouse-logger'); const path = require('path'); const Runner = require('../runner.js'); const ConfigPlugin = require('./config-plugin.js'); const Budget = require('./budget.js'); const {requireAudits, mergeOptionsOfItems, resolveModule} = require('./config-helpers.js'); /** @typedef {typeof import('../gather/gatherers/gatherer.js')} GathererConstructor */ /** @typedef {InstanceType<GathererConstructor>} Gatherer */ /** * Define with object literal so that tsc will require it to stay updated. 
* @type {Record<keyof LH.BaseArtifacts, ''>} */ const BASE_ARTIFACT_BLANKS = { fetchTime: '', LighthouseRunWarnings: '', TestedAsMobileDevice: '', HostFormFactor: '', HostUserAgent: '', NetworkUserAgent: '', BenchmarkIndex: '', WebAppManifest: '', Stacks: '', traces: '', devtoolsLogs: '', settings: '', URL: '', Timing: '', PageLoadError: '', }; const BASE_ARTIFACT_NAMES = Object.keys(BASE_ARTIFACT_BLANKS); /** * @param {Config['passes']} passes * @param {Config['audits']} audits */ function assertValidPasses(passes, audits) { if (!Array.isArray(passes)) { return; } const requiredGatherers = Config.getGatherersNeededByAudits(audits); // Base artifacts are provided by GatherRunner, so start foundGatherers with them. const foundGatherers = new Set(BASE_ARTIFACT_NAMES); // Log if we are running gathers that are not needed by the audits listed in the config passes.forEach((pass, passIndex) => { if (passIndex === 0 && pass.loadFailureMode !== 'fatal') { log.warn(`"${pass.passName}" is the first pass but was marked as non-fatal. ` + `The first pass will always be treated as loadFailureMode=fatal.`); pass.loadFailureMode = 'fatal'; } pass.gatherers.forEach(gathererDefn => { const gatherer = gathererDefn.instance; foundGatherers.add(gatherer.name); const isGatherRequiredByAudits = requiredGatherers.has(gatherer.name); if (!isGatherRequiredByAudits) { const msg = `${gatherer.name} gatherer requested, however no audit requires it.`; log.warn('config', msg); } }); }); // All required gatherers must be found in the config. Throw otherwise. for (const auditDefn of audits || []) { const auditMeta = auditDefn.implementation.meta; for (const requiredArtifact of auditMeta.requiredArtifacts) { if (!foundGatherers.has(requiredArtifact)) { throw new Error(`${requiredArtifact} gatherer, required by audit ${auditMeta.id}, ` + 'was not found in config.'); } } } // Passes must have unique `passName`s. Throw otherwise. 
const usedNames = new Set(); passes.forEach(pass => { const passName = pass.passName; if (usedNames.has(passName)) { throw new Error(`Passes must have unique names (repeated passName: ${passName}.`); } usedNames.add(passName); }); } /** * @param {Config['categories']} categories * @param {Config['audits']} audits * @param {Config['groups']} groups */ function assertValidCategories(categories, audits, groups) { if (!categories) { return; } const auditsKeyedById = new Map((audits || []).map(audit => /** @type {[string, LH.Config.AuditDefn]} */ ([audit.implementation.meta.id, audit]) )); Object.keys(categories).forEach(categoryId => { categories[categoryId].auditRefs.forEach((auditRef, index) => { if (!auditRef.id) { throw new Error(`missing an audit id at ${categoryId}[${index}]`); } const audit = auditsKeyedById.get(auditRef.id); if (!audit) { throw new Error(`could not find ${auditRef.id} audit for category ${categoryId}`); } const auditImpl = audit.implementation; const isManual = auditImpl.meta.scoreDisplayMode === 'manual'; if (categoryId === 'accessibility' && !auditRef.group && !isManual) { throw new Error(`${auditRef.id} accessibility audit does not have a group`); } if (auditRef.weight > 0 && isManual) { throw new Error(`${auditRef.id} is manual but has a positive weight`); } if (auditRef.group && (!groups || !groups[auditRef.group])) { throw new Error(`${auditRef.id} references unknown group ${auditRef.group}`); } }); }); } /** * @param {Gatherer} gathererInstance * @param {string=} gathererName */ function assertValidGatherer(gathererInstance, gathererName) { gathererName = gathererName || gathererInstance.name || 'gatherer'; if (typeof gathererInstance.beforePass !== 'function') { throw new Error(`${gathererName} has no beforePass() method.`); } if (typeof gathererInstance.pass !== 'function') { throw new Error(`${gathererName} has no pass() method.`); } if (typeof gathererInstance.afterPass !== 'function') { throw new Error(`${gathererName} has no 
afterPass() method.`); } } /** * Throws if pluginName is invalid or (somehow) collides with a category in the * configJSON being added to. * @param {LH.Config.Json} configJSON * @param {string} pluginName */ function assertValidPluginName(configJSON, pluginName) { if (!pluginName.startsWith('lighthouse-plugin-')) { throw new Error(`plugin name '${pluginName}' does not start with 'lighthouse-plugin-'`); } if (configJSON.categories && configJSON.categories[pluginName]) { throw new Error(`plugin name '${pluginName}' not allowed because it is the id of a category already found in config`); // eslint-disable-line max-len } } /** * Creates a settings object from potential flags object by dropping all the properties * that don't exist on Config.Settings. * @param {Partial<LH.Flags>=} flags * @return {RecursivePartial<LH.Config.Settings>} */ function cleanFlagsForSettings(flags = {}) { /** @type {RecursivePartial<LH.Config.Settings>} */ const settings = {}; for (const key of Object.keys(flags)) { if (key in constants.defaultSettings) { // @ts-ignore tsc can't yet express that key is only a single type in each iteration, not a union of types. settings[key] = flags[key]; } } return settings; } /** * More widely typed than exposed merge() function, below. 
* @param {Object<string, any>|Array<any>|undefined|null} base * @param {Object<string, any>|Array<any>} extension * @param {boolean=} overwriteArrays */ function _merge(base, extension, overwriteArrays = false) { // If the default value doesn't exist or is explicitly null, defer to the extending value if (typeof base === 'undefined' || base === null) { return extension; } else if (typeof extension === 'undefined') { return base; } else if (Array.isArray(extension)) { if (overwriteArrays) return extension; if (!Array.isArray(base)) throw new TypeError(`Expected array but got ${typeof base}`); const merged = base.slice(); extension.forEach(item => { if (!merged.some(candidate => isDeepEqual(candidate, item))) merged.push(item); }); return merged; } else if (typeof extension === 'object') { if (typeof base !== 'object') throw new TypeError(`Expected object but got ${typeof base}`); if (Array.isArray(base)) throw new TypeError('Expected object but got Array'); Object.keys(extension).forEach(key => { const localOverwriteArrays = overwriteArrays || (key === 'settings' && typeof base[key] === 'object'); base[key] = _merge(base[key], extension[key], localOverwriteArrays); }); return base; } return extension; } /** * Until support of jsdoc templates with constraints, type in config.d.ts. * See https://github.com/Microsoft/TypeScript/issues/24283 * @type {LH.Config.Merge} */ const merge = _merge; /** * @template T * @param {Array<T>} array * @return {Array<T>} */ function cloneArrayWithPluginSafety(array) { return array.map(item => { if (typeof item === 'object') { // Return copy of instance and prototype chain (in case item is instantiated class). 
return Object.assign( Object.create( Object.getPrototypeOf(item) ), item ); } return item; }); } /** * // TODO(bckenny): could adopt "jsonified" type to ensure T will survive JSON * round trip: https://github.com/Microsoft/TypeScript/issues/21838 * @template T * @param {T} json * @return {T} */ function deepClone(json) { return JSON.parse(JSON.stringify(json)); } /** * Deep clone a ConfigJson, copying over any "live" gatherer or audit that * wouldn't make the JSON round trip. * @param {LH.Config.Json} json * @return {LH.Config.Json} */ function deepCloneConfigJson(json) { const cloned = deepClone(json); // Copy arrays that could contain plugins to allow for programmatic // injection of plugins. if (Array.isArray(cloned.passes) && Array.isArray(json.passes)) { for (let i = 0; i < cloned.passes.length; i++) { const pass = cloned.passes[i]; pass.gatherers = cloneArrayWithPluginSafety(json.passes[i].gatherers || []); } } if (Array.isArray(json.audits)) { cloned.audits = cloneArrayWithPluginSafety(json.audits); } return cloned; } class Config { /** * @constructor * @implements {LH.Config.Json} * @param {LH.Config.Json=} configJSON * @param {LH.Flags=} flags */ constructor(configJSON, flags) { const status = {msg: 'Create config', id: 'lh:init:config'}; log.time(status, 'verbose'); let configPath = flags && flags.configPath; if (!configJSON) { configJSON = defaultConfig; configPath = path.resolve(__dirname, defaultConfigPath); } if (configPath && !path.isAbsolute(configPath)) { throw new Error('configPath must be an absolute path.'); } // We don't want to mutate the original config object configJSON = deepCloneConfigJson(configJSON); // Extend the default config if specified if (configJSON.extends) { configJSON = Config.extendConfigJSON(deepCloneConfigJson(defaultConfig), configJSON); } // The directory of the config path, if one was provided. const configDir = configPath ? path.dirname(configPath) : undefined; // Validate and merge in plugins (if any). 
configJSON = Config.mergePlugins(configJSON, flags, configDir); const settings = Config.initSettings(configJSON.settings, flags); // Augment passes with necessary defaults and require gatherers. const passesWithDefaults = Config.augmentPassesWithDefaults(configJSON.passes); Config.adjustDefaultPassForThrottling(settings, passesWithDefaults); const passes = Config.requireGatherers(passesWithDefaults, configDir); /** @type {LH.Config.Settings} */ this.settings = settings; /** @type {?Array<LH.Config.Pass>} */ this.passes = passes; /** @type {?Array<LH.Config.AuditDefn>} */ this.audits = Config.requireAudits(configJSON.audits, configDir); /** @type {?Record<string, LH.Config.Category>} */ this.categories = configJSON.categories || null; /** @type {?Record<string, LH.Config.Group>} */ this.groups = configJSON.groups || null; Config.filterConfigIfNeeded(this); assertValidPasses(this.passes, this.audits); assertValidCategories(this.categories, this.audits, this.groups); // TODO(bckenny): until tsc adds @implements support, assert that Config is a ConfigJson. /** @type {LH.Config.Json} */ const configJson = this; // eslint-disable-line no-unused-vars log.timeEnd(status); } /** * Provides a cleaned-up, stringified version of this config. Gatherer and * Audit `implementation` and `instance` do not survive this process. * @return {string} */ getPrintString() { const jsonConfig = deepClone(this); if (jsonConfig.passes) { for (const pass of jsonConfig.passes) { for (const gathererDefn of pass.gatherers) { gathererDefn.implementation = undefined; // @ts-ignore Breaking the Config.GathererDefn type. gathererDefn.instance = undefined; if (Object.keys(gathererDefn.options).length === 0) { // @ts-ignore Breaking the Config.GathererDefn type. gathererDefn.options = undefined; } } } } if (jsonConfig.audits) { for (const auditDefn of jsonConfig.audits) { // @ts-ignore Breaking the Config.AuditDefn type. 
auditDefn.implementation = undefined; if (Object.keys(auditDefn.options).length === 0) { // @ts-ignore Breaking the Config.AuditDefn type. auditDefn.options = undefined; } } } // Printed config is more useful with localized strings. i18n.replaceIcuMessageInstanceIds(jsonConfig, jsonConfig.settings.locale); return JSON.stringify(jsonConfig, null, 2); } /** * @param {LH.Config.Json} baseJSON The JSON of the configuration to extend * @param {LH.Config.Json} extendJSON The JSON of the extensions * @return {LH.Config.Json} */ static extendConfigJSON(baseJSON, extendJSON) { if (extendJSON.passes && baseJSON.passes) { for (const pass of extendJSON.passes) { // use the default pass name if one is not specified const passName = pass.passName || constants.defaultPassConfig.passName; const basePass = baseJSON.passes.find(candidate => candidate.passName === passName); if (!basePass) { baseJSON.passes.push(pass); } else { merge(basePass, pass); } } delete extendJSON.passes; } return merge(baseJSON, extendJSON); } /** * @param {LH.Config.Json} configJSON * @param {LH.Flags=} flags * @param {string=} configDir * @return {LH.Config.Json} */ static mergePlugins(configJSON, flags, configDir) { const configPlugins = configJSON.plugins || []; const flagPlugins = (flags && flags.plugins) || []; const pluginNames = new Set([...configPlugins, ...flagPlugins]); for (const pluginName of pluginNames) { assertValidPluginName(configJSON, pluginName); const pluginPath = resolveModule(pluginName, configDir, 'plugin'); const rawPluginJson = require(pluginPath); const pluginJson = ConfigPlugin.parsePlugin(rawPluginJson, pluginName); configJSON = Config.extendConfigJSON(configJSON, pluginJson); } return configJSON; } /** * @param {LH.Config.Json['passes']} passes * @return {?Array<Required<LH.Config.PassJson>>} */ static augmentPassesWithDefaults(passes) { if (!passes) { return null; } const {defaultPassConfig} = constants; return passes.map(pass => merge(deepClone(defaultPassConfig), pass)); } 
/** * @param {LH.SharedFlagsSettings=} settingsJson * @param {LH.Flags=} flags * @return {LH.Config.Settings} */ static initSettings(settingsJson = {}, flags) { // If a locale is requested in flags or settings, use it. A typical CLI run will not have one, // however `lookupLocale` will always determine which of our supported locales to use (falling // back if necessary). const locale = i18n.lookupLocale((flags && flags.locale) || settingsJson.locale); // Fill in missing settings with defaults const {defaultSettings} = constants; const settingWithDefaults = merge(deepClone(defaultSettings), settingsJson, true); // Override any applicable settings with CLI flags const settingsWithFlags = merge(settingWithDefaults || {}, cleanFlagsForSettings(flags), true); if (settingsWithFlags.budgets) { settingsWithFlags.budgets = Budget.initializeBudget(settingsWithFlags.budgets); } // Locale is special and comes only from flags/settings/lookupLocale. settingsWithFlags.locale = locale; return settingsWithFlags; } /** * Expands the gatherers from user-specified to an internal gatherer definition format. 
* * Input Examples: * - 'my-gatherer' * - class MyGatherer extends Gatherer { } * - {instance: myGathererInstance} * * @param {Array<LH.Config.GathererJson>} gatherers * @return {Array<{instance?: Gatherer, implementation?: GathererConstructor, path?: string, options?: {}}>} passes */ static expandGathererShorthand(gatherers) { const expanded = gatherers.map(gatherer => { if (typeof gatherer === 'string') { // just 'path/to/gatherer' return {path: gatherer, options: {}}; } else if ('implementation' in gatherer || 'instance' in gatherer) { // {implementation: GathererConstructor, ...} or {instance: GathererInstance, ...} return gatherer; } else if ('path' in gatherer) { // {path: 'path/to/gatherer', ...} if (typeof gatherer.path !== 'string') { throw new Error('Invalid Gatherer type ' + JSON.stringify(gatherer)); } return gatherer; } else if (typeof gatherer === 'function') { // just GathererConstructor return {implementation: gatherer, options: {}}; } else if (gatherer && typeof gatherer.beforePass === 'function') { // just GathererInstance return {instance: gatherer, options: {}}; } else { throw new Error('Invalid Gatherer type ' + JSON.stringify(gatherer)); } }); return expanded; } /** * Observed throttling methods (devtools/provided) require at least 5s of quiet for the metrics to * be computed. This method adjusts the quiet thresholds to the required minimums if necessary. 
* @param {LH.Config.Settings} settings * @param {?Array<Required<LH.Config.PassJson>>} passes */ static adjustDefaultPassForThrottling(settings, passes) { if (!passes || (settings.throttlingMethod !== 'devtools' && settings.throttlingMethod !== 'provided')) { return; } const defaultPass = passes.find(pass => pass.passName === 'defaultPass'); if (!defaultPass) return; const overrides = constants.nonSimulatedPassConfigOverrides; defaultPass.pauseAfterLoadMs = Math.max(overrides.pauseAfterLoadMs, defaultPass.pauseAfterLoadMs); defaultPass.cpuQuietThresholdMs = Math.max(overrides.cpuQuietThresholdMs, defaultPass.cpuQuietThresholdMs); defaultPass.networkQuietThresholdMs = Math.max(overrides.networkQuietThresholdMs, defaultPass.networkQuietThresholdMs); } /** * Filter out any unrequested items from the config, based on requested categories or audits. * @param {Config} config */ static filterConfigIfNeeded(config) { const settings = config.settings; if (!settings.onlyCategories && !settings.onlyAudits && !settings.skipAudits) { return; } // 1. Filter to just the chosen categories/audits const {categories, requestedAuditNames} = Config.filterCategoriesAndAudits(config.categories, settings); // 2. Resolve which audits will need to run const audits = config.audits && config.audits.filter(auditDefn => requestedAuditNames.has(auditDefn.implementation.meta.id)); // 3. Resolve which gatherers will need to run const requiredGathererIds = Config.getGatherersNeededByAudits(audits); // 4. Filter to only the neccessary passes const passes = Config.generatePassesNeededByGatherers(config.passes, requiredGathererIds); config.categories = categories; config.audits = audits; config.passes = passes; } /** * Filter out any unrequested categories or audits from the categories object. 
* @param {Config['categories']} oldCategories * @param {LH.Config.Settings} settings * @return {{categories: Config['categories'], requestedAuditNames: Set<string>}} */ static filterCategoriesAndAudits(oldCategories, settings) { if (!oldCategories) { return {categories: null, requestedAuditNames: new Set()}; } if (settings.onlyAudits && settings.skipAudits) { throw new Error('Cannot set both skipAudits and onlyAudits'); } /** @type {NonNullable<Config['categories']>} */ const categories = {}; const filterByIncludedCategory = !!settings.onlyCategories; const filterByIncludedAudit = !!settings.onlyAudits; const categoryIds = settings.onlyCategories || []; const auditIds = settings.onlyAudits || []; const skipAuditIds = settings.skipAudits || []; // warn if the category is not found categoryIds.forEach(categoryId => { if (!oldCategories[categoryId]) { log.warn('config', `unrecognized category in 'onlyCategories': ${categoryId}`); } }); // warn if the audit is not found in a category or there are overlaps const auditsToValidate = new Set(auditIds.concat(skipAuditIds)); for (const auditId of auditsToValidate) { const foundCategory = Object.keys(oldCategories).find(categoryId => { const auditRefs = oldCategories[categoryId].auditRefs; return !!auditRefs.find(candidate => candidate.id === auditId); }); if (!foundCategory) { const parentKeyName = skipAuditIds.includes(auditId) ? 
'skipAudits' : 'onlyAudits'; log.warn('config', `unrecognized audit in '${parentKeyName}': ${auditId}`); } else if (auditIds.includes(auditId) && categoryIds.includes(foundCategory)) { log.warn('config', `${auditId} in 'onlyAudits' is already included by ` + `${foundCategory} in 'onlyCategories'`); } } const includedAudits = new Set(auditIds); skipAuditIds.forEach(id => includedAudits.delete(id)); Object.keys(oldCategories).forEach(categoryId => { const category = deepClone(oldCategories[categoryId]); if (filterByIncludedCategory && filterByIncludedAudit) { // If we're filtering to the category and audit whitelist, include the union of the two if (!categoryIds.includes(categoryId)) { category.auditRefs = category.auditRefs.filter(audit => auditIds.includes(audit.id)); } } else if (filterByIncludedCategory) { // If we're filtering to just the category whitelist and the category is not included, skip it if (!categoryIds.includes(categoryId)) { return; } } else if (filterByIncludedAudit) { category.auditRefs = category.auditRefs.filter(audit => auditIds.includes(audit.id)); } // always filter to the audit blacklist category.auditRefs = category.auditRefs.filter(audit => !skipAuditIds.includes(audit.id)); if (category.auditRefs.length) { categories[categoryId] = category; category.auditRefs.forEach(audit => includedAudits.add(audit.id)); } }); return {categories, requestedAuditNames: includedAudits}; } /** * @param {LH.Config.Json} config * @return {Array<{id: string, title: string}>} */ static getCategories(config) { const categories = config.categories; if (!categories) { return []; } return Object.keys(categories).map(id => { const title = categories[id].title; return {id, title}; }); } /** * From some requested audits, return names of all required artifacts * @param {Config['audits']} audits * @return {Set<string>} */ static getGatherersNeededByAudits(audits) { // It's possible we weren't given any audits (but existing audit results), in which case // there is no 
need to do any work here. if (!audits) { return new Set(); } return audits.reduce((list, auditDefn) => { auditDefn.implementation.meta.requiredArtifacts.forEach(artifact => list.add(artifact)); return list; }, new Set()); } /** * Filters to only required passes and gatherers, returning a new passes array. * @param {Config['passes']} passes * @param {Set<string>} requiredGatherers * @return {Config['passes']} */ static generatePassesNeededByGatherers(passes, requiredGatherers) { if (!passes) { return null; } const auditsNeedTrace = requiredGatherers.has('traces'); const filteredPasses = passes.map(pass => { // remove any unncessary gatherers from within the passes pass.gatherers = pass.gatherers.filter(gathererDefn => { const gatherer = gathererDefn.instance; return requiredGatherers.has(gatherer.name); }); // disable the trace if no audit requires a trace if (pass.recordTrace && !auditsNeedTrace) { const passName = pass.passName || 'unknown pass'; log.warn('config', `Trace not requested by an audit, dropping trace in ${passName}`); pass.recordTrace = false; } return pass; }).filter(pass => { // remove any passes lacking concrete gatherers, unless they are dependent on the trace if (pass.recordTrace) return true; // Always keep defaultPass if (pass.passName === 'defaultPass') return true; return pass.gatherers.length > 0; }); return filteredPasses; } /** * Take an array of audits and audit paths and require any paths (possibly * relative to the optional `configDir`) using `resolveModule`, * leaving only an array of AuditDefns. 
* @param {LH.Config.Json['audits']} audits * @param {string=} configDir * @return {Config['audits']} */ static requireAudits(audits, configDir) { const status = {msg: 'Requiring audits', id: 'lh:config:requireAudits'}; log.time(status, 'verbose'); const auditDefns = requireAudits(audits, configDir); log.timeEnd(status); return auditDefns; } /** * @param {string} path * @param {{}=} options * @param {Array<string>} coreAuditList * @param {string=} configDir * @return {LH.Config.GathererDefn} */ static requireGathererFromPath(path, options, coreAuditList, configDir) { const coreGatherer = coreAuditList.find(a => a === `${path}.js`); let requirePath = `../gather/gatherers/${path}`; if (!coreGatherer) { // Otherwise, attempt to find it elsewhere. This throws if not found. requirePath = resolveModule(path, configDir, 'gatherer'); } const GathererClass = /** @type {GathererConstructor} */ (require(requirePath)); return { instance: new GathererClass(), implementation: GathererClass, path, options: options || {}, }; } /** * Takes an array of passes with every property now initialized except the * gatherers and requires them, (relative to the optional `configDir` if * provided) using `resolveModule`, returning an array of full Passes. 
* @param {?Array<Required<LH.Config.PassJson>>} passes * @param {string=} configDir * @return {Config['passes']} */ static requireGatherers(passes, configDir) { if (!passes) { return null; } const status = {msg: 'Requiring gatherers', id: 'lh:config:requireGatherers'}; log.time(status, 'verbose'); const coreList = Runner.getGathererList(); const fullPasses = passes.map(pass => { const gathererDefns = Config.expandGathererShorthand(pass.gatherers).map(gathererDefn => { if (gathererDefn.instance) { return { instance: gathererDefn.instance, implementation: gathererDefn.implementation, path: gathererDefn.path, options: gathererDefn.options || {}, }; } else if (gathererDefn.implementation) { const GathererClass = gathererDefn.implementation; return { instance: new GathererClass(), implementation: gathererDefn.implementation, path: gathererDefn.path, options: gathererDefn.options || {}, }; } else if (gathererDefn.path) { const path = gathererDefn.path; const options = gathererDefn.options; return Config.requireGathererFromPath(path, options, coreList, configDir); } else { throw new Error('Invalid expanded Gatherer: ' + JSON.stringify(gathererDefn)); } }); const mergedDefns = mergeOptionsOfItems(gathererDefns); mergedDefns.forEach(gatherer => assertValidGatherer(gatherer.instance, gatherer.path)); return Object.assign(pass, {gatherers: mergedDefns}); }); log.timeEnd(status); return fullPasses; } } module.exports = Config;
wardpeet/lighthouse
lighthouse-core/config/config.js
JavaScript
apache-2.0
29,048
#!/usr/bin/python
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Setup script for the DoubleClick Ad Exchange Buyer API Python Client Library.
"""

__author__ = 'api.sgrinberg@gmail.com (Stan Grinberg)'

import os

from distutils.core import setup

from adspygoogle.adxbuyer import (LIB_AUTHOR, LIB_AUTHOR_EMAIL, LIB_NAME,
                                  LIB_URL, LIB_VERSION)

# Every sub-package shipped as part of this distribution.
PACKAGES = [
    'adspygoogle',
    'adspygoogle.common',
    'adspygoogle.common.https',
    'adspygoogle.common.soappy',
    'adspygoogle.adxbuyer',
    'adspygoogle.SOAPpy',
    'adspygoogle.SOAPpy.wstools',
]

# Non-Python files bundled with the adxbuyer package.
PACKAGE_DATA = {'adspygoogle.adxbuyer': [os.path.join('data', '*')]}

setup(
    name='adspygoogle.adxbuyer',
    version=LIB_VERSION,
    description=LIB_NAME,
    author=LIB_AUTHOR,
    author_email=LIB_AUTHOR_EMAIL,
    maintainer=LIB_AUTHOR,
    maintainer_email=LIB_AUTHOR_EMAIL,
    url=LIB_URL,
    license='Apache License 2.0',
    long_description='For additional information, please see %s' % LIB_URL,
    packages=PACKAGES,
    package_data=PACKAGE_DATA,
    platforms='any',
)
donspaulding/adspygoogle
scripts/adspygoogle/adxbuyer/setup.py
Python
apache-2.0
1,720
package com.github.danielwegener.logback.kafka.encoding; import ch.qos.logback.core.Layout; import java.nio.charset.Charset; /** * A KafkaMessageEncoder that can be configured with a {@link Layout} and a {@link Charset} and creates * a serialized string for each event using the given layout. * @since 0.1.0 */ public class LayoutKafkaMessageEncoder<E> extends KafkaMessageEncoderBase<E> { public LayoutKafkaMessageEncoder() { } public LayoutKafkaMessageEncoder(Layout<E> layout, Charset charset) { this.layout = layout; this.charset = charset; } private Layout<E> layout; private Charset charset; private static final Charset UTF8 = Charset.forName("UTF-8"); @Override public void start() { if (charset == null) { addInfo("No charset specified for PatternLayoutKafkaEncoder. Using default UTF8 encoding."); charset = UTF8; } super.start(); } @Override public byte[] doEncode(E event) { final String message = layout.doLayout(event); return message.getBytes(charset); } public void setLayout(Layout<E> layout) { this.layout = layout; } public void setCharset(Charset charset) { this.charset = charset; } public Layout<E> getLayout() { return layout; } public Charset getCharset() { return charset; } }
aerskine/logback-kafka-appender
src/main/java/com/github/danielwegener/logback/kafka/encoding/LayoutKafkaMessageEncoder.java
Java
apache-2.0
1,414
package bookshop2.supplier.data.supplierOrderCache;

import java.util.Collection;
import java.util.List;
import java.util.Map;

import bookshop2.Book;
import bookshop2.BookCriteria;
import bookshop2.BookKey;

/**
 * Read/write cache of the books a supplier has in stock.
 *
 * <p>Entries are presumably keyed by {@link BookKey} (the map-returning accessors use it),
 * with lookups also possible by numeric id, by {@link BookCriteria}, or by matching against
 * whole {@link Book} instances. No implementation is visible here, so exact matching and
 * concurrency semantics should be confirmed against the implementing class.</p>
 */
public interface SupplierOrderCache {

    /** Returns every cached book as a list. */
    public List<Book> getAllBooksInStock();

    /** Returns every cached book keyed by its {@link BookKey}. */
    public Map<BookKey, Book> getAllBooksInStockAsMap();

    /** Looks up a single cached book by its numeric id. */
    public Book getFromBooksInStock(Long bookId);

    /** Looks up a single cached book by its key. */
    public Book getFromBooksInStock(BookKey bookKey);

    /** Looks up the cached books for a collection of numeric ids. */
    public List<Book> getFromBooksInStock(Collection<Long> bookIds);

    /** Looks up the cached books for a collection of keys, returned as a map. */
    public Map<BookKey, Book> getFromBooksInStockAsMap(Collection<BookKey> bookKeys);

    /** Returns the cached books that satisfy the given criteria. */
    public List<Book> getFromBooksInStock(BookCriteria bookCriteria);

    /** Returns the cached books that match the given books (matching rule defined by the implementation). */
    public List<Book> getMatchingBooksInStock(Collection<Book> bookList);

    /** Same as {@link #getMatchingBooksInStock(Collection)} but keyed by {@link BookKey}. */
    public Map<BookKey, Book> getMatchingBooksInStockAsMap(Collection<Book> bookList);

    /** Replaces the cache contents with the given map. */
    public void setBooksInStock(Map<BookKey, Book> bookMap);

    /** Adds a single book to the cache. */
    public void addToBooksInStock(Book book);

    /** Adds a collection of books to the cache. */
    public void addToBooksInStock(Collection<Book> bookList);

    /** Adds all entries of the given map to the cache. */
    public void addToBooksInStock(Map<BookKey, Book> bookMap);

    /** Clears the cache entirely. */
    public void removeAllBooksInStock();

    /** Removes a single book from the cache. */
    public void removeFromBooksInStock(Book book);

    /** Removes the book with the given numeric id from the cache. */
    public void removeFromBooksInStock(Long bookId);

    /** Removes the book with the given key from the cache. */
    public void removeFromBooksInStock(BookKey bookKey);

    /** Removes a collection of books from the cache. */
    public void removeFromBooksInStock(Collection<Book> bookList);

    /** Removes every cached book that satisfies the given criteria. */
    public void removeFromBooksInStock(BookCriteria bookCriteria);
}
tfisher1226/ARIES
bookshop2/bookshop2-supplier/bookshop2-supplier-service/src/main/java/bookshop2/supplier/data/supplierOrderCache/SupplierOrderCache.java
Java
apache-2.0
1,392
/** * Copyright 2009 Roland Foerther, Carl-Eric-Menzel, Olaf Siefart * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.senacor.wbs.web.core; import java.util.Locale; import junit.framework.TestCase; import org.apache.wicket.util.convert.ConversionException; import com.senacor.domain.project.ProjectState; public class TestEnumConverter extends TestCase { private EnumConverter ec; @Override protected void setUp() throws Exception { ec = new EnumConverter(ProjectState.class); } public void testConvertToObject() throws Exception { Object obj = ec.convertToObject("In Development", Locale.getDefault()); assertTrue(obj instanceof ProjectState); assertEquals(ProjectState.IN_DEVELOPMENT, obj); obj = ec.convertToObject(" Final ", Locale.getDefault()); assertEquals(ProjectState.FINAL, obj); try { obj = ec.convertToObject("Unknown911", Locale.getDefault()); fail("ConversionException expected here"); } catch (ConversionException ex) { // ok } catch (Exception ex) { fail("ConversionException expected, but was: " + ex); } } public void testConvertToString() throws Exception { String str = ec.convertToString(ProjectState.IN_DEVELOPMENT, Locale.getDefault()); assertEquals("In Development", str); str = ec.convertToString(ProjectState.FINAL, Locale.getDefault()); assertEquals("Final", str); } }
duesenklipper/wicketbuch-wbs
wbs-web-client/src/test/java/com/senacor/wbs/web/core/TestEnumConverter.java
Java
apache-2.0
1,936
namespace ts { describe("convertCompilerOptionsFromJson", () => { const formatDiagnosticHost: FormatDiagnosticsHost = { getCurrentDirectory: () => "/apath/", getCanonicalFileName: createGetCanonicalFileName(/*useCaseSensitiveFileNames*/ true), getNewLine: () => "\n" }; interface ExpectedResultWithParsingSuccess { compilerOptions: CompilerOptions; errors: ReadonlyArray<Diagnostic>; } interface ExpectedResultWithParsingFailure { compilerOptions: CompilerOptions; hasParseErrors: true; } type ExpectedResult = ExpectedResultWithParsingSuccess | ExpectedResultWithParsingFailure; function isExpectedResultWithParsingFailure(expectedResult: ExpectedResult): expectedResult is ExpectedResultWithParsingFailure { return !!(expectedResult as ExpectedResultWithParsingFailure).hasParseErrors; } function assertCompilerOptions(json: any, configFileName: string, expectedResult: ExpectedResultWithParsingSuccess) { assertCompilerOptionsWithJson(json, configFileName, expectedResult); assertCompilerOptionsWithJsonNode(json, configFileName, expectedResult); } function assertCompilerOptionsWithJson(json: any, configFileName: string, expectedResult: ExpectedResultWithParsingSuccess) { const { options: actualCompilerOptions, errors: actualErrors } = convertCompilerOptionsFromJson(json.compilerOptions, "/apath/", configFileName); const parsedCompilerOptions = JSON.stringify(actualCompilerOptions); const expectedCompilerOptions = JSON.stringify({ ...expectedResult.compilerOptions, configFilePath: configFileName }); assert.equal(parsedCompilerOptions, expectedCompilerOptions); verifyErrors(actualErrors, expectedResult.errors, /*ignoreLocation*/ true); } function assertCompilerOptionsWithJsonNode(json: any, configFileName: string, expectedResult: ExpectedResultWithParsingSuccess) { assertCompilerOptionsWithJsonText(JSON.stringify(json), configFileName, expectedResult); } function assertCompilerOptionsWithJsonText(fileText: string, configFileName: string, expectedResult: ExpectedResult) { const 
result = parseJsonText(configFileName, fileText); assert(!!result.endOfFileToken); assert.equal(!!result.parseDiagnostics.length, isExpectedResultWithParsingFailure(expectedResult)); const host: ParseConfigHost = new fakes.ParseConfigHost(new vfs.FileSystem(/*ignoreCase*/ false, { cwd: "/apath/" })); const { options: actualCompilerOptions, errors: actualParseErrors } = parseJsonSourceFileConfigFileContent(result, host, "/apath/", /*existingOptions*/ undefined, configFileName); expectedResult.compilerOptions.configFilePath = configFileName; const parsedCompilerOptions = JSON.stringify(actualCompilerOptions); const expectedCompilerOptions = JSON.stringify(expectedResult.compilerOptions); assert.equal(parsedCompilerOptions, expectedCompilerOptions); assert.equal(actualCompilerOptions.configFile, result); if (!isExpectedResultWithParsingFailure(expectedResult)) { verifyErrors(actualParseErrors.filter(error => error.code !== Diagnostics.No_inputs_were_found_in_config_file_0_Specified_include_paths_were_1_and_exclude_paths_were_2.code), expectedResult.errors); } } function verifyErrors(actualErrors: Diagnostic[], expectedErrors: ReadonlyArray<Diagnostic>, ignoreLocation?: boolean) { assert.isTrue(expectedErrors.length === actualErrors.length, `Expected error: ${JSON.stringify(expectedErrors.map(getDiagnosticString), undefined, " ")}. Actual error: ${JSON.stringify(actualErrors.map(getDiagnosticString), undefined, " ")}.`); for (let i = 0; i < actualErrors.length; i++) { const actualError = actualErrors[i]; const expectedError = expectedErrors[i]; assert.equal(actualError.code, expectedError.code, `Expected error-code: ${JSON.stringify(expectedError.code)}. Actual error-code: ${JSON.stringify(actualError.code)}.`); assert.equal(actualError.category, expectedError.category, `Expected error-category: ${JSON.stringify(expectedError.category)}. 
Actual error-category: ${JSON.stringify(actualError.category)}.`); if (!ignoreLocation) { assert(actualError.file); assert(actualError.start); assert(actualError.length); } } function getDiagnosticString(diagnostic: Diagnostic) { if (ignoreLocation) { const { file, ...rest } = diagnostic; diagnostic = { file: undefined, ...rest }; } return formatDiagnostic(diagnostic, formatDiagnosticHost); } } // tsconfig.json tests it("Convert correctly format tsconfig.json to compiler-options ", () => { assertCompilerOptions( { compilerOptions: { module: "commonjs", target: "es5", noImplicitAny: false, sourceMap: false, lib: ["es5", "es2015.core", "es2015.symbol"] } }, "tsconfig.json", { compilerOptions: { module: ModuleKind.CommonJS, target: ScriptTarget.ES5, noImplicitAny: false, sourceMap: false, lib: ["lib.es5.d.ts", "lib.es2015.core.d.ts", "lib.es2015.symbol.d.ts"] }, errors: [] } ); }); it("Convert correctly format tsconfig.json with allowJs is false to compiler-options ", () => { assertCompilerOptions( { compilerOptions: { module: "commonjs", target: "es5", noImplicitAny: false, sourceMap: false, allowJs: false, lib: ["es5", "es2015.core", "es2015.symbol"] } }, "tsconfig.json", { compilerOptions: { module: ModuleKind.CommonJS, target: ScriptTarget.ES5, noImplicitAny: false, sourceMap: false, allowJs: false, lib: ["lib.es5.d.ts", "lib.es2015.core.d.ts", "lib.es2015.symbol.d.ts"] }, errors: [] } ); }); it("Convert incorrect option of jsx to compiler-options ", () => { assertCompilerOptions( { compilerOptions: { module: "commonjs", target: "es5", noImplicitAny: false, sourceMap: false, jsx: "" } }, "tsconfig.json", { compilerOptions: { module: ModuleKind.CommonJS, target: ScriptTarget.ES5, noImplicitAny: false, sourceMap: false, }, errors: [{ file: undefined, start: 0, length: 0, messageText: "Argument for '--jsx' option must be: 'preserve', 'react-native', 'react'.", code: Diagnostics.Argument_for_0_option_must_be_Colon_1.code, category: 
Diagnostics.Argument_for_0_option_must_be_Colon_1.category }] } ); }); it("Convert incorrect option of module to compiler-options ", () => { assertCompilerOptions( { compilerOptions: { module: "", target: "es5", noImplicitAny: false, sourceMap: false, } }, "tsconfig.json", { compilerOptions: { target: ScriptTarget.ES5, noImplicitAny: false, sourceMap: false, }, errors: [{ file: undefined, start: 0, length: 0, messageText: "Argument for '--module' option must be: 'none', 'commonjs', 'amd', 'system', 'umd', 'es6', 'es2015', 'esnext'.", code: Diagnostics.Argument_for_0_option_must_be_Colon_1.code, category: Diagnostics.Argument_for_0_option_must_be_Colon_1.category }] } ); }); it("Convert incorrect option of newLine to compiler-options ", () => { assertCompilerOptions( { compilerOptions: { newLine: "", target: "es5", noImplicitAny: false, sourceMap: false, } }, "tsconfig.json", { compilerOptions: { target: ScriptTarget.ES5, noImplicitAny: false, sourceMap: false, }, errors: [{ file: undefined, start: 0, length: 0, messageText: "Argument for '--newLine' option must be: 'crlf', 'lf'.", code: Diagnostics.Argument_for_0_option_must_be_Colon_1.code, category: Diagnostics.Argument_for_0_option_must_be_Colon_1.category }] } ); }); it("Convert incorrect option of target to compiler-options ", () => { assertCompilerOptions( { compilerOptions: { target: "", noImplicitAny: false, sourceMap: false, } }, "tsconfig.json", { compilerOptions: { noImplicitAny: false, sourceMap: false, }, errors: [{ file: undefined, start: 0, length: 0, messageText: "Argument for '--target' option must be: 'es3', 'es5', 'es6', 'es2015', 'es2016', 'es2017', 'es2018', 'esnext'.", code: Diagnostics.Argument_for_0_option_must_be_Colon_1.code, category: Diagnostics.Argument_for_0_option_must_be_Colon_1.category }] } ); }); it("Convert incorrect option of module-resolution to compiler-options ", () => { assertCompilerOptions( { compilerOptions: { moduleResolution: "", noImplicitAny: false, sourceMap: false, 
} }, "tsconfig.json", { compilerOptions: { noImplicitAny: false, sourceMap: false, }, errors: [{ file: undefined, start: 0, length: 0, messageText: "Argument for '--moduleResolution' option must be: 'node', 'classic'.", code: Diagnostics.Argument_for_0_option_must_be_Colon_1.code, category: Diagnostics.Argument_for_0_option_must_be_Colon_1.category }] } ); }); it("Convert incorrect option of libs to compiler-options ", () => { assertCompilerOptions( { compilerOptions: { module: "commonjs", target: "es5", noImplicitAny: false, sourceMap: false, lib: ["es5", "es2015.core", "incorrectLib"] } }, "tsconfig.json", { compilerOptions: { module: ModuleKind.CommonJS, target: ScriptTarget.ES5, noImplicitAny: false, sourceMap: false, lib: ["lib.es5.d.ts", "lib.es2015.core.d.ts"] }, errors: [{ file: undefined, start: 0, length: 0, messageText: "Argument for '--lib' option must be: 'es5', 'es6', 'es2015', 'es7', 'es2016', 'es2017', 'es2018', 'esnext', 'dom', 'dom.iterable', 'webworker', 'webworker.importscripts', 'scripthost', 'es2015.core', 'es2015.collection', 'es2015.generator', 'es2015.iterable', 'es2015.promise', 'es2015.proxy', 'es2015.reflect', 'es2015.symbol', 'es2015.symbol.wellknown', 'es2016.array.include', 'es2017.object', 'es2017.sharedmemory', 'es2017.string', 'es2017.intl', 'es2017.typedarrays', 'es2018.intl', 'es2018.promise', 'es2018.regexp', 'esnext.array', 'esnext.symbol', 'esnext.asynciterable', 'esnext.intl'.", code: Diagnostics.Argument_for_0_option_must_be_Colon_1.code, category: Diagnostics.Argument_for_0_option_must_be_Colon_1.category }] } ); }); it("Convert empty string option of libs to compiler-options ", () => { assertCompilerOptions( { compilerOptions: { module: "commonjs", target: "es5", noImplicitAny: false, sourceMap: false, lib: ["es5", ""] } }, "tsconfig.json", { compilerOptions: { module: ModuleKind.CommonJS, target: ScriptTarget.ES5, noImplicitAny: false, sourceMap: false, lib: ["lib.es5.d.ts"] }, errors: [{ file: undefined, start: 0, 
length: 0, messageText: "Argument for '--lib' option must be: 'es5', 'es6', 'es2015', 'es7', 'es2016', 'es2017', 'es2018', 'esnext', 'dom', 'dom.iterable', 'webworker', 'webworker.importscripts', 'scripthost', 'es2015.core', 'es2015.collection', 'es2015.generator', 'es2015.iterable', 'es2015.promise', 'es2015.proxy', 'es2015.reflect', 'es2015.symbol', 'es2015.symbol.wellknown', 'es2016.array.include', 'es2017.object', 'es2017.sharedmemory', 'es2017.string', 'es2017.intl', 'es2017.typedarrays', 'es2018.intl', 'es2018.promise', 'es2018.regexp', 'esnext.array', 'esnext.symbol', 'esnext.asynciterable', 'esnext.intl'.", code: Diagnostics.Argument_for_0_option_must_be_Colon_1.code, category: Diagnostics.Argument_for_0_option_must_be_Colon_1.category }] } ); }); it("Convert empty string option of libs to compiler-options ", () => { assertCompilerOptions( { compilerOptions: { module: "commonjs", target: "es5", noImplicitAny: false, sourceMap: false, lib: [""] } }, "tsconfig.json", { compilerOptions: { module: ModuleKind.CommonJS, target: ScriptTarget.ES5, noImplicitAny: false, sourceMap: false, lib: [] }, errors: [{ file: undefined, start: 0, length: 0, messageText: "Argument for '--lib' option must be: 'es5', 'es6', 'es2015', 'es7', 'es2016', 'es2017', 'es2018', 'esnext', 'dom', 'dom.iterable', 'webworker', 'webworker.importscripts', 'scripthost', 'es2015.core', 'es2015.collection', 'es2015.generator', 'es2015.iterable', 'es2015.promise', 'es2015.proxy', 'es2015.reflect', 'es2015.symbol', 'es2015.symbol.wellknown', 'es2016.array.include', 'es2017.object', 'es2017.sharedmemory', 'es2017.string', 'es2017.intl', 'es2017.typedarrays', 'es2018.intl', 'es2018.promise', 'es2018.regexp', 'esnext.array', 'esnext.symbol', 'esnext.asynciterable', 'esnext.intl'.", code: Diagnostics.Argument_for_0_option_must_be_Colon_1.code, category: Diagnostics.Argument_for_0_option_must_be_Colon_1.category }] } ); }); it("Convert trailing-whitespace string option of libs to compiler-options ", () 
=> { assertCompilerOptions( { compilerOptions: { module: "commonjs", target: "es5", noImplicitAny: false, sourceMap: false, lib: [" "] } }, "tsconfig.json", { compilerOptions: { module: ModuleKind.CommonJS, target: ScriptTarget.ES5, noImplicitAny: false, sourceMap: false, lib: [] }, errors: [{ file: undefined, start: 0, length: 0, messageText: "Argument for '--lib' option must be: 'es5', 'es6', 'es2015', 'es7', 'es2016', 'es2017', 'es2018', 'esnext', 'dom', 'dom.iterable', 'webworker', 'webworker.importscripts', 'scripthost', 'es2015.core', 'es2015.collection', 'es2015.generator', 'es2015.iterable', 'es2015.promise', 'es2015.proxy', 'es2015.reflect', 'es2015.symbol', 'es2015.symbol.wellknown', 'es2016.array.include', 'es2017.object', 'es2017.sharedmemory', 'es2017.string', 'es2017.intl', 'es2017.typedarrays', 'es2018.intl', 'es2018.promise', 'es2018.regexp', 'esnext.array', 'esnext.symbol', 'esnext.asynciterable', 'esnext.intl'.", code: Diagnostics.Argument_for_0_option_must_be_Colon_1.code, category: Diagnostics.Argument_for_0_option_must_be_Colon_1.category }] } ); }); it("Convert empty option of libs to compiler-options ", () => { assertCompilerOptions( { compilerOptions: { module: "commonjs", target: "es5", noImplicitAny: false, sourceMap: false, lib: [] } }, "tsconfig.json", { compilerOptions: { module: ModuleKind.CommonJS, target: ScriptTarget.ES5, noImplicitAny: false, sourceMap: false, lib: [] }, errors: [] } ); }); it("Convert incorrectly format tsconfig.json to compiler-options ", () => { assertCompilerOptions( { compilerOptions: { modu: "commonjs", } }, "tsconfig.json", { compilerOptions: {}, errors: [{ file: undefined, start: 0, length: 0, messageText: "Unknown compiler option 'modu'.", code: Diagnostics.Unknown_compiler_option_0.code, category: Diagnostics.Unknown_compiler_option_0.category }] } ); }); it("Convert default tsconfig.json to compiler-options ", () => { assertCompilerOptions({}, "tsconfig.json", { compilerOptions: {}, errors: [] } ); }); 
it("Convert negative numbers in tsconfig.json ", () => { assertCompilerOptions( { compilerOptions: { allowJs: true, maxNodeModuleJsDepth: -1 } }, "tsconfig.json", { compilerOptions: { allowJs: true, maxNodeModuleJsDepth: -1 }, errors: [] } ); }); // jsconfig.json it("Convert correctly format jsconfig.json to compiler-options ", () => { assertCompilerOptions( { compilerOptions: { module: "commonjs", target: "es5", noImplicitAny: false, sourceMap: false, lib: ["es5", "es2015.core", "es2015.symbol"] } }, "jsconfig.json", { compilerOptions: { allowJs: true, maxNodeModuleJsDepth: 2, allowSyntheticDefaultImports: true, skipLibCheck: true, noEmit: true, module: ModuleKind.CommonJS, target: ScriptTarget.ES5, noImplicitAny: false, sourceMap: false, lib: ["lib.es5.d.ts", "lib.es2015.core.d.ts", "lib.es2015.symbol.d.ts"] }, errors: [] } ); }); it("Convert correctly format jsconfig.json with allowJs is false to compiler-options ", () => { assertCompilerOptions( { compilerOptions: { module: "commonjs", target: "es5", noImplicitAny: false, sourceMap: false, allowJs: false, lib: ["es5", "es2015.core", "es2015.symbol"] } }, "jsconfig.json", { compilerOptions: { allowJs: false, maxNodeModuleJsDepth: 2, allowSyntheticDefaultImports: true, skipLibCheck: true, noEmit: true, module: ModuleKind.CommonJS, target: ScriptTarget.ES5, noImplicitAny: false, sourceMap: false, lib: ["lib.es5.d.ts", "lib.es2015.core.d.ts", "lib.es2015.symbol.d.ts"] }, errors: [] } ); }); it("Convert incorrectly format jsconfig.json to compiler-options ", () => { assertCompilerOptions( { compilerOptions: { modu: "commonjs", } }, "jsconfig.json", { compilerOptions: { allowJs: true, maxNodeModuleJsDepth: 2, allowSyntheticDefaultImports: true, skipLibCheck: true, noEmit: true }, errors: [{ file: undefined, start: 0, length: 0, messageText: "Unknown compiler option 'modu'.", code: Diagnostics.Unknown_compiler_option_0.code, category: Diagnostics.Unknown_compiler_option_0.category }] } ); }); it("Convert default 
jsconfig.json to compiler-options ", () => { assertCompilerOptions({}, "jsconfig.json", { compilerOptions: { allowJs: true, maxNodeModuleJsDepth: 2, allowSyntheticDefaultImports: true, skipLibCheck: true, noEmit: true }, errors: [] } ); }); it("Convert tsconfig options when there are multiple invalid strings", () => { assertCompilerOptionsWithJsonText(`{ "compilerOptions": { "target": "<%- options.useTsWithBabel ? 'esnext' : 'es5' %>", "module": "esnext", <%_ if (options.classComponent) { _%> "experimentalDecorators": true, <%_ } _%> "sourceMap": true, "types": [ "webpack-env"<% if (hasMocha || hasJest) { %>,<% } %> <%_ if (hasMocha) { _%> "mocha", "chai" <%_ } else if (hasJest) { _%> "jest" <%_ } _%> ] } } `, "tsconfig.json", { compilerOptions: { target: undefined, module: ModuleKind.ESNext, types: [] }, hasParseErrors: true } ); }); }); }
basarat/TypeScript
src/testRunner/unittests/convertCompilerOptionsFromJson.ts
TypeScript
apache-2.0
27,605
window.MathJax = { skipStartupTypeset: true, messageStyle: 'none', 'HTML-CSS': { imageFont: null, linebreaks: { automatic: true, width: '500px' }, scale: 91, showMathMenu: false } };
souravbadami/oppia
core/templates/dev/head/mathjaxConfig.ts
TypeScript
apache-2.0
223
import os import tempfile import unittest import logging from pyidf import ValidationLevel import pyidf from pyidf.idf import IDF from pyidf.node import PipeUnderground log = logging.getLogger(__name__) class TestPipeUnderground(unittest.TestCase): def setUp(self): self.fd, self.path = tempfile.mkstemp() def tearDown(self): os.remove(self.path) def test_create_pipeunderground(self): pyidf.validation_level = ValidationLevel.error obj = PipeUnderground() # alpha var_name = "Name" obj.name = var_name # object-list var_construction_name = "object-list|Construction Name" obj.construction_name = var_construction_name # node var_fluid_inlet_node_name = "node|Fluid Inlet Node Name" obj.fluid_inlet_node_name = var_fluid_inlet_node_name # node var_fluid_outlet_node_name = "node|Fluid Outlet Node Name" obj.fluid_outlet_node_name = var_fluid_outlet_node_name # alpha var_sun_exposure = "SunExposed" obj.sun_exposure = var_sun_exposure # real var_pipe_inside_diameter = 0.0001 obj.pipe_inside_diameter = var_pipe_inside_diameter # real var_pipe_length = 0.0001 obj.pipe_length = var_pipe_length # alpha var_soil_material_name = "Soil Material Name" obj.soil_material_name = var_soil_material_name # alpha var_undisturbed_ground_temperature_model_type = "Site:GroundTemperature:Undisturbed:FiniteDifference" obj.undisturbed_ground_temperature_model_type = var_undisturbed_ground_temperature_model_type # object-list var_undisturbed_ground_temperature_model_name = "object-list|Undisturbed Ground Temperature Model Name" obj.undisturbed_ground_temperature_model_name = var_undisturbed_ground_temperature_model_name idf = IDF() idf.add(obj) idf.save(self.path, check=False) with open(self.path, mode='r') as f: for line in f: log.debug(line.strip()) idf2 = IDF(self.path) self.assertEqual(idf2.pipeundergrounds[0].name, var_name) self.assertEqual(idf2.pipeundergrounds[0].construction_name, var_construction_name) self.assertEqual(idf2.pipeundergrounds[0].fluid_inlet_node_name, 
var_fluid_inlet_node_name) self.assertEqual(idf2.pipeundergrounds[0].fluid_outlet_node_name, var_fluid_outlet_node_name) self.assertEqual(idf2.pipeundergrounds[0].sun_exposure, var_sun_exposure) self.assertAlmostEqual(idf2.pipeundergrounds[0].pipe_inside_diameter, var_pipe_inside_diameter) self.assertAlmostEqual(idf2.pipeundergrounds[0].pipe_length, var_pipe_length) self.assertEqual(idf2.pipeundergrounds[0].soil_material_name, var_soil_material_name) self.assertEqual(idf2.pipeundergrounds[0].undisturbed_ground_temperature_model_type, var_undisturbed_ground_temperature_model_type) self.assertEqual(idf2.pipeundergrounds[0].undisturbed_ground_temperature_model_name, var_undisturbed_ground_temperature_model_name)
rbuffat/pyidf
tests/test_pipeunderground.py
Python
apache-2.0
3,135
package org.gradle.test.performance.mediummonolithicjavaproject.p382; import org.junit.Test; import static org.junit.Assert.*; public class Test7645 { Production7645 objectUnderTest = new Production7645(); @Test public void testProperty0() { String value = "value"; objectUnderTest.setProperty0(value); assertEquals(value, objectUnderTest.getProperty0()); } @Test public void testProperty1() { String value = "value"; objectUnderTest.setProperty1(value); assertEquals(value, objectUnderTest.getProperty1()); } @Test public void testProperty2() { String value = "value"; objectUnderTest.setProperty2(value); assertEquals(value, objectUnderTest.getProperty2()); } @Test public void testProperty3() { String value = "value"; objectUnderTest.setProperty3(value); assertEquals(value, objectUnderTest.getProperty3()); } @Test public void testProperty4() { String value = "value"; objectUnderTest.setProperty4(value); assertEquals(value, objectUnderTest.getProperty4()); } @Test public void testProperty5() { String value = "value"; objectUnderTest.setProperty5(value); assertEquals(value, objectUnderTest.getProperty5()); } @Test public void testProperty6() { String value = "value"; objectUnderTest.setProperty6(value); assertEquals(value, objectUnderTest.getProperty6()); } @Test public void testProperty7() { String value = "value"; objectUnderTest.setProperty7(value); assertEquals(value, objectUnderTest.getProperty7()); } @Test public void testProperty8() { String value = "value"; objectUnderTest.setProperty8(value); assertEquals(value, objectUnderTest.getProperty8()); } @Test public void testProperty9() { String value = "value"; objectUnderTest.setProperty9(value); assertEquals(value, objectUnderTest.getProperty9()); } }
oehme/analysing-gradle-performance
my-app/src/test/java/org/gradle/test/performance/mediummonolithicjavaproject/p382/Test7645.java
Java
apache-2.0
2,111
package cn.hugeterry.updatefun.view; import android.app.Activity; import android.os.Bundle; import android.view.View; import android.view.View.OnClickListener; import android.widget.ImageView; import android.widget.ProgressBar; import android.widget.TextView; import cn.hugeterry.updatefun.config.DownloadKey; import cn.hugeterry.updatefun.R; import cn.hugeterry.updatefun.module.Download; /** * Created by hugeterry(http://hugeterry.cn) */ public class DownLoadDialog extends Activity { private ImageView close; public ProgressBar progressBar; public TextView textView; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.download_dialog); close = (ImageView) findViewById(R.id.downloaddialog_close); progressBar = (ProgressBar) findViewById(R.id.downloaddialog_progress); textView = (TextView) findViewById(R.id.downloaddialog_count); if (DownloadKey.interceptFlag) DownloadKey.interceptFlag = false; new Download(this).start(); close.setOnClickListener(new OnClickListener() { @Override public void onClick(View arg0) { DownloadKey.TOShowDownloadView = 1; DownloadKey.interceptFlag = true; if (DownloadKey.ISManual) { DownloadKey.LoadManual = false; } finish(); } }); } }
hugeterry/UpdateDemo
updatefun/src/main/java/cn/hugeterry/updatefun/view/DownLoadDialog.java
Java
apache-2.0
1,492
// Copyright 2017 Google Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * controller-ray * * Ray line indicator for VR hand controllers. * * This replaces the parabola arc whenever the user is not holding the * controller button down. */ import { Scene } from '../core/scene'; if ( typeof AFRAME !== 'undefined' && AFRAME ) { AFRAME.registerComponent( 'controller-ray', { schema: { width: { default: 0.005 }, length: { default: 1 } }, init: function() { this.isInteractive = false; this.geometry = new THREE.PlaneBufferGeometry( this.data.width, this.data.length ); this.geometry.rotateX( Math.PI / -2 ); this.geometry.translate( 0, 0, this.data.length / -2 ); this.material = new THREE.MeshBasicMaterial(); this.mesh = new THREE.Mesh( this.geometry, this.material ); this.el.setObject3D( 'mesh', this.mesh ); this.el.setAttribute( 'visible', false ); this.el.sceneEl.addEventListener( 'terrain-intersected-cleared', () => { if ( !this.isInteractive ) return; if ( Scene.controllerType === 'mouse-touch' ) return; this.el.setAttribute( 'visible', true ); }); this.el.sceneEl.addEventListener( 'terrain-intersected', () => { if ( !this.isInteractive ) return; if ( Scene.controllerType === 'mouse-touch' ) return; this.el.setAttribute( 'visible', false ); }); this.el.sceneEl.addEventListener( 'stateadded', event => { if ( event.detail.state === 'interactive' ) this.isInteractive = true; if ( event.target !== this.el.sceneEl ) return; }); this.el.sceneEl.addEventListener( 
'stateremoved', event => { if ( event.detail.state === 'interactive' ) this.isInteractive = false; if ( event.target !== this.el.sceneEl ) return; }); }, play: function() { } }); }
googlecreativelab/access-mars
src/js/components/controller-ray.js
JavaScript
apache-2.0
2,310
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.calcite.adapter.druid; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rex.RexCall; import org.apache.calcite.rex.RexNode; import org.apache.calcite.sql.SqlOperator; import org.apache.calcite.sql.fun.SqlStdOperatorTable; import org.apache.calcite.sql.type.SqlTypeName; import com.google.common.collect.ImmutableList; import org.joda.time.Period; import java.util.TimeZone; /** * Druid cast converter operator used to translates calcite casts to Druid expression cast */ public class DruidSqlCastConverter implements DruidSqlOperatorConverter { @Override public SqlOperator calciteOperator() { return SqlStdOperatorTable.CAST; } @Override public String toDruidExpression(RexNode rexNode, RelDataType topRel, DruidQuery druidQuery) { final RexNode operand = ((RexCall) rexNode).getOperands().get(0); final String operandExpression = DruidExpressions.toDruidExpression(operand, topRel, druidQuery); if (operandExpression == null) { return null; } final SqlTypeName fromType = operand.getType().getSqlTypeName(); final SqlTypeName toType = rexNode.getType().getSqlTypeName(); final String timeZoneConf = druidQuery.getConnectionConfig().timeZone(); final TimeZone timeZone = 
TimeZone.getTimeZone(timeZoneConf == null ? "UTC" : timeZoneConf); if (SqlTypeName.CHAR_TYPES.contains(fromType) && SqlTypeName.DATETIME_TYPES.contains(toType)) { //case chars to dates return castCharToDateTime(timeZone, operandExpression, toType); } else if (SqlTypeName.DATETIME_TYPES.contains(fromType) && SqlTypeName.CHAR_TYPES.contains (toType)) { //case dates to chars return castDateTimeToChar(timeZone, operandExpression, fromType); } else { // Handle other casts. final DruidType fromExprType = DruidExpressions.EXPRESSION_TYPES.get(fromType); final DruidType toExprType = DruidExpressions.EXPRESSION_TYPES.get(toType); if (fromExprType == null || toExprType == null) { // Unknown types bail out. return null; } final String typeCastExpression; if (fromExprType != toExprType) { typeCastExpression = DruidQuery.format("CAST(%s, '%s')", operandExpression, toExprType .toString()); } else { // case it is the same type it is ok to skip CAST typeCastExpression = operandExpression; } if (toType == SqlTypeName.DATE) { // Floor to day when casting to DATE. return DruidExpressions.applyTimestampFloor( typeCastExpression, Period.days(1).toString(), "", TimeZone.getTimeZone(druidQuery.getConnectionConfig().timeZone())); } else { return typeCastExpression; } } } private static String castCharToDateTime( TimeZone timeZone, String operand, final SqlTypeName toType) { // Cast strings to date times by parsing them from SQL format. 
final String timestampExpression = DruidExpressions.functionCall( "timestamp_parse", ImmutableList.of( operand, DruidExpressions.stringLiteral(""), DruidExpressions.stringLiteral(timeZone.getID()))); if (toType == SqlTypeName.DATE) { // case to date we need to floor to day first return DruidExpressions.applyTimestampFloor( timestampExpression, Period.days(1).toString(), "", timeZone); } else if (toType == SqlTypeName.TIMESTAMP || toType == SqlTypeName .TIMESTAMP_WITH_LOCAL_TIME_ZONE) { return timestampExpression; } else { throw new IllegalStateException( DruidQuery.format("Unsupported DateTime type[%s]", toType)); } } private static String castDateTimeToChar( final TimeZone timeZone, final String operand, final SqlTypeName fromType) { return DruidExpressions.functionCall( "timestamp_format", ImmutableList.of( operand, DruidExpressions.stringLiteral(dateTimeFormatString(fromType)), DruidExpressions.stringLiteral(timeZone.getID()))); } public static String dateTimeFormatString(final SqlTypeName sqlTypeName) { if (sqlTypeName == SqlTypeName.DATE) { return "yyyy-MM-dd"; } else if (sqlTypeName == SqlTypeName.TIMESTAMP) { return "yyyy-MM-dd HH:mm:ss"; } else if (sqlTypeName == sqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE) { return "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"; } else { return null; } } } // End DruidSqlCastConverter.java
b-slim/calcite
druid/src/main/java/org/apache/calcite/adapter/druid/DruidSqlCastConverter.java
Java
apache-2.0
5,454
package com.crossge.necessities.Commands;

import com.crossge.necessities.Console;
import com.crossge.necessities.Necessities;
import com.crossge.necessities.RankManager.User;
import com.crossge.necessities.RankManager.UserManager;
import com.crossge.necessities.Variables;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;

import java.util.UUID;

/**
 * /reply command: sends a private message to the last party (player or
 * console) that the sender exchanged a private message with.
 */
public class CmdReply implements Cmd {

    /**
     * Handles the /reply command for both player and console senders.
     *
     * @param sender the command issuer (a Player, or the console otherwise)
     * @param args   the words of the reply; joined with single spaces to form the message
     * @return always true (every branch treats the command as handled)
     */
    public boolean commandUse(CommandSender sender, String[] args) {
        Variables var = Necessities.getVar();
        // Reject an empty reply up front.
        if (args.length == 0) {
            sender.sendMessage(var.getEr() + "Error: " + var.getErMsg() + "You must enter a reply to send.");
            return true;
        }
        Console console = Necessities.getConsole();
        UserManager um = Necessities.getUM();
        if (sender instanceof Player) {
            Player p = (Player) sender;
            User self = um.getUser(p.getUniqueId());
            // Muted players cannot send private messages.
            if (self.isMuted()) {
                sender.sendMessage(var.getEr() + "Error: " + var.getErMsg() + "You are muted.");
                return true;
            }
            // getLastC() holds the last conversation partner: a UUID string or
            // the literal "Console"; null means there is nobody to reply to.
            if (self.getLastC() == null) {
                sender.sendMessage(var.getEr() + "Error: " + var.getErMsg() + "You have not messaged anyone yet.");
                return true;
            }
            if (self.getLastC().equals("Console")) {
                // Reply target is the console. Rebuild the message text from args.
                StringBuilder messageBuilder = new StringBuilder();
                for (String arg : args)
                    messageBuilder.append(arg).append(" ");
                String message = ChatColor.WHITE + messageBuilder.toString().trim();
                // Players with colorchat may use '&' color codes; without
                // magicchat the obfuscation code "&k" is stripped first.
                if (p.hasPermission("Necessities.colorchat"))
                    message = ChatColor.translateAlternateColorCodes('&', (p.hasPermission("Necessities.magicchat") ? message : message.replaceAll("&k", "")));
                // Record both ends of the conversation so either side can /reply.
                self.setLastC("Console");
                console.setLastContact(self.getUUID());
                p.sendMessage(var.getMessages() + "[me -> " + console.getName().replaceAll(":", "") + "] " + message);
                Bukkit.getConsoleSender().sendMessage(var.getMessages() + "[" + p.getDisplayName() + var.getMessages() + " -> me] " + message);
                return true;
            }
            // Otherwise getLastC() is the UUID string of the last player messaged.
            User u = um.getUser(UUID.fromString(self.getLastC()));
            Player t = Bukkit.getPlayer(u.getUUID());
            if (t == null) {
                sender.sendMessage(var.getEr() + "Error: " + var.getErMsg() + "That player is not online.");
                return true;
            }
            // Ignore lists are honored in both directions.
            if (u.isIgnoring(p.getUniqueId())) {
                sender.sendMessage(var.getEr() + "Error: " + var.getErMsg() + "That user is ignoring you, so you cannot reply.");
                return true;
            }
            if (self.isIgnoring(u.getUUID())) {
                sender.sendMessage(var.getEr() + "Error: " + var.getErMsg() + "You are ignoring that user, so cannot message them.");
                return true;
            }
            StringBuilder messageBuilder = new StringBuilder();
            for (String arg : args)
                messageBuilder.append(arg).append(" ");
            String message = ChatColor.WHITE + messageBuilder.toString().trim();
            if (p.hasPermission("Necessities.colorchat"))
                message = ChatColor.translateAlternateColorCodes('&', (p.hasPermission("Necessities.magicchat") ? message : message.replaceAll("&k", "")));
            // Update last-contact on both users so either side can /reply next.
            u.setLastC(self.getUUID().toString());
            self.setLastC(u.getUUID().toString());
            p.sendMessage(var.getMessages() + "[me -> " + t.getDisplayName() + var.getMessages() + "] " + message);
            t.sendMessage(var.getMessages() + "[" + p.getDisplayName() + var.getMessages() + " -> me] " + message);
        } else {
            // Sender is the console: reply to the last player who messaged it.
            if (console.getLastContact() == null) {
                sender.sendMessage(var.getEr() + "Error: " + var.getErMsg() + "You have not messaged anyone yet.");
                return true;
            }
            User u = um.getUser(console.getLastContact());
            // NOTE(review): unlike the player branch, there is no null check on
            // t here -- if the target logged off, t.getDisplayName() below
            // would throw; confirm whether getLastContact() is cleared on quit.
            Player t = Bukkit.getPlayer(u.getUUID());
            StringBuilder messageBuilder = new StringBuilder();
            for (String arg : args)
                messageBuilder.append(arg).append(" ");
            String message = ChatColor.WHITE + messageBuilder.toString().trim();
            // Console messages always get '&' color codes translated.
            message = ChatColor.translateAlternateColorCodes('&', message);
            u.setLastC("Console");
            console.setLastContact(u.getUUID());
            sender.sendMessage(var.getMessages() + "[me -> " + t.getDisplayName() + var.getMessages() + "] " + message);
            t.sendMessage(var.getMessages() + "[" + console.getName().replaceAll(":", "") + " -> me] " + message);
        }
        return true;
    }
}
pupnewfster/Lavasurvival
necessities/src/main/java/com/crossge/necessities/Commands/CmdReply.java
Java
apache-2.0
4,844
package boa.test.datagen.queries;

import static org.junit.Assert.assertEquals;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.commons.io.FileUtils;
import org.eclipse.jdt.core.JavaCore;
import org.eclipse.jdt.core.dom.AST;
import org.eclipse.jdt.core.dom.CompilationUnit;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.api.errors.InvalidRemoteException;
import org.eclipse.jgit.api.errors.TransportException;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevSort;
import org.eclipse.jgit.revwalk.RevWalk;
import org.eclipse.jgit.storage.file.FileRepositoryBuilder;
import org.eclipse.jgit.treewalk.TreeWalk;
import org.junit.Before;

import boa.datagen.forges.github.RepositoryCloner;
import boa.datagen.scm.GitConnector;
import boa.datagen.util.FileIO;
import boa.evaluator.BoaEvaluator;

/**
 * Base class for Boa query integration tests. Subclasses run a Boa query
 * against a pre-generated test dataset via {@link #queryTest(String, String)}
 * and can inspect the backing git repository through {@link #setPaths()} /
 * {@link #visitPath(String, org.eclipse.jdt.core.dom.ASTVisitor)}.
 */
public abstract class QueryTest {
	// Maps repository-relative file paths to the git blob id of the version
	// last seen while walking commits in setPaths().
	static Map<String, ObjectId> filePathGitObjectIds = new HashMap<String, ObjectId>();
	// Shared scratch buffer for getFileContents(); reset before each read.
	protected static final ByteArrayOutputStream buffer = new ByteArrayOutputStream(4096);
	private static Repository repository;
	private static RevWalk revwalk;

	/**
	 * Removes any leftover evaluator output directory before each test so a
	 * stale "part" file cannot pollute the next run's results.
	 */
	@Before
	public void prep() {
		final File outputDir = new File("test/datagen/temp_output");
		if (outputDir.exists()) {
			try {
				FileUtils.deleteDirectory(outputDir);
			} catch (final IOException e) {
				e.printStackTrace();
			}
		}
	}

	/**
	 * Returns the contents of the first "part*" output file produced by the
	 * evaluator, or the empty string if none exists.
	 *
	 * @param outputDir the evaluator output directory to scan
	 */
	public String getResults(final File outputDir) {
		for (final File f : outputDir.listFiles()) {
			if (f.getName().startsWith("part")) {
				return FileIO.readFileContents(f);
			}
		}
		return "";
	}

	/**
	 * Runs the Boa evaluator on the query at {@code inputPath} against the
	 * bundled test dataset, deletes the output directory, and asserts the
	 * produced output equals {@code expected}.
	 *
	 * @param inputPath path to the Boa query source file
	 * @param expected  the full expected query output
	 */
	public void queryTest(final String inputPath, final String expected) {
		final String[] args = { "-i", inputPath, "-d", "test/datagen/test_datagen", "-o", "test/datagen/temp_output" };
		BoaEvaluator.main(args);
		final File outputDir = new File("test/datagen/temp_output");
		final String actual = getResults(outputDir);
		// Clean up before asserting so a failed assertion doesn't leave the
		// directory behind for the next test.
		try {
			FileUtils.deleteDirectory(outputDir);
		} catch (final IOException e) {
			e.printStackTrace();
		}
		assertEquals(expected, actual);
	}

	/**
	 * Reads the blob with the given id from the repository into the shared
	 * buffer and returns it as a string. Errors are swallowed, in which case
	 * whatever the buffer holds (possibly empty) is returned.
	 *
	 * @param fileid git object id of the blob to read
	 */
	protected static String getFileContents(final ObjectId fileid) {
		//ObjectId fileid = filePathGitObjectIds.get(path);
		try {
			buffer.reset();
			buffer.write(repository.open(fileid, Constants.OBJ_BLOB).getCachedBytes());
		} catch (final Throwable e) {
		}
		return buffer.toString();
	}

	/**
	 * Clones the boalang/test-datagen repository if needed, opens it, walks
	 * every commit (oldest first) recording each file path's blob id into
	 * {@link #filePathGitObjectIds}, and returns the HEAD snapshot file list.
	 *
	 * @return the list of file paths in the HEAD snapshot
	 * @throws IOException if the repository cannot be opened or walked
	 */
	public List<String> setPaths() throws IOException {
		final File gitDir = new File("test/datagen/boalang/repos/boalang/test-datagen");
		if (!gitDir.exists()) {
			final String url = "https://github.com/boalang/test-datagen.git";
			try {
				RepositoryCloner.clone(new String[] { url, gitDir.getAbsolutePath() });
			} catch (final InvalidRemoteException e1) {
				e1.printStackTrace();
			} catch (final TransportException e1) {
				e1.printStackTrace();
			} catch (final IOException e1) {
				e1.printStackTrace();
			} catch (final GitAPIException e1) {
				e1.printStackTrace();
			}
		}
		repository = new FileRepositoryBuilder().setGitDir(new File(gitDir + "/.git")).build();
		final GitConnector gc = new GitConnector(gitDir.getAbsolutePath(), "test-datagen");
		gc.setRevisions();
		System.out.println("Finish processing commits");
		System.out.println("Finish building head snapshot");
		final List<String> snapshot2 = gc.getSnapshot(Constants.HEAD);
		gc.close();
		revwalk = new RevWalk(repository);
		revwalk.reset();
		final Set<RevCommit> heads = getHeads();
		revwalk.markStart(heads);
		// REVERSE sort makes the walk oldest-to-newest, so later commits
		// overwrite earlier blob ids and the map ends at each path's latest
		// version -- presumably intentional; confirm against callers.
		revwalk.sort(RevSort.TOPO, true);
		revwalk.sort(RevSort.COMMIT_TIME_DESC, true);
		revwalk.sort(RevSort.REVERSE, true);
		for (final RevCommit rc : revwalk) {
			final TreeWalk tw = new TreeWalk(repository);
			tw.reset();
			try {
				tw.addTree(rc.getTree());
				tw.setRecursive(true);
				while (tw.next())
					if (!tw.isSubtree())
						filePathGitObjectIds.put(tw.getPathString(), tw.getObjectId(0));
			} catch (final IOException e) {
			}
			tw.close();
		}
		return snapshot2;
	}

	/**
	 * Parses the recorded blob for {@code path} as a Java 8 compilation unit
	 * and applies the given JDT visitor to it. Parse failures are swallowed.
	 *
	 * @param path    repository-relative file path (must be in filePathGitObjectIds)
	 * @param visitor the AST visitor to run over the parsed compilation unit
	 */
	protected void visitPath(final String path, final org.eclipse.jdt.core.dom.ASTVisitor visitor) {
		ObjectId oi = filePathGitObjectIds.get(path);
		final org.eclipse.jdt.core.dom.ASTParser parser = org.eclipse.jdt.core.dom.ASTParser.newParser(AST.JLS8);
		parser.setKind(org.eclipse.jdt.core.dom.ASTParser.K_COMPILATION_UNIT);
		final String content = getFileContents(oi);
		parser.setSource(content.toCharArray());
		final Map<?, ?> options = JavaCore.getOptions();
		JavaCore.setComplianceOptions(JavaCore.VERSION_1_8, options);
		parser.setCompilerOptions(options);
		try {
			CompilationUnit cu = (CompilationUnit) parser.createAST(null);
			cu.accept(visitor);
		} catch (final Throwable e) {
		}
	}

	/**
	 * Resolves every local branch ref to its tip commit. Git/IO failures are
	 * swallowed and may yield a partial (or empty) set.
	 */
	private static Set<RevCommit> getHeads() {
		final Git git = new Git(repository);
		final Set<RevCommit> heads = new HashSet<RevCommit>();
		try {
			for (final Ref ref : git.branchList().call()) {
				heads.add(revwalk.parseCommit(repository.resolve(ref.getName())));
			}
		} catch (final GitAPIException e) {
		} catch (final IOException e) {
		}
		git.close();
		return heads;
	}
}
boalang/compiler
src/test/boa/test/datagen/queries/QueryTest.java
Java
apache-2.0
5,409
/*
 * Copyright (C) 2011 The Android Open Source Project Licensed under the Apache
 * License, Version 2.0 (the "License"); you may not use this file except in
 * compliance with the License. You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law
 * or agreed to in writing, software distributed under the License is
 * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the specific language
 * governing permissions and limitations under the License.
 */

package com.spollo.player.cache;

// NOTE: upstream of this class is android.util.LruCache, changes below
// expose trimToSize() to be called externally.

import android.annotation.SuppressLint;

import java.util.LinkedHashMap;
import java.util.Map;

/**
 * Static library version of {@link android.util.LruCache}. Used to write apps
 * that run on API levels prior to 12. When running on API level 12 or above,
 * this implementation is still used; it does not try to switch to the
 * framework's implementation. See the framework SDK documentation for a class
 * overview.
 */
public class LruCache<K, V> {
    private final LinkedHashMap<K, V> map;

    private final int maxSize;
    /** Size of this cache in units. Not necessarily the number of elements. */
    private int size;

    private int putCount;
    private int createCount;
    private int evictionCount;
    private int hitCount;
    private int missCount;

    /**
     * @param maxSize for caches that do not override {@link #sizeOf}, this is
     *            the maximum number of entries in the cache. For all other
     *            caches, this is the maximum sum of the sizes of the entries in
     *            this cache.
     */
    public LruCache(final int maxSize) {
        if (maxSize <= 0) {
            throw new IllegalArgumentException("maxSize <= 0");
        }
        this.maxSize = maxSize;
        // accessOrder=true makes the LinkedHashMap iterate least-recently-
        // accessed first, which is what trimToSize() relies on for eviction.
        this.map = new LinkedHashMap<K, V>(0, 0.75f, true);
    }

    /**
     * Returns the value for {@code key} if it exists in the cache or can be
     * created by {@code #create}. If a value was returned, it is moved to the
     * head of the queue. This returns null if a value is not cached and cannot
     * be created.
     */
    public final V get(final K key) {
        if (key == null) {
            throw new NullPointerException("key == null");
        }

        V mapValue;
        synchronized (this) {
            // map.get() also records the access (access-ordered map).
            mapValue = map.get(key);
            if (mapValue != null) {
                this.hitCount++;
                return mapValue;
            }
            this.missCount++;
        }

        /*
         * Attempt to create a value. This may take a long time, and the map may
         * be different when create() returns. If a conflicting value was added
         * to the map while create() was working, we leave that value in the map
         * and release the created value.
         */
        final V createdValue = create(key);
        if (createdValue == null) {
            return null;
        }

        synchronized (this) {
            this.createCount++;
            mapValue = map.put(key, createdValue);

            if (mapValue != null) {
                /* There was a conflict so undo that last put */
                this.map.put(key, mapValue);
            } else {
                this.size += safeSizeOf(key, createdValue);
            }
        }

        if (mapValue != null) {
            // The created value lost the race; notify and return the winner.
            entryRemoved(false, key, createdValue, mapValue);
            return mapValue;
        } else {
            trimToSize(maxSize);
            return createdValue;
        }
    }

    /**
     * Caches {@code value} for {@code key}. The value is moved to the head of
     * the queue.
     *
     * @return the previous value mapped by {@code key}.
     */
    public final V put(final K key, final V value) {
        if (key == null || value == null) {
            throw new NullPointerException("key == null || value == null");
        }

        V previous;
        synchronized (this) {
            this.putCount++;
            this.size += safeSizeOf(key, value);
            previous = this.map.put(key, value);
            if (previous != null) {
                this.size -= safeSizeOf(key, previous);
            }
        }

        if (previous != null) {
            entryRemoved(false, key, previous, value);
        }

        trimToSize(maxSize);
        return previous;
    }

    /**
     * @param maxSize the maximum size of the cache before returning. May be -1
     *            to evict even 0-sized elements.
     */
    public void trimToSize(final int maxSize) {
        // Evict one least-recently-used entry per iteration; entryRemoved()
        // is deliberately called outside the synchronized block.
        while (true) {
            K key;
            V value;
            synchronized (this) {
                if (this.size < 0 || this.map.isEmpty() && size != 0) {
                    throw new IllegalStateException(getClass().getName() + ".sizeOf() is reporting inconsistent results!");
                }

                if (this.size <= maxSize || this.map.isEmpty()) {
                    break;
                }

                // Eldest entry == least recently accessed (access-ordered map).
                final Map.Entry<K, V> toEvict = this.map.entrySet().iterator().next();
                key = toEvict.getKey();
                value = toEvict.getValue();
                this.map.remove(key);
                this.size -= safeSizeOf(key, value);
                this.evictionCount++;
            }

            entryRemoved(true, key, value, null);
        }
    }

    /**
     * Removes the entry for {@code key} if it exists.
     *
     * @return the previous value mapped by {@code key}.
     */
    public final V remove(final K key) {
        if (key == null) {
            throw new NullPointerException("key == null");
        }

        V previous;
        synchronized (this) {
            previous = this.map.remove(key);
            if (previous != null) {
                this.size -= safeSizeOf(key, previous);
            }
        }

        if (previous != null) {
            entryRemoved(false, key, previous, null);
        }

        return previous;
    }

    /**
     * Called for entries that have been evicted or removed. This method is
     * invoked when a value is evicted to make space, removed by a call to
     * {@link #remove}, or replaced by a call to {@link #put}. The default
     * implementation does nothing.
     * <p>
     * The method is called without synchronization: other threads may access
     * the cache while this method is executing.
     *
     * @param evicted true if the entry is being removed to make space, false if
     *            the removal was caused by a {@link #put} or {@link #remove}.
     * @param newValue the new value for {@code key}, if it exists. If non-null,
     *            this removal was caused by a {@link #put}. Otherwise it was
     *            caused by an eviction or a {@link #remove}.
     */
    protected void entryRemoved(final boolean evicted, final K key, final V oldValue, final V newValue) {
    }

    /**
     * Called after a cache miss to compute a value for the corresponding key.
     * Returns the computed value or null if no value can be computed. The
     * default implementation returns null.
     * <p>
     * The method is called without synchronization: other threads may access
     * the cache while this method is executing.
     * <p>
     * If a value for {@code key} exists in the cache when this method returns,
     * the created value will be released with {@link #entryRemoved} and
     * discarded. This can occur when multiple threads request the same key at
     * the same time (causing multiple values to be created), or when one thread
     * calls {@link #put} while another is creating a value for the same key.
     */
    protected V create(final K key) {
        return null;
    }

    // Wraps sizeOf() with a sanity check so subclass bugs fail fast.
    private int safeSizeOf(final K key, final V value) {
        final int result = sizeOf(key, value);
        if (result < 0) {
            throw new IllegalStateException("Negative size: " + key + "=" + value);
        }
        return result;
    }

    /**
     * Returns the size of the entry for {@code key} and {@code value} in
     * user-defined units. The default implementation returns 1 so that size is
     * the number of entries and max size is the maximum number of entries.
     * <p>
     * An entry's size must not change while it is in the cache.
     */
    protected int sizeOf(final K key, final V value) {
        return 1;
    }

    /**
     * Clear the cache, calling {@link #entryRemoved} on each removed entry.
     */
    public final void evictAll() {
        trimToSize(-1); // -1 will evict 0-sized elements
    }

    /**
     * For caches that do not override {@link #sizeOf}, this returns the number
     * of entries in the cache. For all other caches, this returns the sum of
     * the sizes of the entries in this cache.
     */
    public synchronized final int size() {
        return this.size;
    }

    /**
     * For caches that do not override {@link #sizeOf}, this returns the maximum
     * number of entries in the cache. For all other caches, this returns the
     * maximum sum of the sizes of the entries in this cache.
     */
    public synchronized final int maxSize() {
        return this.maxSize;
    }

    /**
     * Returns the number of times {@link #get} returned a value.
     */
    public synchronized final int hitCount() {
        return this.hitCount;
    }

    /**
     * Returns the number of times {@link #get} returned null or required a new
     * value to be created.
     */
    public synchronized final int missCount() {
        return this.missCount;
    }

    /**
     * Returns the number of times {@link #create(Object)} returned a value.
     */
    public synchronized final int createCount() {
        return this.createCount;
    }

    /**
     * Returns the number of times {@link #put} was called.
     */
    public synchronized final int putCount() {
        return this.putCount;
    }

    /**
     * Returns the number of values that have been evicted.
     */
    public synchronized final int evictionCount() {
        return this.evictionCount;
    }

    /**
     * Returns a copy of the current contents of the cache, ordered from least
     * recently accessed to most recently accessed.
     */
    public synchronized final Map<K, V> snapshot() {
        return new LinkedHashMap<K, V>(this.map);
    }

    @SuppressLint("DefaultLocale")
    @Override
    public synchronized final String toString() {
        final int accesses = this.hitCount + this.missCount;
        final int hitPercent = accesses != 0 ? 100 * this.hitCount / accesses : 0;
        return String.format("LruCache[maxSize=%d,hits=%d,misses=%d,hitRate=%d%%]", this.maxSize, this.hitCount, this.missCount, hitPercent);
    }
}
i25ffz/spollo
src/com/spollo/player/cache/LruCache.java
Java
apache-2.0
10,949
/* * Copyright 2015 The UIMaster Project * * The UIMaster Project licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package org.shaolin.uimaster.page.ajax; import java.io.Serializable; import java.util.List; import org.shaolin.uimaster.page.AjaxContextHelper; import org.shaolin.uimaster.page.DisposableBfString; import org.shaolin.uimaster.page.HTMLUtil; import org.shaolin.uimaster.page.WebConfig; public class RadioButtonGroup extends SingleChoice<RadioButtonGroup> implements Serializable { private static final long serialVersionUID = -8629479224652156061L; public RadioButtonGroup(String uiid) { this(AjaxContextHelper.getAjaxContext().getEntityPrefix() + uiid, new CellLayout()); this.setListened(true); } public RadioButtonGroup(String id, Layout layout) { super(id, layout); } public boolean isHorizontalLayout() { return Boolean.parseBoolean((String)getAttribute("horizontalLayout")); } public void setHorizontalLayout(boolean horizontalLayout) { if(this.isHorizontalLayout() != horizontalLayout) { addAttribute("horizontalLayout", Boolean.valueOf(horizontalLayout).toString(), true); } } public String generateHTML() { String value = getValue(); List displayOptions = getOptionDisplayValues(); List options = getOptionValues(); if (displayOptions == null) { displayOptions = options; } StringBuilder html = DisposableBfString.getBuffer(); try { if (isVisible()) { html.append("<p name=\""+this.getId()+"\">"); } else { html.append("<p name=\""+this.getId()+"\" 
style=\"display:none\">"); } generateWidget(html); if (displayOptions != null && options != null) { if (displayOptions.size() == 0) { displayOptions = options; } if ( isReadOnly() ) { int position = options.indexOf(value);//whether contains this value. if( position != -1) { if (this.isValueMask()) { html.append(WebConfig.getHiddenValueMask()); } else { html.append(HTMLUtil.htmlEncode(displayOptions.get(position) .toString())); } html.append("<input type=\"hidden\" name=\""); html.append(getId()); html.append("\""); html.append(" value=\""); if (this.isValueMask()) { html.append(WebConfig.getHiddenValueMask()); } else { html.append(HTMLUtil.formatHtmlValue(value)); } html.append("\""); generateAttributes(html); generateEventListeners(html); html.append(" />"); } else { html.append("<input type=\"hidden\" name=\""); html.append(getId()); html.append("\""); html.append(" value='' />"); } } else { boolean horizontalLayout = Boolean .parseBoolean((String)getAttribute("horizontalLayout")); for (int i = 0; i < displayOptions.size(); i++) { String entryValue = this.isValueMask() ? WebConfig.getHiddenValueMask() : HTMLUtil.formatHtmlValue(options.get(i).toString()); String entryDisplayValue = this.isValueMask() ? 
WebConfig.getHiddenValueMask() : HTMLUtil.htmlEncode(displayOptions.get(i).toString()); html.append("<input type=\"radio\" name=\""); html.append(getId()); html.append("\""); html.append(" id=\""); html.append(entryValue); html.append("\""); html.append("value=\""); html.append(entryValue); html.append("\""); generateAttributes(html); generateEventListeners(html); if (value != null && value.equalsIgnoreCase(options.get(i).toString())) { html.append(" checked"); } html.append(" />"); html.append("<label for=\""); html.append(entryValue); html.append("\">"); html.append(entryDisplayValue); html.append("</label>"); if (!horizontalLayout) { html.append("<br />"); } } } } html.append("</p>"); return html.toString(); } finally { DisposableBfString.release(html); } } public RadioButtonGroup addConstraint(String name, Object[] value, String message) { if (name != null) { if (name.toLowerCase().equals("mustcheck")) { if (message != null) { super.addConstraint("mustCheckText", "'"+packMessageText(message)+"'", false); } super.addConstraint("mustCheck", joinArray(value)); } else { super.addConstraint(name, value, message); } } return this; } public String generateJS() { StringBuffer js = new StringBuffer(200); js.append("defaultname."); js.append(getId()); js.append("=new UIMaster.ui.radiobuttongroup({"); js.append("ui:elementList[\""); js.append(getId()); js.append("\"]"); js.append(super.generateJS()); js.append("});"); return js.toString(); } }
shaolinwu/uimaster
modules/uipage/src/main/java/org/shaolin/uimaster/page/ajax/RadioButtonGroup.java
Java
apache-2.0
6,859
/**
 */
package org.mobadsl.semantic.model.moba.provider;

import java.util.Collection;
import java.util.List;

import org.eclipse.emf.common.notify.AdapterFactory;
import org.eclipse.emf.common.notify.Notification;

import org.eclipse.emf.edit.provider.ComposeableAdapterFactory;
import org.eclipse.emf.edit.provider.IItemPropertyDescriptor;
import org.eclipse.emf.edit.provider.ItemPropertyDescriptor;
import org.eclipse.emf.edit.provider.ViewerNotification;

import org.mobadsl.semantic.model.moba.MobaPackage;
import org.mobadsl.semantic.model.moba.MobaTransportSerializationType;

// EMF-generated item provider: methods are marked @generated and will be
// overwritten on regeneration unless changed to "@generated NOT".
/**
 * This is the item provider adapter for a {@link org.mobadsl.semantic.model.moba.MobaTransportSerializationType} object.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public class MobaTransportSerializationTypeItemProvider extends MobaApplicationFeatureItemProvider {
	/**
	 * This constructs an instance from a factory and a notifier.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public MobaTransportSerializationTypeItemProvider(AdapterFactory adapterFactory) {
		super(adapterFactory);
	}

	/**
	 * This returns the property descriptors for the adapted class.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public List<IItemPropertyDescriptor> getPropertyDescriptors(Object object) {
		// Descriptors are built lazily once and cached in the inherited field.
		if (itemPropertyDescriptors == null) {
			super.getPropertyDescriptors(object);

			addNamePropertyDescriptor(object);
		}
		return itemPropertyDescriptors;
	}

	/**
	 * This adds a property descriptor for the Name feature.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected void addNamePropertyDescriptor(Object object) {
		itemPropertyDescriptors.add
			(createItemPropertyDescriptor
				(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
				 getResourceLocator(),
				 getString("_UI_MobaTransportSerializationType_name_feature"),
				 getString("_UI_PropertyDescriptor_description", "_UI_MobaTransportSerializationType_name_feature", "_UI_MobaTransportSerializationType_type"),
				 MobaPackage.Literals.MOBA_TRANSPORT_SERIALIZATION_TYPE__NAME,
				 true,
				 false,
				 false,
				 ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
				 null,
				 null));
	}

	/**
	 * This returns MobaTransportSerializationType.gif.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object getImage(Object object) {
		return overlayImage(object, getResourceLocator().getImage("full/obj16/MobaTransportSerializationType"));
	}

	/**
	 * This returns the label text for the adapted class.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public String getText(Object object) {
		// Falls back to the bare type name when the element has no name yet.
		String label = ((MobaTransportSerializationType)object).getName();
		return label == null || label.length() == 0 ?
			getString("_UI_MobaTransportSerializationType_type") :
			getString("_UI_MobaTransportSerializationType_type") + " " + label;
	}

	/**
	 * This handles model notifications by calling {@link #updateChildren} to update any cached
	 * children and by creating a viewer notification, which it passes to {@link #fireNotifyChanged}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void notifyChanged(Notification notification) {
		updateChildren(notification);

		switch (notification.getFeatureID(MobaTransportSerializationType.class)) {
			case MobaPackage.MOBA_TRANSPORT_SERIALIZATION_TYPE__NAME:
				// Name changes only need a label refresh (last flag), not a
				// structural content refresh -- standard EMF.Edit pattern.
				fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), false, true));
				return;
		}
		super.notifyChanged(notification);
	}

	/**
	 * This adds {@link org.eclipse.emf.edit.command.CommandParameter}s describing the children
	 * that can be created under this object.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected void collectNewChildDescriptors(Collection<Object> newChildDescriptors, Object object) {
		super.collectNewChildDescriptors(newChildDescriptors, object);
	}
}
florianpirchner/mobadsl
org.mobadsl.semantic.model.edit/src/org/mobadsl/semantic/model/moba/provider/MobaTransportSerializationTypeItemProvider.java
Java
apache-2.0
4,084
/* * Copyright 2016-2020 chronicle.software * * https://chronicle.software * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.openhft.chronicle.bytes; import net.openhft.chronicle.bytes.internal.BytesInternal; import net.openhft.chronicle.core.Maths; import net.openhft.chronicle.core.annotation.NonNegative; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.nio.BufferOverflowException; import java.nio.BufferUnderflowException; import java.nio.ByteBuffer; import static net.openhft.chronicle.core.util.ObjectUtils.requireNonNull; @SuppressWarnings({"rawtypes", "unchecked"}) public interface RandomDataOutput<R extends RandomDataOutput<R>> extends RandomCommon { /** * Write a byte at an offset. * * @param offset to write to * @param i the value * @return this * @throws BufferOverflowException if the capacity was exceeded * @throws IllegalArgumentException if the value cannot be cast to the type without loss. */ @NotNull default R writeByte(long offset, int i) throws BufferOverflowException, IllegalArgumentException, ArithmeticException, IllegalStateException { return writeByte(offset, Maths.toInt8(i)); } /** * Write an unsigned byte at an offset. * * @param offset to write to * @param i the value * @return this * @throws BufferOverflowException if the capacity was exceeded * @throws IllegalArgumentException if the value cannot be cast to the type without loss. 
*/
    @NotNull
    default R writeUnsignedByte(long offset, int i)
            throws BufferOverflowException, IllegalArgumentException, ArithmeticException, IllegalStateException {
        return writeByte(offset, (byte) Maths.toUInt8(i));
    }

    /**
     * Write a boolean at an offset, encoded as the single byte 'Y' (true) or 'N' (false).
     *
     * @param offset to write to
     * @param flag   the value
     * @return this
     * @throws BufferOverflowException if the capacity was exceeded
     */
    @NotNull
    default R writeBoolean(long offset, boolean flag)
            throws BufferOverflowException, IllegalStateException {
        try {
            return writeByte(offset, flag ? 'Y' : 'N');
        } catch (IllegalArgumentException | ArithmeticException e) {
            // writeByte cannot legitimately reject 'Y'/'N'; treat any such failure as a bug.
            throw new AssertionError(e);
        }
    }

    /**
     * Write an unsigned 16-bit value at an offset.
     *
     * @param offset to write to
     * @param i      the value
     * @return this
     * @throws BufferOverflowException if the capacity was exceeded
     * @throws ArithmeticException     if the value cannot be cast to the type without loss.
     */
    @NotNull
    default R writeUnsignedShort(long offset, int i)
            throws BufferOverflowException, ArithmeticException, IllegalStateException {
        return writeShort(offset, (short) Maths.toUInt16(i));
    }

    /**
     * Write an unsigned 32-bit value at an offset.
     *
     * @param offset to write to
     * @param i      the value
     * @return this
     * @throws BufferOverflowException if the capacity was exceeded
     * @throws ArithmeticException     if the value cannot be cast to the type without loss.
     */
    @NotNull
    default R writeUnsignedInt(long offset, long i)
            throws BufferOverflowException, ArithmeticException, IllegalStateException {
        return writeInt(offset, (int) Maths.toUInt32(i));
    }

    /**
     * Write a byte at an offset.
     *
     * @param offset to write to
     * @param i8     the value
     * @return this
     * @throws BufferOverflowException if the capacity was exceeded
     */
    @NotNull
    R writeByte(long offset, byte i8)
            throws BufferOverflowException, IllegalStateException;

    /**
     * Write a short at an offset.
     *
     * @param offset to write to
     * @param i      the value
     * @return this
     * @throws BufferOverflowException if the capacity was exceeded
     */
    @NotNull
    R writeShort(long offset, short i)
            throws BufferOverflowException, IllegalStateException;

    /**
     * Write the low 24 bits of an int at an offset: the low 16 bits via
     * {@link #writeShort(long, short)} followed by bits 16-23 as a single byte
     * at {@code offset + 2}.
     *
     * @param offset to write to
     * @param i      the value; bits 24-31 are discarded
     * @return this
     * @throws BufferOverflowException if the capacity was exceeded
     */
    @NotNull
    default R writeInt24(long offset, int i)
            throws BufferOverflowException, IllegalStateException {
        writeShort(offset, (short) i);
        return writeByte(offset + 2, (byte) (i >> 16));
    }

    /**
     * Write an int at an offset.
     *
     * @param offset to write to
     * @param i      the value
     * @return this
     * @throws BufferOverflowException if the capacity was exceeded
     */
    @NotNull
    R writeInt(long offset, int i)
            throws BufferOverflowException, IllegalStateException;

    /**
     * Perform a non stalling write with a store barrier.
     *
     * @param offset to write to
     * @param i      value to write
     * @return this
     * @throws BufferOverflowException if the capacity was exceeded
     */
    @NotNull
    R writeOrderedInt(long offset, int i)
            throws BufferOverflowException, IllegalStateException;

    /**
     * Perform a non stalling write with a store barrier; the float is stored as
     * its raw IEEE-754 bit pattern.
     *
     * @param offset to write to
     * @param f      value to write
     * @return this
     * @throws BufferOverflowException if the capacity was exceeded
     */
    @NotNull
    default R writeOrderedFloat(long offset, float f)
            throws BufferOverflowException, IllegalStateException {
        return writeOrderedInt(offset, Float.floatToRawIntBits(f));
    }

    /**
     * Write a long at an offset.
     *
     * @param offset to write to
     * @param i      the value
     * @return this
     * @throws BufferOverflowException if the capacity was exceeded
     */
    @NotNull
    R writeLong(long offset, long i)
            throws BufferOverflowException, IllegalStateException;

    /**
     * Perform a non stalling write with a store barrier.
     *
     * @param offset to write to
     * @param i      value to write
     * @return this
     */
    @NotNull
    R writeOrderedLong(long offset, long i)
            throws BufferOverflowException, IllegalStateException;

    /**
     * Perform a non stalling write with a store barrier; the double is stored as
     * its raw IEEE-754 bit pattern.
     *
     * @param offset to write to
     * @param d      value to write
     * @return this
     */
    @NotNull
    default R writeOrderedDouble(long offset, double d)
            throws BufferOverflowException, IllegalStateException {
        return writeOrderedLong(offset, Double.doubleToRawLongBits(d));
    }

    /**
     * Write a float at an offset.
     *
     * @param offset to write to
     * @param d      the value
     * @return this
     * @throws BufferOverflowException if the capacity was exceeded
     */
    @NotNull
    R writeFloat(long offset, float d)
            throws BufferOverflowException, IllegalStateException;

    /**
     * Write a double at an offset.
     *
     * @param offset to write to
     * @param d      the value
     * @return this
     * @throws BufferOverflowException if the capacity was exceeded
     */
    @NotNull
    R writeDouble(long offset, double d)
            throws BufferOverflowException, IllegalStateException;

    /** Volatile write of a byte at an offset. */
    @NotNull
    R writeVolatileByte(long offset, byte i8)
            throws BufferOverflowException, IllegalStateException;

    /** Volatile write of a short at an offset. */
    @NotNull
    R writeVolatileShort(long offset, short i16)
            throws BufferOverflowException, IllegalStateException;

    /** Volatile write of an int at an offset. */
    @NotNull
    R writeVolatileInt(long offset, int i32)
            throws BufferOverflowException, IllegalStateException;

    /** Volatile write of a long at an offset. */
    @NotNull
    R writeVolatileLong(long offset, long i64)
            throws BufferOverflowException, IllegalStateException;

    /** Volatile write of a float (raw IEEE-754 bits) at an offset. */
    @NotNull
    default R writeVolatileFloat(long offset, float f)
            throws BufferOverflowException, IllegalStateException {
        return writeVolatileInt(offset, Float.floatToRawIntBits(f));
    }

    /** Volatile write of a double (raw IEEE-754 bits) at an offset. */
    @NotNull
    default R writeVolatileDouble(long offset, double d)
            throws BufferOverflowException, IllegalStateException {
        return writeVolatileLong(offset, Double.doubleToRawLongBits(d));
    }

    /**
     * Copies whole byte[] into this.
     * See {@link #write(long, byte[], int, int)}
     */
    @NotNull
    default R write(long offsetInRDO, @NotNull byte[] bytes)
            throws BufferOverflowException, IllegalStateException {
        requireNonNull(bytes);
        return write(offsetInRDO, bytes, 0, bytes.length);
    }

    /**
     * Copies the provided {@code byteArray} to this Bytes object starting at {@code writeOffset} taking
     * content starting at {@code readOffset} but copying at most {@code length} bytes.
     * <p>
     * Does not update cursors e.g. {@link #writePosition}
     *
     * @param writeOffset non-negative offset to write to
     * @param byteArray   non-null copy from byteArray
     * @param readOffset  non-negative copy from offset
     * @param length      non-negative length to copy
     * @return this
     * @throws BufferOverflowException  if this Bytes object cannot accommodate all the bytes to copy.
     * @throws IllegalStateException    if this Bytes object has been previously released
     * @throws IllegalArgumentException if the provided {@code writeOffset}, {@code readOffset } or {@code length} is negative
     * @throws NullPointerException     if the provided {@code byteArray} is {@code null}
     */
    @NotNull
    R write(@NonNegative long writeOffset, byte[] byteArray, @NonNegative int readOffset, @NonNegative int length)
            throws BufferOverflowException, IllegalStateException;

    /**
     * Copy from ByteBuffer into this.
     * <p>
     * Does not update cursors e.g. {@link #writePosition}
     *
     * @param writeOffset offset to write to
     * @param bytes       copy from bytes
     * @param readOffset  copy from offset
     * @param length      number of bytes to copy
     * @throws BufferOverflowException if the capacity was exceeded
     * @throws IllegalStateException   if this Bytes object has been previously released
     */
    void write(long writeOffset, @NotNull ByteBuffer bytes, int readOffset, int length)
            throws BufferOverflowException, IllegalStateException;

    /**
     * Copies whole BytesStore into this - see {@link #write(long, RandomDataInput, long, long)}
     */
    @NotNull
    default R write(long offsetInRDO, @NotNull BytesStore bytes)
            throws BufferOverflowException, IllegalStateException {
        requireNonNull(bytes);
        try {
            return write(offsetInRDO, bytes, bytes.readPosition(), bytes.readRemaining());
        } catch (BufferUnderflowException e) {
            // readPosition()/readRemaining() bound the copy, so underflow is impossible here.
            throw new AssertionError(e);
        }
    }

    /**
     * Copy from RandomDataInput into this. Does not bump {@link #writePosition} nor {@link RandomDataInput#readPosition()}
     *
     * @param writeOffset offset to write to
     * @param bytes       copy from bytes
     * @param readOffset  copy from offset
     * @param length      number of bytes to copy
     * @return this
     * @throws BufferOverflowException  if the capacity was exceeded
     * @throws BufferUnderflowException if the source does not have {@code length} bytes at {@code readOffset}
     * @throws IllegalStateException    if this Bytes object has been previously released
     */
    @NotNull
    R write(long writeOffset, @NotNull RandomDataInput bytes, long readOffset, long length)
            throws BufferOverflowException, BufferUnderflowException, IllegalStateException;

    /**
     * Zero out the bytes between the start and the end.
     *
     * @param start index of first byte inclusive
     * @param end   index of last byte exclusive
     * @return this
     */
    @NotNull
    R zeroOut(long start, long end) throws IllegalStateException;

    /**
     * Write a long as a fixed-width decimal at an offset.
     *
     * @param offset to write to
     * @param value  the value
     * @param digits number of decimal digits to render
     * @return this
     * @throws BufferOverflowException if the capacity was exceeded
     */
    @NotNull
    default R append(long offset, long value, int digits)
            throws BufferOverflowException, IllegalArgumentException, IllegalStateException {
        BytesInternal.append(this, offset, value, digits);
        return (R) this;
    }

    /**
     * Write a double as a fixed-width decimal at an offset, using a scaled-long
     * fast path when the value fits in a long after scaling.
     *
     * @param offset        to write to
     * @param value         the value
     * @param decimalPlaces digits after the decimal point
     * @param digits        total number of characters to render
     * @return this
     * @throws BufferOverflowException if the capacity was exceeded
     */
    @NotNull
    default R append(long offset, double value, int decimalPlaces, int digits)
            throws BufferOverflowException, IllegalArgumentException, IllegalStateException, ArithmeticException {
        if (decimalPlaces < 20) {
            double d2 = value * Maths.tens(decimalPlaces);
            if (d2 <= Long.MAX_VALUE && d2 >= Long.MIN_VALUE) {
                // Fast path: the scaled value fits in a long, render it as a decimal.
                BytesInternal.appendDecimal(this, Math.round(d2), offset, decimalPlaces, digits);
                return (R) this;
            }
        }
        // Fallback: stream the double in its general form.
        BytesInternal.append((StreamingDataOutput) this, value);
        return (R) this;
    }

    /**
     * expert level method to copy data from native memory into the BytesStore
     *
     * @param address  in native memory to copy from
     * @param position in BytesStore to copy to
     * @param size     in bytes
     */
    void nativeWrite(long address, long position, long size)
            throws BufferOverflowException, IllegalStateException;

    /**
     * Writes the provided {@code text} into this {@code RandomDataOutput} writing at the given {@code writeOffset},
     * in Utf8 format. Returns the new write position after writing the provided {@code text}.
     *
     * @param writeOffset the writeOffset to write char sequence from
     * @param text        the char sequence to write, could be {@code null}
     * @return the writeOffset after the char sequence written, in this {@code RandomDataOutput}
     * @see RandomDataInput#readUtf8(long, Appendable)
     */
    default long writeUtf8(long writeOffset, @Nullable CharSequence text)
            throws BufferOverflowException, IllegalStateException, ArithmeticException {
        return BytesInternal.writeUtf8(this, writeOffset, text);
    }

    /**
     * Writes the given {@code text} to this {@code RandomDataOutput} writing at the provided {@code writeOffset},
     * in Utf8 format, checking that the utf8 encoding size of the given char sequence is less or
     * equal to the provided {@code maxUtf8Len}, otherwise {@code IllegalArgumentException} is thrown,
     * and no bytes of this {@code RandomDataOutput} are overwritten. Returns the new write position after
     * writing the provided {@code text}
     *
     * @param writeOffset the writeOffset to write char sequence from
     * @param text        the char sequence to write, could be {@code null}
     * @param maxUtf8Len  the maximum allowed length (in Utf8 encoding) of the given char sequence
     * @return the writeOffset after the char sequence written, in this {@code RandomDataOutput}
     * @throws IllegalArgumentException if the given char sequence size in Utf8 encoding exceeds maxUtf8Len
     * @see RandomDataInput#readUtf8Limited(long, Appendable, int)
     * @see RandomDataInput#readUtf8Limited(long, int)
     */
    default long writeUtf8Limited(long writeOffset, @Nullable CharSequence text, int maxUtf8Len)
            throws BufferOverflowException, IllegalStateException, ArithmeticException {
        return BytesInternal.writeUtf8(this, writeOffset, text, maxUtf8Len);
    }

    /**
     * Write the stop bit length and copy the BytesStore
     *
     * @param position to write
     * @param bs       to copy.
     * @return the offset after the char sequence written, in this {@code RandomDataOutput}
     */
    long write8bit(long position, @NotNull BytesStore bs);

    /**
     * Write the stop bit length and copy a substring of the provided String.
     *
     * @param position to write
     * @param s        source string
     * @param start    index of the first character to copy
     * @param length   number of characters to copy
     * @return the offset after the char sequence written, in this {@code RandomDataOutput}
     */
    long write8bit(long position, @NotNull String s, int start, int length);
}
OpenHFT/Chronicle-Bytes
src/main/java/net/openhft/chronicle/bytes/RandomDataOutput.java
Java
apache-2.0
15,803
package zolota; import java.util.concurrent.TimeUnit; import org.openqa.selenium.Platform; import org.openqa.selenium.WebDriver; import org.openqa.selenium.remote.DesiredCapabilities; import org.testng.annotations.AfterSuite; import org.testng.annotations.BeforeClass; import ru.stqa.selenium.factory.WebDriverFactory; import zolota.util.PropertyLoader; /** * Base class for all the TestNG-based test classes */ public class TestBase { protected WebDriver driver; protected String gridHubUrl; protected String baseUrl; @BeforeClass public void init() { baseUrl = PropertyLoader.loadProperty("site.url"); gridHubUrl = PropertyLoader.loadProperty("grid2.hub"); DesiredCapabilities capabilities = new DesiredCapabilities(); capabilities.setBrowserName(PropertyLoader.loadProperty("browser.name")); capabilities.setVersion(PropertyLoader.loadProperty("browser.version")); String platform = PropertyLoader.loadProperty("browser.platform"); if (!(null == platform || "".equals(platform))) { capabilities.setPlatform(Platform.valueOf(PropertyLoader.loadProperty("browser.platform"))); } if (!(null == gridHubUrl || "".equals(gridHubUrl))) { driver = WebDriverFactory.getDriver(gridHubUrl, capabilities); } else { driver = WebDriverFactory.getDriver(capabilities); } driver.manage().timeouts().implicitlyWait(30, TimeUnit.SECONDS); } @AfterSuite(alwaysRun = true) public void tearDown() { if (driver != null) { WebDriverFactory.dismissDriver(driver); } } }
zolotarevskaya/selenium_java
src/test/java/zolota/TestBase.java
Java
apache-2.0
1,511
import { helper } from '@ember/component/helper';

/**
 * Formats the elapsed time between two timestamps as "<seconds> sec".
 * The value keeps one decimal place, dropping to whole seconds once it
 * exceeds a minute. When the elapsed time rounds to zero (or cannot be
 * computed), a muted dash placeholder is returned instead.
 */
export function runTime(params) {
  const start = moment(params[0]);
  const end = moment(params[1]);

  // diff() is in milliseconds; keep a single decimal digit of seconds.
  let seconds = Math.round(end.diff(start) / 100) / 10;

  if (!seconds) {
    // htmlSafe so the placeholder markup is rendered, not escaped.
    return '<span class="text-muted">-</span>'.htmlSafe();
  }

  if (seconds > 60) {
    seconds = Math.round(seconds);
  }

  return `${seconds} sec`;
}

export default helper(runTime);
pengjiang80/ui
lib/shared/addon/helpers/run-time.js
JavaScript
apache-2.0
409
#!/usr/bin/env python
"""Entry point for the IRIDA MiSeq Uploader GUI.

Configures file + console logging, then starts the wx application which
shows the first-run settings dialog (if needed), checks GitHub for a newer
release, and opens the main uploader window.
"""
import wx
import logging
import webbrowser
import ConfigParser
import argparse
import wx.lib.delayedresult as dr
import wx.lib.agw.hyperlink as hl

from os import path, makedirs
from distutils.version import LooseVersion
from github3 import GitHub
from GUI import UploaderAppFrame, SettingsDialog
from appdirs import user_config_dir, user_log_dir
from wx.lib.pubsub import pub

# NOTE(review): webbrowser, argparse and pub are imported but not used in this
# module -- presumably left over from earlier revisions; verify before removing.

# Locate the application config next to this module, falling back to the
# parent directory (the layout used when running from a source checkout).
path_to_module = path.dirname(__file__)
app_config = path.join(path_to_module, 'irida-uploader.cfg')
if not path.isfile(app_config):
    app_config = path.join(path_to_module, '..', 'irida-uploader.cfg')

# Make sure the per-user log directory exists before basicConfig opens the file.
if not path.exists(user_log_dir("iridaUploader")):
    makedirs(user_log_dir("iridaUploader"))

log_format = '%(asctime)s %(levelname)s\t%(filename)s:%(funcName)s:%(lineno)d - %(message)s'

# if any logging gets called before `basicConfig`, our attempts to configure the
# logging here will be clobbered. This removes any existing handlers that might
# have been set up when some other log message was printed, so that we can
# actually configure the logging the way we want.
logging.getLogger().handlers = []
logging.basicConfig(level=logging.DEBUG,
                    filename=path.join(user_log_dir("iridaUploader"), 'irida-uploader.log'),
                    format=log_format,
                    filemode='w')

# Mirror all log output to the console as well as the log file.
console = logging.StreamHandler()
console.setLevel(logging.DEBUG)
console.setFormatter(logging.Formatter(log_format))
logging.getLogger().addHandler(console)


class Uploader(wx.App):
    """Top-level wx application for the uploader."""

    def __init__(self, show_new_ui=False, redirect=False, filename=None):
        """Start the app: read version info, kick off the update check in the
        background, show the settings dialog on first run, then the main frame.

        NOTE(review): show_new_ui is accepted but never used -- confirm intent.
        """
        wx.App.__init__(self, redirect, filename)
        self.get_app_info()
        self.check_for_update()
        user_config_file = path.join(user_config_dir("iridaUploader"), "config.conf")
        if not path.exists(user_config_file):
            # No saved settings yet: treat this as a first run.
            dialog = SettingsDialog(first_run=True)
            dialog.ShowModal()
        self._show_main_app()

    def _show_main_app(self):
        """Create and show the main uploader window."""
        frame = UploaderAppFrame(app_name=self.__app_name__,
                                 app_version=self.__app_version__,
                                 app_url=self.url)
        frame.Show()

    def get_app_info(self):
        """Read the application name and version from irida-uploader.cfg.

        NOTE(review): in Python 2's ConfigParser the third positional argument
        of get() is `raw`, not a default value -- passing None here works only
        because None is falsy; confirm the options always exist in the file.
        """
        config_parser = ConfigParser.ConfigParser()
        config_parser.read(app_config)
        self.__app_version__ = config_parser.get('Application', 'version', None)
        self.__app_name__ = config_parser.get('Application', 'name', None)

    @property
    def url(self):
        # Project home page; also used to build the releases link below.
        return "https://github.com/phac-nml/irida-miseq-uploader"

    def check_for_update(self):
        """Asynchronously compare the running version against the latest
        GitHub tag and show a dialog when a newer release exists."""

        def find_update():
            # Runs on a worker thread (via wx delayedresult).
            logging.debug("Checking remote for new updates.")
            try:
                gh = GitHub()
                repo = gh.repository("phac-nml", "irida-miseq-uploader")
                # get the latest tag from github
                return next(repo.iter_tags(number=1))
            except:
                # NOTE(review): bare except also swallows e.g. KeyboardInterrupt
                # before re-raising; a narrower exception type would be safer.
                logging.warn("Couldn't reach github to check for new version.")
                raise

        def handle_update(result):
            # Runs back on the GUI thread with the worker's result.
            latest_tag = result.get()
            logging.debug("Found latest version: [{}]".format(latest_tag))
            release_url = self.url + "/releases/latest"
            if LooseVersion(self.__app_version__) < LooseVersion(latest_tag.name):
                logging.info("Newer version found.")
                dialog = NewVersionMessageDialog(
                    parent=None,
                    id=wx.ID_ANY,
                    message=("A new version of the IRIDA MiSeq "
                             "Uploader tool is available. You can"
                             " download the latest version from "),
                    title="IRIDA MiSeq Uploader update available",
                    download_url=release_url,
                    style=wx.CAPTION|wx.CLOSE_BOX|wx.STAY_ON_TOP)
                dialog.ShowModal()
                dialog.Destroy()
            else:
                logging.debug("No new versions found.")

        dr.startWorker(handle_update, find_update)


class NewVersionMessageDialog(wx.Dialog):
    """Modal dialog announcing a new release, with a clickable download link."""

    def __init__(self, parent, id, title, message, download_url,
                 size=wx.DefaultSize, pos=wx.DefaultPosition,
                 style=wx.DEFAULT_DIALOG_STYLE, name='dialog'):
        # NOTE(review): parameter `id` shadows the builtin; kept for wx convention.
        wx.Dialog.__init__(self, parent, id, title, pos, size, style, name)

        label = wx.StaticText(self, label=message)
        button = wx.Button(self, id=wx.ID_OK, label="Close")
        button.SetDefault()
        line = wx.StaticLine(self, wx.ID_ANY, size=(20, -1), style=wx.LI_HORIZONTAL)
        download_ctrl = hl.HyperLinkCtrl(self, wx.ID_ANY, download_url, URL=download_url)

        # Vertical layout: message, link, separator line, Close button.
        sizer = wx.BoxSizer(wx.VERTICAL)
        button_sizer = wx.StdDialogButtonSizer()
        button_sizer.AddButton(button)
        button_sizer.Realize()
        sizer.Add(label, 0, wx.ALIGN_CENTER|wx.ALL, 5)
        sizer.Add(download_ctrl, 0, wx.ALL, 10)
        sizer.Add(line, 0, wx.GROW|wx.ALIGN_CENTER_VERTICAL|wx.RIGHT|wx.TOP, 5)
        sizer.Add(button_sizer, 0, wx.ALIGN_CENTER_VERTICAL|wx.ALL, 5)
        self.SetSizer(sizer)
        sizer.Fit(self)


def main():
    """Run the uploader's GUI event loop."""
    app = Uploader()
    app.MainLoop()

if __name__ == "__main__":
    main()
phac-nml/irida-miseq-uploader
run_IRIDA_Uploader.py
Python
apache-2.0
5,119
package sivan.yue.nlp.common.dataAlgo.matrix;

/**
 * Skeletal {@link ITMatrix} implementation that records the three dimension
 * sizes and forwards all element access to the {@link #getV}/{@link #putV}
 * storage primitives supplied by concrete subclasses.
 *
 * Created by xiwen.yxw on 2017/3/24.
 */
public abstract class BaseTMatrix implements ITMatrix {

    /** Size of the x dimension. */
    protected int xNum;
    /** Size of the y dimension. */
    protected int yNum;
    /** Size of the z dimension. */
    protected int zNum;

    public BaseTMatrix(int xNum, int yNum, int zNum) {
        this.xNum = xNum;
        this.yNum = yNum;
        this.zNum = zNum;
    }

    /** Storage hook: read the value at (x, y, z). */
    protected abstract double getV(int x, int y, int z);

    /** Storage hook: store value {@code v} with extra datum {@code d} at (x, y, z). */
    protected abstract void putV(int x, int y, int z, double v, double d);

    @Override
    public double get(int x, int y, int z) {
        return getV(x, y, z);
    }

    @Override
    public void put(int x, int y, int z, double v, double d) {
        putV(x, y, z, v, d);
    }

    /** Convenience overload of {@link #put(int, int, int, double, double)} with {@code d} fixed to 0. */
    public void put(int x, int y, int z, double v) {
        putV(x, y, z, v, 0);
    }

    @Override
    public int getXNum() {
        return xNum;
    }

    @Override
    public int getYNum() {
        return yNum;
    }

    @Override
    public int getZNum() {
        return zNum;
    }
}
FreeSivan/Rorschach
src/main/java/sivan/yue/nlp/common/dataAlgo/matrix/BaseTMatrix.java
Java
apache-2.0
1,019
package net.sundell.otter; import java.io.IOException; import java.io.PushbackReader; import java.io.Reader; import java.util.ArrayList; import java.util.Date; import java.util.List; import javax.xml.namespace.QName; import javax.xml.stream.Location; import javax.xml.stream.events.Attribute; import javax.xml.stream.events.StartElement; class Util { static Reader stripBOM(Reader r) throws IOException { PushbackReader pushback = new PushbackReader(r, 1); int bom = pushback.read(); if (bom != -1 && bom != '\uFEFF') { pushback.unread(bom); } return pushback; } static void require(boolean condition, Location location, String message) { if (!condition) { throw new OtterInputException(message, location); } } static String attrVal(StartElement el, QName attrName) { Attribute a = el.getAttributeByName(attrName); return (a == null) ? null : a.getValue(); } static String requireAttrVal(StartElement el, QName attrName, ErrorHandler handler) { Attribute a = el.getAttributeByName(attrName); if (a == null) { handler.fatalError( new OtterInputException("Required attribute " + attrName + " is missing", el.getLocation())); return null; } return a.getValue(); } static Integer attrValAsInteger(StartElement el, QName attrName) { Attribute a = el.getAttributeByName(attrName); if (a == null) return null; try { return Integer.valueOf(a.getValue()); } catch (NumberFormatException e) { throw new OtterInputException("Not an integer value: " + a.getValue(), el.getLocation()); } } /** * If an ErrorHandler is specified, this will report an error via the error() method. * Otherwise, the error is reported as an OtterException. 
*/ static Date attrValAsDate(StartElement el, QName attrName, TMXDateParser dateParser, ErrorHandler handler) throws OtterException { Attribute a = el.getAttributeByName(attrName); if (a == null) return null; Date d = dateParser.parseDate(a.getValue()); if (d == null) { OtterInputException e = new OtterInputException("Invalid date format '" + a.getValue() + "' for " + attrName.getLocalPart(), el.getLocation()); if (handler != null) { handler.error(e); } else { throw e; } } return d; } static List<TUVContent> normalizeWhitespace(List<TUVContent> orig) { List<TUVContent> normalized = new ArrayList<>(orig.size()); for (int i = 0; i < orig.size(); i++) { TUVContent content = orig.get(i); if (!(content instanceof TextContent)) { normalized.add(content); continue; } String text = ((TextContent)content).getValue(); normalized.add(new TextContent(normalizeWhitespace(text, i == 0, i + 1 == orig.size()))); } return normalized; } // Horrendous private static String normalizeWhitespace(String text, boolean trimLeading, boolean trimTrailing) { StringBuilder sb = new StringBuilder(); int spaceCount = 0; char[] raw = text.toCharArray(); int leadingSpaceCount = trimLeading ? text.indexOf(text.trim()) : 0; for (int i = leadingSpaceCount; i < raw.length; i++) { char c = raw[i]; if (!(Character.isWhitespace(c))) { sb.append(c); spaceCount = 0; continue; } if (spaceCount == 0) { sb.append(' '); } spaceCount++; } String temp = sb.toString(); if (trimTrailing) { int end = temp.length(); while (end > 0 && Character.isWhitespace(temp.charAt(end - 1))) { end--; } temp = temp.substring(0, end); } return temp; } }
tingley/otter
src/main/java/net/sundell/otter/Util.java
Java
apache-2.0
4,256
/*
 * Licensed to The Apereo Foundation under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership.
 *
 * The Apereo Foundation licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except in
 * compliance with the License. You may obtain a copy of the License at:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
*/
package org.unitime.timetable.action;

import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.struts.action.Action;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.action.ActionMessages;
import org.cpsolver.ifs.util.DataProperties;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.unitime.timetable.defaults.ApplicationProperty;
import org.unitime.timetable.form.ExamSolverForm;
import org.unitime.timetable.model.DepartmentStatusType;
import org.unitime.timetable.model.Session;
import org.unitime.timetable.model.dao.SessionDAO;
import org.unitime.timetable.security.SessionContext;
import org.unitime.timetable.security.rights.Right;
import org.unitime.timetable.solver.exam.ExamSolverProxy;
import org.unitime.timetable.solver.jgroups.SolverServer;
import org.unitime.timetable.solver.service.SolverServerService;
import org.unitime.timetable.solver.service.SolverService;
import org.unitime.timetable.util.ExportUtils;
import org.unitime.timetable.util.LookupTables;
import org.unitime.timetable.util.RoomAvailability;

/**
 * Struts action backing the examination solver page: starting, stopping,
 * (re)loading, saving and exporting the examination timetabling solver.
 * The requested operation arrives as the form/request parameter "op" and is
 * dispatched through a sequence of independent string comparisons below.
 *
 * @author Tomas Muller
 */
@Service("/examSolver")
public class ExamSolverAction extends Action {
	@Autowired SolverService<ExamSolverProxy> examinationSolverService;
	
	@Autowired SessionContext sessionContext;
	
	@Autowired SolverServerService solverServerService;

	/**
	 * Dispatches the solver operation requested by the page.
	 * Most branches require an already-started, idle solver and throw plain
	 * Exceptions (surfaced to the user by the framework) otherwise.
	 */
	public ActionForward execute(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
		ExamSolverForm myForm = (ExamSolverForm) form;

        // Check Access
		sessionContext.checkPermission(Right.ExaminationSolver);

		// Users allowed to pick a solver server get the sorted host list,
		// with the pseudo-hosts "auto" and (optionally) "local" first.
		if (sessionContext.getUser().getCurrentAuthority().hasRight(Right.CanSelectSolverServer)) {
			List<String> hosts = new ArrayList<String>();
			for (SolverServer server: solverServerService.getServers(true))
				hosts.add(server.getHost());
			Collections.sort(hosts);
			if (ApplicationProperty.SolverLocalEnabled.isTrue())
				hosts.add(0, "local");
			hosts.add(0, "auto");
			request.setAttribute("hosts", hosts);
		}

        // Read operation to be performed (form value wins over request parameter)
        String op = (myForm.getOp()!=null?myForm.getOp():request.getParameter("op"));

        ExamSolverProxy solver = examinationSolverService.getSolver();

        // Page setup that is needed regardless of the operation: room
        // availability warning for the exam type in play, and the exam type
        // lookup table for the form.
        Session acadSession = SessionDAO.getInstance().get(sessionContext.getUser().getCurrentAcademicSessionId());
        RoomAvailability.setAvailabilityWarning(request, acadSession,
        		(solver==null?myForm.getExamType():solver.getExamTypeId()),
        		true, false);
        LookupTables.setupExamTypes(request, sessionContext.getUser(), DepartmentStatusType.Status.ExamTimetable);

        // No operation: just (re)initialize the form and show the page.
        if (op==null) {
        	myForm.init("y".equals(request.getParameter("reload")));
        	return mapping.findForward("showSolver");
        }

        // Export the current solution as XML (streams directly to the response).
        if ("Export XML".equals(op)) {
        	if (solver==null) throw new Exception("Solver is not started.");
        	if (solver.isWorking()) throw new Exception("Solver is working, stop it first.");
        	sessionContext.checkPermission(Right.ExaminationSolutionExportXml);
        	byte[] buf = solver.exportXml();
        	OutputStream out = ExportUtils.getXmlOutputStream(response, "solution");
        	out.write(buf);
        	out.flush(); out.close();
        	return null;
        }

        // Revert the in-memory assignment to the best solution found so far.
        if ("Restore From Best".equals(op)) {
        	if (solver==null) throw new Exception("Solver is not started.");
        	if (solver.isWorking()) throw new Exception("Solver is working, stop it first.");
        	solver.restoreBest();
        }

        // Remember the current assignment as the best solution.
        if ("Store To Best".equals(op)) {
        	if (solver==null) throw new Exception("Solver is not started.");
        	if (solver.isWorking()) throw new Exception("Solver is working, stop it first.");
        	solver.saveBest();
        }

        // startsWith: matches any Save variant the page may post.
        if (op.startsWith("Save")) {
        	if (solver==null) throw new Exception("Solver is not started.");
        	if (solver.isWorking()) throw new Exception("Solver is working, stop it first.");
        	solver.save();
        }

        // Discard the solver instance entirely and reset the page.
        if ("Unload".equals(op)) {
        	if (solver==null) throw new Exception("Solver is not started.");
        	if (solver.isWorking()) throw new Exception("Solver is working, stop it first.");
        	examinationSolverService.removeSolver();
        	myForm.reset(mapping, request);
        	myForm.init(false);
        }

        // Drop the current assignment but keep the solver loaded.
        if ("Clear".equals(op)) {
        	if (solver==null) throw new Exception("Solver is not started.");
        	if (solver.isWorking()) throw new Exception("Solver is working, stop it first.");
        	solver.clear();
        }

        // Reload
        if ("Reload Input Data".equals(op)) {
        	if (solver==null) throw new Exception("Solver is not started.");
        	if (solver.isWorking()) throw new Exception("Solver is working, stop it first.");
        	ActionMessages errors = myForm.validate(mapping, request);
        	if(errors.size()>0) {
        		saveErrors(request, errors);
        		return mapping.findForward("showSolver");
        	}
        	DataProperties config = examinationSolverService.createConfig(myForm.getSetting(), myForm.getParameterValues());
        	config.setProperty("Exam.Type", String.valueOf(myForm.getExamType()));
        	request.getSession().setAttribute("Exam.Type", myForm.getExamType());
        	examinationSolverService.reload(config);
        }

        // Load creates the solver without starting it; Start loads and starts
        // (or starts an already-loaded solver with fresh parameters).
        if ("Start".equals(op) || "Load".equals(op)) {
        	boolean start = "Start".equals(op);
        	if (solver!=null && solver.isWorking()) throw new Exception("Solver is working, stop it first.");
        	ActionMessages errors = myForm.validate(mapping, request);
        	if(errors.size()>0) {
        		saveErrors(request, errors);
        		return mapping.findForward("showSolver");
        	}
        	DataProperties config = examinationSolverService.createConfig(myForm.getSetting(), myForm.getParameterValues());
        	config.put("Exam.Type", String.valueOf(myForm.getExamType()));
        	config.put("General.StartSolver", Boolean.valueOf(start).toString());
        	request.getSession().setAttribute("Exam.Type", myForm.getExamType());
        	if (myForm.getHost() != null)
        		config.setProperty("General.Host", myForm.getHost());
        	if (solver == null) {
        		solver = examinationSolverService.createSolver(config);
        	} else if (start) {
        		solver.setProperties(config);
        		solver.start();
        	}
        }

        // Stop a running solver and reset the page state.
        if ("Stop".equals(op)) {
        	if (solver==null) throw new Exception("Solver is not started.");
        	if (solver.isRunning()) solver.stopSolver();
        	myForm.reset(mapping, request);
        	myForm.init(false);
        }

        // Re-read the current solver state into the form.
        if ("Refresh".equals(op)) {
        	myForm.reset(mapping, request);
        	myForm.init(false);
        }

        return mapping.findForward("showSolver");
	}
}
UniTime/unitime
JavaSource/org/unitime/timetable/action/ExamSolverAction.java
Java
apache-2.0
8,157
<?php
namespace formalizer;

error_reporting(E_ALL);

/**
 * @todo RadioList/Select, any element with a given set of values, automatically
 *      add an EnumRule to it
 * @todo PhoneRule - check for phone number
 * @todo CaptchaRule - check captcha submission
 * @todo PasswordRule - needs capital, symbol, number
 * @todo PayPal buy now button and perhaps more paypal related crap
 * @todo CaptchaElement - add a captcha
 * @todo CheckboxElement - add a checkbox or list of checkboxes
 * @todo PasswordElement - password element
 * @todo SelectElement - select box, allow optgroup as well
 * @todo RadioElement - list of radios, maybe each need label?
 * @todo CheckboxElement - single or list of checkboxes
 * @todo PhoneElement - type=tel
 * @todo SearchElement - type=search
 * @todo UrlElement - type=url
 * @todo EmailElement - type=email
 * @todo DateElement - type=date
 * @todo DateTimeElement - type=datetime
 * @todo DateTimeLocalElement - type=datetimelocal
 * @todo MonthElement - type=month
 * @todo WeekElement - type=week
 * @todo TimeElement - type=time
 * @todo NumberElement - type=number
 * @todo RangeElement - type=range
 * @todo ColorElement - type=color
 * @todo ProvinceElement - select box of provinces, specify use or ignore certain countries
 * @todo CountryElement - select box of countries, specify use or ignore certain countries
 * @todo YesNoElement - two radios
 * @todo MultipleFileElement - multiple file upload
 * @todo DatalistElement - some input which has a datalist
 * @todo ImageElement - type=image
 * @todo ResetElement - type=reset
 */
class ZFutureList {} //class to get picked up by apigen, Z so it appears the bottom

/**
 * Enter your recaptcha public/private keys
 *
 * NOTE(review): credentials are committed to source control here; consider
 * loading them from configuration or the environment instead.
 */
define('RECAPTCHA_PUBLIC_KEY', '6LfrPgUAAAAAAFb3mqBDuYFI24ATFmNsn3gvnKmi');
define('RECAPTCHA_PRIVATE_KEY', '6LfrPgUAAAAAAJpxTX3xnCEpYcSSSzY05ZS3Cq6A');

/**
 * Where the formalizer library is location on your server. This points to the
 * root directory of it and should end in a slash.
 */
define('FORMALIZER_DIRECTORY', $_SERVER['DOCUMENT_ROOT'] . '/formalizer/');

/**
 * formalizer autoloader. Ensure the formalizer directory constant above is correct
 * or the loader will fail. Maps a fully-qualified class name to a .php file
 * under FORMALIZER_DIRECTORY by replacing namespace separators with slashes;
 * missing files are silently skipped so other registered autoloaders can run.
 */
spl_autoload_register(function($class){
    $file = FORMALIZER_DIRECTORY . str_replace('\\', '/', $class) . '.php';
    if (file_exists($file)){
        include $file;
    }
});

/**
 * The escape function formalizer uses on it's values. Edit to suit your needs.
 *
 * NOTE(review): addslashes() is not a safe substitute for parameterized
 * queries or driver-specific escaping if this value ever reaches SQL.
 *
 * @param string $value the value to escape
 * @return string the escaped value
 */
function escape($value){
    return addslashes(trim($value));
}

/**
 * The sanitization function formalizer uses on it's values. By default it encodes
 * html to prevent injection. Edit it to suit your needs.
 *
 * Note that other sanitization filters shouldn't be needed. In most cases, you
 * can add a rule to an element to ensure the value is in the format you need it to be.
 *
 * NOTE(review): FILTER_SANITIZE_STRING is deprecated as of PHP 8.1; a
 * replacement (e.g. htmlspecialchars-based) will be needed on newer PHP.
 *
 * @param string $value the value to sanitize
 * @return string the sanitized value
 */
function sanitize($value){
    return filter_var($value, FILTER_SANITIZE_STRING);
}
twentylemon/formalizer
formalizer.php
PHP
apache-2.0
3,197
<?php

use Symfony\Component\HttpKernel\Kernel;
use Symfony\Component\Config\Loader\LoaderInterface;

/**
 * Application kernel: declares the bundle set and the environment-specific
 * cache, log and configuration locations.
 */
class AppKernel extends Kernel
{
    public function registerBundles()
    {
        // Bundles enabled in every environment.
        $registeredBundles = [
            new Symfony\Bundle\FrameworkBundle\FrameworkBundle(),
            new Symfony\Bundle\SecurityBundle\SecurityBundle(),
            new Symfony\Bundle\TwigBundle\TwigBundle(),
            new Symfony\Bundle\MonologBundle\MonologBundle(),
            new Symfony\Bundle\SwiftmailerBundle\SwiftmailerBundle(),
            new Doctrine\Bundle\DoctrineBundle\DoctrineBundle(),
            new Sensio\Bundle\FrameworkExtraBundle\SensioFrameworkExtraBundle(),
            new Tiloweb\PaginationBundle\TilowebPaginationBundle(),
            new AppBundle\AppBundle(),
        ];

        // Development/test-only tooling (profiler, generators) stays out of prod.
        if (in_array($this->getEnvironment(), ['dev', 'test'], true)) {
            $registeredBundles = array_merge($registeredBundles, [
                new Symfony\Bundle\DebugBundle\DebugBundle(),
                new Symfony\Bundle\WebProfilerBundle\WebProfilerBundle(),
                new Sensio\Bundle\DistributionBundle\SensioDistributionBundle(),
                new Sensio\Bundle\GeneratorBundle\SensioGeneratorBundle(),
            ]);
        }

        return $registeredBundles;
    }

    public function getRootDir()
    {
        // The kernel lives in app/, which doubles as the root directory.
        return __DIR__;
    }

    public function getCacheDir()
    {
        return dirname(__DIR__).'/var/cache/'.$this->getEnvironment();
    }

    public function getLogDir()
    {
        return dirname(__DIR__).'/var/logs';
    }

    public function registerContainerConfiguration(LoaderInterface $loader)
    {
        // Loads app/config/config_{env}.yml for the active environment.
        $loader->load($this->getRootDir().'/config/config_'.$this->getEnvironment().'.yml');
    }
}
Tilotiti/Pokemon
app/AppKernel.php
PHP
apache-2.0
1,687
# Copyright 2012 Google Inc. All Rights Reserved. # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """A test module in a sub-package.""" __author__ = 'schuppe@google.com (Robert Schuppenies)' import unittest class FooTest(unittest.TestCase): def test_pass(self): pass def test_fail(self): self.assertTrue(False)
zenlambda/aeta
testdata/test_modules/sample_package/subpackage/test_ham.py
Python
apache-2.0
825
// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * LineItemTemplateServiceSoapBindingStub.java * * This file was auto-generated from WSDL * by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter. */ package com.google.api.ads.admanager.axis.v202111; public class LineItemTemplateServiceSoapBindingStub extends org.apache.axis.client.Stub implements com.google.api.ads.admanager.axis.v202111.LineItemTemplateServiceInterface { private java.util.Vector cachedSerClasses = new java.util.Vector(); private java.util.Vector cachedSerQNames = new java.util.Vector(); private java.util.Vector cachedSerFactories = new java.util.Vector(); private java.util.Vector cachedDeserFactories = new java.util.Vector(); static org.apache.axis.description.OperationDesc [] _operations; static { _operations = new org.apache.axis.description.OperationDesc[1]; _initOperationDesc1(); } private static void _initOperationDesc1(){ org.apache.axis.description.OperationDesc oper; org.apache.axis.description.ParameterDesc param; oper = new org.apache.axis.description.OperationDesc(); oper.setName("getLineItemTemplatesByStatement"); param = new org.apache.axis.description.ParameterDesc(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "filterStatement"), org.apache.axis.description.ParameterDesc.IN, new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "Statement"), 
com.google.api.ads.admanager.axis.v202111.Statement.class, false, false); param.setOmittable(true); oper.addParameter(param); oper.setReturnType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "LineItemTemplatePage")); oper.setReturnClass(com.google.api.ads.admanager.axis.v202111.LineItemTemplatePage.class); oper.setReturnQName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "rval")); oper.setStyle(org.apache.axis.constants.Style.WRAPPED); oper.setUse(org.apache.axis.constants.Use.LITERAL); oper.addFault(new org.apache.axis.description.FaultDesc( new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "ApiExceptionFault"), "com.google.api.ads.admanager.axis.v202111.ApiException", new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "ApiException"), true )); _operations[0] = oper; } public LineItemTemplateServiceSoapBindingStub() throws org.apache.axis.AxisFault { this(null); } public LineItemTemplateServiceSoapBindingStub(java.net.URL endpointURL, javax.xml.rpc.Service service) throws org.apache.axis.AxisFault { this(service); super.cachedEndpoint = endpointURL; } public LineItemTemplateServiceSoapBindingStub(javax.xml.rpc.Service service) throws org.apache.axis.AxisFault { if (service == null) { super.service = new org.apache.axis.client.Service(); } else { super.service = service; } ((org.apache.axis.client.Service)super.service).setTypeMappingVersion("1.2"); java.lang.Class cls; javax.xml.namespace.QName qName; javax.xml.namespace.QName qName2; java.lang.Class beansf = org.apache.axis.encoding.ser.BeanSerializerFactory.class; java.lang.Class beandf = org.apache.axis.encoding.ser.BeanDeserializerFactory.class; java.lang.Class enumsf = org.apache.axis.encoding.ser.EnumSerializerFactory.class; java.lang.Class enumdf = org.apache.axis.encoding.ser.EnumDeserializerFactory.class; java.lang.Class arraysf = 
org.apache.axis.encoding.ser.ArraySerializerFactory.class; java.lang.Class arraydf = org.apache.axis.encoding.ser.ArrayDeserializerFactory.class; java.lang.Class simplesf = org.apache.axis.encoding.ser.SimpleSerializerFactory.class; java.lang.Class simpledf = org.apache.axis.encoding.ser.SimpleDeserializerFactory.class; java.lang.Class simplelistsf = org.apache.axis.encoding.ser.SimpleListSerializerFactory.class; java.lang.Class simplelistdf = org.apache.axis.encoding.ser.SimpleListDeserializerFactory.class; addBindings0(); addBindings1(); } private void addBindings0() { java.lang.Class cls; javax.xml.namespace.QName qName; javax.xml.namespace.QName qName2; java.lang.Class beansf = org.apache.axis.encoding.ser.BeanSerializerFactory.class; java.lang.Class beandf = org.apache.axis.encoding.ser.BeanDeserializerFactory.class; java.lang.Class enumsf = org.apache.axis.encoding.ser.EnumSerializerFactory.class; java.lang.Class enumdf = org.apache.axis.encoding.ser.EnumDeserializerFactory.class; java.lang.Class arraysf = org.apache.axis.encoding.ser.ArraySerializerFactory.class; java.lang.Class arraydf = org.apache.axis.encoding.ser.ArrayDeserializerFactory.class; java.lang.Class simplesf = org.apache.axis.encoding.ser.SimpleSerializerFactory.class; java.lang.Class simpledf = org.apache.axis.encoding.ser.SimpleDeserializerFactory.class; java.lang.Class simplelistsf = org.apache.axis.encoding.ser.SimpleListSerializerFactory.class; java.lang.Class simplelistdf = org.apache.axis.encoding.ser.SimpleListDeserializerFactory.class; qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "ApiError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.ApiError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "ApiException"); cachedSerQNames.add(qName); cls = 
com.google.api.ads.admanager.axis.v202111.ApiException.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "ApiVersionError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.ApiVersionError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "ApiVersionError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.ApiVersionErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "ApplicationException"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.ApplicationException.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "AudienceExtensionError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.AudienceExtensionError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "AudienceExtensionError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.AudienceExtensionErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "AudienceSegmentError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.AudienceSegmentError.class; cachedSerClasses.add(cls); 
cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "AudienceSegmentError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.AudienceSegmentErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "AuthenticationError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.AuthenticationError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "AuthenticationError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.AuthenticationErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "BooleanValue"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.BooleanValue.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "ClickTrackingLineItemError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.ClickTrackingLineItemError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "ClickTrackingLineItemError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.ClickTrackingLineItemErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new 
javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "CollectionSizeError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.CollectionSizeError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "CollectionSizeError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.CollectionSizeErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "CommonError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.CommonError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "CommonError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.CommonErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "CompanyCreditStatusError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.CompanyCreditStatusError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "CompanyCreditStatusError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.CompanyCreditStatusErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "CreativeError"); 
cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.CreativeError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "CreativeError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.CreativeErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "CreativeRotationType"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.CreativeRotationType.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "CrossSellError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.CrossSellError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "CrossSellError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.CrossSellErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "CustomFieldValueError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.CustomFieldValueError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "CustomFieldValueError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.CustomFieldValueErrorReason.class; 
cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "CustomTargetingError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.CustomTargetingError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "CustomTargetingError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.CustomTargetingErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "Date"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.Date.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "DateTime"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.DateTime.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "DateTimeRangeTargetingError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.DateTimeRangeTargetingError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "DateTimeRangeTargetingError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.DateTimeRangeTargetingErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new 
javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "DateTimeValue"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.DateTimeValue.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "DateValue"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.DateValue.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "DayPartTargetingError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.DayPartTargetingError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "DayPartTargetingError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.DayPartTargetingErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "DeliveryRateType"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.DeliveryRateType.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "EntityChildrenLimitReachedError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.EntityChildrenLimitReachedError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "EntityChildrenLimitReachedError.Reason"); 
cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.EntityChildrenLimitReachedErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "EntityLimitReachedError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.EntityLimitReachedError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "EntityLimitReachedError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.EntityLimitReachedErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "FeatureError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.FeatureError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "FeatureError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.FeatureErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "FieldPathElement"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.FieldPathElement.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "ForecastError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.ForecastError.class; 
cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "ForecastError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.ForecastErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "FrequencyCapError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.FrequencyCapError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "FrequencyCapError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.FrequencyCapErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "GenericTargetingError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.GenericTargetingError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "GenericTargetingError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.GenericTargetingErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "GeoTargetingError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.GeoTargetingError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new 
javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "GeoTargetingError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.GeoTargetingErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "GrpSettingsError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.GrpSettingsError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "GrpSettingsError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.GrpSettingsErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "ImageError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.ImageError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "ImageError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.ImageErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "InternalApiError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.InternalApiError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "InternalApiError.Reason"); cachedSerQNames.add(qName); cls = 
com.google.api.ads.admanager.axis.v202111.InternalApiErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "InvalidUrlError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.InvalidUrlError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "InvalidUrlError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.InvalidUrlErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "InventoryTargetingError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.InventoryTargetingError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "InventoryTargetingError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.InventoryTargetingErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "LabelEntityAssociationError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.LabelEntityAssociationError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "LabelEntityAssociationError.Reason"); cachedSerQNames.add(qName); cls = 
com.google.api.ads.admanager.axis.v202111.LabelEntityAssociationErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "LineItemActivityAssociationError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.LineItemActivityAssociationError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "LineItemActivityAssociationError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.LineItemActivityAssociationErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "LineItemCreativeAssociationError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.LineItemCreativeAssociationError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "LineItemCreativeAssociationError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.LineItemCreativeAssociationErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "LineItemError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.LineItemError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "LineItemError.Reason"); cachedSerQNames.add(qName); cls = 
com.google.api.ads.admanager.axis.v202111.LineItemErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "LineItemFlightDateError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.LineItemFlightDateError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "LineItemFlightDateError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.LineItemFlightDateErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "LineItemOperationError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.LineItemOperationError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "LineItemOperationError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.LineItemOperationErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "LineItemTemplate"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.LineItemTemplate.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "LineItemTemplatePage"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.LineItemTemplatePage.class; 
cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "LineItemType"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.LineItemType.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "MobileApplicationTargetingError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.MobileApplicationTargetingError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "MobileApplicationTargetingError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.MobileApplicationTargetingErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "NotNullError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.NotNullError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "NotNullError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.NotNullErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "NullError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.NullError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new 
javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "NullError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.NullErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "NumberValue"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.NumberValue.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "ObjectValue"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.ObjectValue.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "OrderActionError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.OrderActionError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "OrderActionError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.OrderActionErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "OrderError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.OrderError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "OrderError.Reason"); cachedSerQNames.add(qName); cls = 
com.google.api.ads.admanager.axis.v202111.OrderErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "ParseError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.ParseError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "ParseError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.ParseErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "PermissionError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.PermissionError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "PermissionError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.PermissionErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "ProgrammaticError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.ProgrammaticError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "ProgrammaticError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.ProgrammaticErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); 
cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "PublisherQueryLanguageContextError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.PublisherQueryLanguageContextError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "PublisherQueryLanguageContextError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.PublisherQueryLanguageContextErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "PublisherQueryLanguageSyntaxError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.PublisherQueryLanguageSyntaxError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "PublisherQueryLanguageSyntaxError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.PublisherQueryLanguageSyntaxErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); } private void addBindings1() { java.lang.Class cls; javax.xml.namespace.QName qName; javax.xml.namespace.QName qName2; java.lang.Class beansf = org.apache.axis.encoding.ser.BeanSerializerFactory.class; java.lang.Class beandf = org.apache.axis.encoding.ser.BeanDeserializerFactory.class; java.lang.Class enumsf = org.apache.axis.encoding.ser.EnumSerializerFactory.class; java.lang.Class enumdf = org.apache.axis.encoding.ser.EnumDeserializerFactory.class; java.lang.Class arraysf = org.apache.axis.encoding.ser.ArraySerializerFactory.class; java.lang.Class arraydf = 
org.apache.axis.encoding.ser.ArrayDeserializerFactory.class; java.lang.Class simplesf = org.apache.axis.encoding.ser.SimpleSerializerFactory.class; java.lang.Class simpledf = org.apache.axis.encoding.ser.SimpleDeserializerFactory.class; java.lang.Class simplelistsf = org.apache.axis.encoding.ser.SimpleListSerializerFactory.class; java.lang.Class simplelistdf = org.apache.axis.encoding.ser.SimpleListDeserializerFactory.class; qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "QuotaError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.QuotaError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "QuotaError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.QuotaErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "RangeError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.RangeError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "RangeError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.RangeErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "RegExError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.RegExError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new 
javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "RegExError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.RegExErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "RequestPlatformTargetingError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.RequestPlatformTargetingError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "RequestPlatformTargetingError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.RequestPlatformTargetingErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "RequiredCollectionError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.RequiredCollectionError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "RequiredCollectionError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.RequiredCollectionErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "RequiredError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.RequiredError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new 
javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "RequiredError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.RequiredErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "RequiredNumberError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.RequiredNumberError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "RequiredNumberError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.RequiredNumberErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "RequiredSizeError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.RequiredSizeError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "RequiredSizeError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.RequiredSizeErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "ReservationDetailsError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.ReservationDetailsError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", 
"ReservationDetailsError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.ReservationDetailsErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "RoadblockingType"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.RoadblockingType.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "ServerError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.ServerError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "ServerError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.ServerErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "SetTopBoxLineItemError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.SetTopBoxLineItemError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "SetTopBoxLineItemError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.SetTopBoxLineItemErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "SetValue"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.SetValue.class; 
cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "SoapRequestHeader"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.SoapRequestHeader.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "SoapResponseHeader"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.SoapResponseHeader.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "Statement"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.Statement.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "StatementError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.StatementError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "StatementError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.StatementErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "String_ValueMapEntry"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.String_ValueMapEntry.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new 
javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "StringFormatError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.StringFormatError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "StringFormatError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.StringFormatErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "StringLengthError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.StringLengthError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "StringLengthError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.StringLengthErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "TeamError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.TeamError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "TeamError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.TeamErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "TechnologyTargetingError"); cachedSerQNames.add(qName); cls = 
com.google.api.ads.admanager.axis.v202111.TechnologyTargetingError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "TechnologyTargetingError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.TechnologyTargetingErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "TextValue"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.TextValue.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "TimeZoneError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.TimeZoneError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "TimeZoneError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.TimeZoneErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "TypeError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.TypeError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "UniqueError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.UniqueError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = 
new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "UserDomainTargetingError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.UserDomainTargetingError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "UserDomainTargetingError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.UserDomainTargetingErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "Value"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.Value.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "VideoPositionTargetingError"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.VideoPositionTargetingError.class; cachedSerClasses.add(cls); cachedSerFactories.add(beansf); cachedDeserFactories.add(beandf); qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "VideoPositionTargetingError.Reason"); cachedSerQNames.add(qName); cls = com.google.api.ads.admanager.axis.v202111.VideoPositionTargetingErrorReason.class; cachedSerClasses.add(cls); cachedSerFactories.add(enumsf); cachedDeserFactories.add(enumdf); } protected org.apache.axis.client.Call createCall() throws java.rmi.RemoteException { try { org.apache.axis.client.Call _call = super._createCall(); if (super.maintainSessionSet) { _call.setMaintainSession(super.maintainSession); } if (super.cachedUsername != null) { _call.setUsername(super.cachedUsername); } if (super.cachedPassword != null) { _call.setPassword(super.cachedPassword); } if 
(super.cachedEndpoint != null) { _call.setTargetEndpointAddress(super.cachedEndpoint); } if (super.cachedTimeout != null) { _call.setTimeout(super.cachedTimeout); } if (super.cachedPortName != null) { _call.setPortName(super.cachedPortName); } java.util.Enumeration keys = super.cachedProperties.keys(); while (keys.hasMoreElements()) { java.lang.String key = (java.lang.String) keys.nextElement(); _call.setProperty(key, super.cachedProperties.get(key)); } // All the type mapping information is registered // when the first call is made. // The type mapping information is actually registered in // the TypeMappingRegistry of the service, which // is the reason why registration is only needed for the first call. synchronized (this) { if (firstCall()) { // must set encoding style before registering serializers _call.setEncodingStyle(null); for (int i = 0; i < cachedSerFactories.size(); ++i) { java.lang.Class cls = (java.lang.Class) cachedSerClasses.get(i); javax.xml.namespace.QName qName = (javax.xml.namespace.QName) cachedSerQNames.get(i); java.lang.Object x = cachedSerFactories.get(i); if (x instanceof Class) { java.lang.Class sf = (java.lang.Class) cachedSerFactories.get(i); java.lang.Class df = (java.lang.Class) cachedDeserFactories.get(i); _call.registerTypeMapping(cls, qName, sf, df, false); } else if (x instanceof javax.xml.rpc.encoding.SerializerFactory) { org.apache.axis.encoding.SerializerFactory sf = (org.apache.axis.encoding.SerializerFactory) cachedSerFactories.get(i); org.apache.axis.encoding.DeserializerFactory df = (org.apache.axis.encoding.DeserializerFactory) cachedDeserFactories.get(i); _call.registerTypeMapping(cls, qName, sf, df, false); } } } } return _call; } catch (java.lang.Throwable _t) { throw new org.apache.axis.AxisFault("Failure trying to get the Call object", _t); } } public com.google.api.ads.admanager.axis.v202111.LineItemTemplatePage getLineItemTemplatesByStatement(com.google.api.ads.admanager.axis.v202111.Statement filterStatement) 
throws java.rmi.RemoteException, com.google.api.ads.admanager.axis.v202111.ApiException { if (super.cachedEndpoint == null) { throw new org.apache.axis.NoEndPointException(); } org.apache.axis.client.Call _call = createCall(); _call.setOperation(_operations[0]); _call.setUseSOAPAction(true); _call.setSOAPActionURI(""); _call.setEncodingStyle(null); _call.setProperty(org.apache.axis.client.Call.SEND_TYPE_ATTR, Boolean.FALSE); _call.setProperty(org.apache.axis.AxisEngine.PROP_DOMULTIREFS, Boolean.FALSE); _call.setSOAPVersion(org.apache.axis.soap.SOAPConstants.SOAP11_CONSTANTS); _call.setOperationName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "getLineItemTemplatesByStatement")); setRequestHeaders(_call); setAttachments(_call); try { java.lang.Object _resp = _call.invoke(new java.lang.Object[] {filterStatement}); if (_resp instanceof java.rmi.RemoteException) { throw (java.rmi.RemoteException)_resp; } else { extractAttachments(_call); try { return (com.google.api.ads.admanager.axis.v202111.LineItemTemplatePage) _resp; } catch (java.lang.Exception _exception) { return (com.google.api.ads.admanager.axis.v202111.LineItemTemplatePage) org.apache.axis.utils.JavaUtils.convert(_resp, com.google.api.ads.admanager.axis.v202111.LineItemTemplatePage.class); } } } catch (org.apache.axis.AxisFault axisFaultException) { if (axisFaultException.detail != null) { if (axisFaultException.detail instanceof java.rmi.RemoteException) { throw (java.rmi.RemoteException) axisFaultException.detail; } if (axisFaultException.detail instanceof com.google.api.ads.admanager.axis.v202111.ApiException) { throw (com.google.api.ads.admanager.axis.v202111.ApiException) axisFaultException.detail; } } throw axisFaultException; } } }
googleads/googleads-java-lib
modules/dfp_axis/src/main/java/com/google/api/ads/admanager/axis/v202111/LineItemTemplateServiceSoapBindingStub.java
Java
apache-2.0
70,200
######## # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # * See the License for the specific language governing permissions and # * limitations under the License. from dsl_parser import (constants, models) from dsl_parser.elements import (imports, misc, plugins, node_types, node_templates, relationships, workflows, policies, data_types, version as _version) from dsl_parser.framework.elements import Element from dsl_parser.framework.requirements import Value class BlueprintVersionExtractor(Element): schema = { 'tosca_definitions_version': _version.ToscaDefinitionsVersion, # here so it gets version validated 'dsl_definitions': misc.DSLDefinitions, } requires = { _version.ToscaDefinitionsVersion: ['version', Value('plan_version')] } def parse(self, version, plan_version): return { 'version': version, 'plan_version': plan_version } class BlueprintImporter(Element): schema = { 'imports': imports.ImportsLoader, } requires = { imports.ImportsLoader: ['resource_base'] } def parse(self, resource_base): return { 'merged_blueprint': self.child(imports.ImportsLoader).value, 'resource_base': resource_base } class Blueprint(Element): schema = { 'tosca_definitions_version': _version.ToscaDefinitionsVersion, 'description': misc.Description, 'imports': imports.Imports, 'dsl_definitions': misc.DSLDefinitions, 'metadata': misc.Metadata, 'inputs': misc.Inputs, 'plugins': plugins.Plugins, 'node_types': node_types.NodeTypes, 'relationships': relationships.Relationships, 'node_templates': node_templates.NodeTemplates, 
'policy_types': policies.PolicyTypes, 'policy_triggers': policies.PolicyTriggers, 'groups': policies.Groups, 'policies': policies.Policies, 'workflows': workflows.Workflows, 'outputs': misc.Outputs, 'data_types': data_types.DataTypes } requires = { node_templates.NodeTemplates: ['deployment_plugins_to_install'], workflows.Workflows: ['workflow_plugins_to_install'], policies.Policies: ['scaling_groups'] } def parse(self, workflow_plugins_to_install, deployment_plugins_to_install, scaling_groups): return models.Plan({ constants.DESCRIPTION: self.child(misc.Description).value, constants.METADATA: self.child(misc.Metadata).value, constants.NODES: self.child(node_templates.NodeTemplates).value, constants.RELATIONSHIPS: self.child( relationships.Relationships).value, constants.WORKFLOWS: self.child(workflows.Workflows).value, constants.POLICY_TYPES: self.child(policies.PolicyTypes).value, constants.POLICY_TRIGGERS: self.child(policies.PolicyTriggers).value, constants.POLICIES: self.child(policies.Policies).value, constants.GROUPS: self.child(policies.Groups).value, constants.SCALING_GROUPS: scaling_groups or {}, constants.INPUTS: self.child(misc.Inputs).value, constants.OUTPUTS: self.child(misc.Outputs).value, constants.DEPLOYMENT_PLUGINS_TO_INSTALL: deployment_plugins_to_install, constants.WORKFLOW_PLUGINS_TO_INSTALL: workflow_plugins_to_install, constants.VERSION: self.child( _version.ToscaDefinitionsVersion).value })
cloudify-cosmo/cloudify-dsl-parser
dsl_parser/elements/blueprint.py
Python
apache-2.0
4,469
// Copyright 2008-2016 Conrad Sanderson (http://conradsanderson.id.au) // Copyright 2008-2016 National ICT Australia (NICTA) // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // ------------------------------------------------------------------------ //! \addtogroup fn_qr //! @{ //! QR decomposition template<typename T1> inline bool qr ( Mat<typename T1::elem_type>& Q, Mat<typename T1::elem_type>& R, const Base<typename T1::elem_type,T1>& X, const typename arma_blas_type_only<typename T1::elem_type>::result* junk = nullptr ) { arma_extra_debug_sigprint(); arma_ignore(junk); arma_debug_check( (&Q == &R), "qr(): Q and R are the same object" ); const bool status = auxlib::qr(Q, R, X); if(status == false) { Q.soft_reset(); R.soft_reset(); arma_debug_warn_level(3, "qr(): decomposition failed"); } return status; } //! economical QR decomposition template<typename T1> inline bool qr_econ ( Mat<typename T1::elem_type>& Q, Mat<typename T1::elem_type>& R, const Base<typename T1::elem_type,T1>& X, const typename arma_blas_type_only<typename T1::elem_type>::result* junk = nullptr ) { arma_extra_debug_sigprint(); arma_ignore(junk); arma_debug_check( (&Q == &R), "qr_econ(): Q and R are the same object" ); const bool status = auxlib::qr_econ(Q, R, X); if(status == false) { Q.soft_reset(); R.soft_reset(); arma_debug_warn_level(3, "qr_econ(): decomposition failed"); } return status; } //! 
QR decomposition with pivoting template<typename T1> inline typename enable_if2< is_supported_blas_type<typename T1::elem_type>::value, bool >::result qr ( Mat<typename T1::elem_type>& Q, Mat<typename T1::elem_type>& R, Mat<uword>& P, const Base<typename T1::elem_type,T1>& X, const char* P_mode = "matrix" ) { arma_extra_debug_sigprint(); arma_debug_check( (&Q == &R), "qr(): Q and R are the same object" ); const char sig = (P_mode != nullptr) ? P_mode[0] : char(0); arma_debug_check( ((sig != 'm') && (sig != 'v')), "qr(): argument 'P_mode' must be \"vector\" or \"matrix\"" ); bool status = false; if(sig == 'v') { status = auxlib::qr_pivot(Q, R, P, X); } else if(sig == 'm') { Mat<uword> P_vec; status = auxlib::qr_pivot(Q, R, P_vec, X); if(status) { // construct P const uword N = P_vec.n_rows; P.zeros(N,N); for(uword row=0; row < N; ++row) { P.at(P_vec[row], row) = uword(1); } } } if(status == false) { Q.soft_reset(); R.soft_reset(); P.soft_reset(); arma_debug_warn_level(3, "qr(): decomposition failed"); } return status; } //! @}
kumanna/armadillo-debian
include/armadillo_bits/fn_qr.hpp
C++
apache-2.0
3,371
// // ======================================================================== // Copyright (c) 1995-2014 Mort Bay Consulting Pty. Ltd. // ------------------------------------------------------------------------ // All rights reserved. This program and the accompanying materials // are made available under the terms of the Eclipse Public License v1.0 // and Apache License v2.0 which accompanies this distribution. // // The Eclipse Public License is available at // http://www.eclipse.org/legal/epl-v10.html // // The Apache License v2.0 is available at // http://www.opensource.org/licenses/apache2.0.php // // You may elect to redistribute this code under either of these licenses. // ======================================================================== // package org.eclipse.jetty.monitor; import java.io.IOException; import java.util.HashSet; import java.util.Set; import javax.management.MBeanServerConnection; import org.eclipse.jetty.monitor.jmx.MonitorAction; import org.eclipse.jetty.monitor.jmx.MonitorTask; import org.eclipse.jetty.monitor.jmx.ServiceConnection; import org.eclipse.jetty.xml.XmlConfiguration; /* ------------------------------------------------------------ */ /** * JMXMonitor * * Performs monitoring of the values of the attributes of MBeans * and executes specified actions as well as sends notifications * of the specified events that have occurred. */ public class JMXMonitor { private static JMXMonitor __monitor = new JMXMonitor(); private String _serverUrl; private ServiceConnection _serviceConnection; private Set<MonitorAction> _actions = new HashSet<MonitorAction>(); /* ------------------------------------------------------------ */ /** * Constructs a JMXMonitor instance. Used for XML Configuration. * * !! DO NOT INSTANTIATE EXPLICITLY !! 
*/ public JMXMonitor() {} /* ------------------------------------------------------------ */ /** * Adds monitor actions to the monitor * * @param actions monitor actions to add * @return true if successful */ public boolean addActions(MonitorAction... actions) { return getInstance().add(actions); } /* ------------------------------------------------------------ */ /** * Removes monitor actions from the monitor * * @param actions monitor actions to remove * @return true if successful */ public boolean removeActions(MonitorAction... actions) { return getInstance().remove(actions); } /* ------------------------------------------------------------ */ /** * Sets the JMX server URL * * @param url URL of the JMX server */ public void setUrl(String url) { getInstance().set(url); } public MBeanServerConnection getConnection() throws IOException { return getInstance().get(); } public static JMXMonitor getInstance() { return __monitor; } public static boolean addMonitorActions(MonitorAction... actions) { return getInstance().add(actions); } public static boolean removeMonitorActions(MonitorAction... actions) { return getInstance().remove(actions); } public static void setServiceUrl(String url) { getInstance().set(url); } /* ------------------------------------------------------------ */ /** * Retrieves a connection to JMX service * * @return server connection * @throws IOException */ public static MBeanServerConnection getServiceConnection() throws IOException { return getInstance().getConnection(); } public static void main(final String args[]) throws Exception { XmlConfiguration.main(args); } private synchronized boolean add(MonitorAction... actions) { boolean result = true; for (MonitorAction action : actions) { if (!_actions.add(action)) { result = false; } else { MonitorTask.schedule(action); } } return result; } private synchronized boolean remove(MonitorAction... 
actions) { boolean result = true; for (MonitorAction action : actions) { if (!_actions.remove(action)) { result = false; } MonitorTask.cancel(action); } return result; } private synchronized void set(String url) { _serverUrl = url; if (_serviceConnection != null) { _serviceConnection.disconnect(); _serviceConnection = null; } } private synchronized MBeanServerConnection get() throws IOException { if (_serviceConnection == null) { _serviceConnection = new ServiceConnection(_serverUrl); _serviceConnection.connect(); } return _serviceConnection.getConnection(); } }
sdw2330976/Research-jetty-9.2.5
jetty-monitor/src/main/java/org/eclipse/jetty/monitor/JMXMonitor.java
Java
apache-2.0
5,187
/* * Copyright 2015 LINE Corporation * * LINE Corporation licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.linecorp.armeria.server; import static java.util.Objects.requireNonNull; import java.net.IDN; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.function.Function; import java.util.regex.Pattern; import java.util.stream.Collectors; import javax.annotation.Nullable; import com.google.common.base.Ascii; import com.linecorp.armeria.common.http.HttpRequest; import com.linecorp.armeria.common.http.HttpResponse; import io.netty.handler.ssl.SslContext; import io.netty.util.DomainNameMapping; import io.netty.util.DomainNameMappingBuilder; /** * A <a href="https://en.wikipedia.org/wiki/Virtual_hosting#Name-based">name-based virtual host</a>. * A {@link VirtualHost} contains the following information: * <ul> * <li>the hostname pattern, as defined in * <a href="http://tools.ietf.org/html/rfc2818#section-3.1">the section 3.1 of RFC2818</a></li> * <li>{@link SslContext} if TLS is enabled</li> * <li>the list of available {@link Service}s and their {@link PathMapping}s</li> * </ul> * * @see VirtualHostBuilder */ public final class VirtualHost { private static final Pattern HOSTNAME_PATTERN = Pattern.compile( "^(?:[-_a-zA-Z0-9]|[-_a-zA-Z0-9][-_.a-zA-Z0-9]*[-_a-zA-Z0-9])$"); /** * Initialized later by {@link ServerConfig} via {@link #setServerConfig(ServerConfig)}. 
*/ private ServerConfig serverConfig; private final String defaultHostname; private final String hostnamePattern; private final SslContext sslContext; private final List<ServiceConfig> services; private final PathMappings<ServiceConfig> serviceMapping = new PathMappings<>(); private String strVal; VirtualHost(String defaultHostname, String hostnamePattern, SslContext sslContext, Iterable<ServiceConfig> serviceConfigs) { defaultHostname = normalizeDefaultHostname(defaultHostname); hostnamePattern = normalizeHostnamePattern(hostnamePattern); ensureHostnamePatternMatchesDefaultHostname(hostnamePattern, defaultHostname); this.defaultHostname = defaultHostname; this.hostnamePattern = hostnamePattern; this.sslContext = validateSslContext(sslContext); requireNonNull(serviceConfigs, "serviceConfigs"); final List<ServiceConfig> servicesCopy = new ArrayList<>(); for (ServiceConfig c : serviceConfigs) { c = c.build(this); servicesCopy.add(c); serviceMapping.add(c.pathMapping(), c); } services = Collections.unmodifiableList(servicesCopy); serviceMapping.freeze(); } /** * IDNA ASCII conversion, case normalization and validation. */ static String normalizeDefaultHostname(String defaultHostname) { requireNonNull(defaultHostname, "defaultHostname"); if (needsNormalization(defaultHostname)) { defaultHostname = IDN.toASCII(defaultHostname, IDN.ALLOW_UNASSIGNED); } if (!HOSTNAME_PATTERN.matcher(defaultHostname).matches()) { throw new IllegalArgumentException("defaultHostname: " + defaultHostname); } return Ascii.toLowerCase(defaultHostname); } /** * IDNA ASCII conversion, case normalization and validation. */ static String normalizeHostnamePattern(String hostnamePattern) { requireNonNull(hostnamePattern, "hostnamePattern"); if (needsNormalization(hostnamePattern)) { hostnamePattern = IDN.toASCII(hostnamePattern, IDN.ALLOW_UNASSIGNED); } if (!"*".equals(hostnamePattern) && !HOSTNAME_PATTERN.matcher(hostnamePattern.startsWith("*.") ? 
hostnamePattern.substring(2) : hostnamePattern).matches()) { throw new IllegalArgumentException("hostnamePattern: " + hostnamePattern); } return Ascii.toLowerCase(hostnamePattern); } /** * Ensure that 'hostnamePattern' matches 'defaultHostname'. */ static void ensureHostnamePatternMatchesDefaultHostname(String hostnamePattern, String defaultHostname) { if ("*".equals(hostnamePattern)) { return; } // Pretty convoluted way to validate but it's done only once and // we don't need to duplicate the pattern matching logic. final DomainNameMapping<Boolean> mapping = new DomainNameMappingBuilder<>(Boolean.FALSE).add(hostnamePattern, Boolean.TRUE).build(); if (!mapping.map(defaultHostname)) { throw new IllegalArgumentException( "defaultHostname: " + defaultHostname + " (must be matched by hostnamePattern: " + hostnamePattern + ')'); } } private static boolean needsNormalization(String hostnamePattern) { final int length = hostnamePattern.length(); for (int i = 0; i < length; i++) { int c = hostnamePattern.charAt(i); if (c > 0x7F) { return true; } } return false; } static SslContext validateSslContext(SslContext sslContext) { if (sslContext != null && !sslContext.isServer()) { throw new IllegalArgumentException("sslContext: " + sslContext + " (expected: server context)"); } return sslContext; } /** * Returns the {@link Server} where this {@link VirtualHost} belongs to. */ public Server server() { if (serverConfig == null) { throw new IllegalStateException("server is not configured yet."); } return serverConfig.server(); } void setServerConfig(ServerConfig serverConfig) { if (this.serverConfig != null) { throw new IllegalStateException("VirtualHost cannot be added to more than one Server."); } this.serverConfig = requireNonNull(serverConfig, "serverConfig"); } /** * Returns the default hostname of this virtual host. 
*/ public String defaultHostname() { return defaultHostname; } /** * Returns the hostname pattern of this virtual host, as defined in * <a href="http://tools.ietf.org/html/rfc2818#section-3.1">the section 3.1 of RFC2818</a> */ public String hostnamePattern() { return hostnamePattern; } /** * Returns the {@link SslContext} of this virtual host. */ public SslContext sslContext() { return sslContext; } /** * Returns the information about the {@link Service}s bound to this virtual host. */ public List<ServiceConfig> serviceConfigs() { return services; } /** * Finds the {@link Service} whose {@link PathMapping} matches the {@code path}. * * @param path an absolute path, as defined in <a href="https://tools.ietf.org/html/rfc3986">RFC3986</a> * @param query a query, as defined in <a href="https://tools.ietf.org/html/rfc3986">RFC3986</a>. * {@code null} if query does not exist. * * @return the {@link ServiceConfig} wrapped by a {@link PathMapped} if there's a match. * {@link PathMapped#empty()} if there's no match. 
*/ public PathMapped<ServiceConfig> findServiceConfig(String path, @Nullable String query) { requireNonNull(path, "path"); return serviceMapping.apply(path, query); } VirtualHost decorate(@Nullable Function<Service<HttpRequest, HttpResponse>, Service<HttpRequest, HttpResponse>> decorator) { if (decorator == null) { return this; } final List<ServiceConfig> services = this.services.stream().map(cfg -> { final PathMapping pathMapping = cfg.pathMapping(); final Service<HttpRequest, HttpResponse> service = decorator.apply(cfg.service()); final String loggerName = cfg.loggerName().orElse(null); return new ServiceConfig(pathMapping, service, loggerName); }).collect(Collectors.toList()); return new VirtualHost(defaultHostname(), hostnamePattern(), sslContext(), services); } @Override public String toString() { String strVal = this.strVal; if (strVal == null) { this.strVal = strVal = toString( getClass(), defaultHostname(), hostnamePattern(), sslContext(), serviceConfigs()); } return strVal; } static String toString(Class<?> type, String defaultHostname, String hostnamePattern, SslContext sslContext, List<?> services) { StringBuilder buf = new StringBuilder(); if (type != null) { buf.append(type.getSimpleName()); } buf.append('('); buf.append(defaultHostname); buf.append('/'); buf.append(hostnamePattern); buf.append(", ssl: "); buf.append(sslContext != null); buf.append(", services: "); buf.append(services); buf.append(')'); return buf.toString(); } }
jonefeewang/armeria
core/src/main/java/com/linecorp/armeria/server/VirtualHost.java
Java
apache-2.0
9,840
package org.aieonf.commons.parser; import java.util.EventObject; public class ParseEvent<T extends Object> extends EventObject { private static final long serialVersionUID = 2113739661852090208L; private T data; public ParseEvent( Object source, T data ) { super( source ); } public T getData() { return data; } }
condast/AieonF
Workspace/org.aieonf.commons/src/org/aieonf/commons/parser/ParseEvent.java
Java
apache-2.0
358
package com.cs446.kluster.views.fragments; import java.util.Calendar; import android.app.DatePickerDialog; import android.app.Dialog; import android.app.DialogFragment; import android.app.TimePickerDialog; import android.os.Bundle; import android.text.Editable; import android.text.TextWatcher; import android.text.format.DateFormat; import android.view.LayoutInflater; import android.view.View; import android.view.View.OnClickListener; import android.view.ViewGroup; import android.widget.Button; import android.widget.DatePicker; import android.widget.EditText; import android.widget.TimePicker; import com.cs446.kluster.R; public class FilterDialogFragment extends DialogFragment { public static interface FilterListener { public void userSetFilter(String filter, String value); public void userReturned(); } private static FilterListener mListener; public void setFilterListener(FilterListener listener) { mListener = listener; } public FilterListener getFilterListener() { return mListener; } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View view = inflater.inflate(R.layout.filter_layout, container, false); getDialog().setTitle("Filter Search Results"); Button btnDate = (Button)view.findViewById(R.id.filter_btnDate); Button btnTime = (Button)view.findViewById(R.id.filter_btnTime); Button btnApply = (Button)view.findViewById(R.id.filter_btnApply); EditText txtContributor = (EditText)view.findViewById(R.id.filter_user); EditText txtTags = (EditText)view.findViewById(R.id.filter_tags); btnDate.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { DialogFragment fragment = new DatePickerFragment(); fragment.show(getFragmentManager(), "timePicker"); } }); btnTime.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { DialogFragment fragment = new TimePickerFragment(); fragment.show(getFragmentManager(), "timePicker"); } }); btnApply.setOnClickListener(new 
OnClickListener() { @Override public void onClick(View v) { mListener.userReturned(); getDialog().dismiss(); } }); txtContributor.addTextChangedListener(new TextWatcher() { @Override public void onTextChanged(CharSequence s, int start, int before, int count) {} @Override public void beforeTextChanged(CharSequence s, int start, int count,int after) { } @Override public void afterTextChanged(Editable s) { mListener.userSetFilter("eventname", s.toString()); } }); txtTags.addTextChangedListener(new TextWatcher() { @Override public void onTextChanged(CharSequence s, int start, int before, int count) {} @Override public void beforeTextChanged(CharSequence s, int start, int count,int after) { } @Override public void afterTextChanged(Editable s) { mListener.userSetFilter("tags", s.toString()); } }); return view; } public static class TimePickerFragment extends DialogFragment implements TimePickerDialog.OnTimeSetListener { @Override public Dialog onCreateDialog(Bundle savedInstanceState) { // Use the current time as the default values for the picker final Calendar c = Calendar.getInstance(); int hour = c.get(Calendar.HOUR_OF_DAY); int minute = c.get(Calendar.MINUTE); // Create a new instance of TimePickerDialog and return it return new TimePickerDialog(getActivity(), this, hour, minute, DateFormat.is24HourFormat(getActivity())); } public void onTimeSet(TimePicker view, int hourOfDay, int minute) { mListener.userSetFilter("time", String.format("%s:%s", hourOfDay, minute)); } } public static class DatePickerFragment extends DialogFragment implements DatePickerDialog.OnDateSetListener { @Override public Dialog onCreateDialog(Bundle savedInstanceState) { // Use the current date as the default date in the picker final Calendar c = Calendar.getInstance(); int year = c.get(Calendar.YEAR); int month = c.get(Calendar.MONTH); int day = c.get(Calendar.DAY_OF_MONTH); // Create a new instance of DatePickerDialog and return it return new DatePickerDialog(getActivity(), this, year, month, 
day); } public void onDateSet(DatePicker view, int year, int month, int day) { mListener.userSetFilter("date", String.format("%d/%d/%d", month, day, year)); } } }
nmcgill/kluster-android
Kluster/src/com/cs446/kluster/views/fragments/FilterDialogFragment.java
Java
apache-2.0
4,543
#include "libntptest.h" class atouintTest : public libntptest { }; TEST_F(atouintTest, RegularPositive) { const char *str = "305"; u_long actual; ASSERT_TRUE(atouint(str, &actual)); EXPECT_EQ(305, actual); } TEST_F(atouintTest, PositiveOverflowBoundary) { const char *str = "4294967296"; u_long actual; ASSERT_FALSE(atouint(str, &actual)); } TEST_F(atouintTest, PositiveOverflowBig) { const char *str = "8000000000"; u_long actual; ASSERT_FALSE(atouint(str, &actual)); } TEST_F(atouintTest, Negative) { const char *str = "-1"; u_long actual; ASSERT_FALSE(atouint(str, &actual)); } TEST_F(atouintTest, IllegalChar) { const char *str = "50c3"; u_long actual; ASSERT_FALSE(atouint(str, &actual)); }
execunix/vinos
external/bsd/ntp/dist/tests/libntp/atouint.cpp
C++
apache-2.0
723
package org.ovirt.engine.core.utils; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.List; import org.junit.Test; import org.ovirt.engine.core.common.businessentities.Nameable; public class ReplacementUtilsTest { private static final String PROPERTY_NAME = "MY_SINGLE_ITEM_LIST"; private static final String PROPERTY_VALUE = "MY_SINGLE_ITEM_VALUE"; private static final String PROPERTY_COUNTER_NAME = "MY_SINGLE_ITEM_LIST_COUNTER"; @Test public void replaceWithSingleItem() { List<Object> items = Collections.<Object> singletonList(PROPERTY_VALUE); validateReplacements(ReplacementUtils.replaceWith(PROPERTY_NAME, items), items); } @Test public void replaceWithNullItem() { List<Object> items = Collections.singletonList(null); Collection<String> replacements = ReplacementUtils.replaceWith(PROPERTY_NAME, items); validateReplacementsContainsExpectedProperties(replacements, items); assertTrue(validateReplacementContains(replacements, "null")); } @Test public void replaceWithNameableCollection() { Nameable item = new Nameable() { @Override public String getName() { return PROPERTY_VALUE; } }; List<Nameable> items = Collections.singletonList(item); validateReplacements(ReplacementUtils.replaceWithNameable(PROPERTY_NAME, items), items); } @Test public void replaceWithEmptyCollection() { Collection<String> replacements = ReplacementUtils.replaceWith(PROPERTY_NAME, Collections.emptyList()); validateReplacementsContainsExpectedProperties(replacements, Collections.emptyList()); } @Test public void replaceWithMoreThanMaxItems() { List<Object> items = createItems(); Collection<String> replacements = ReplacementUtils.replaceWith(PROPERTY_NAME, items); validateReplacementsContainsExpectedProperties(replacements, items); 
validateReplacementsDoNotContainUnexpectedItems(replacements, items); } @Test public void containLowerThanDefaultNumberOfElements() { List<Object> items = createItems(); String separator = "sep"; // Less than the default number of elements to show. int numOfElementsToShow = 3; Collection<String> replacements = ReplacementUtils.replaceWith(PROPERTY_NAME, items, separator , numOfElementsToShow); assertTrue(validateReplacementElementCount(replacements, separator, numOfElementsToShow)); } @Test(expected = IllegalArgumentException.class) public void separatorNotEmpty() { List<Object> items = createItems(); String separator = ""; ReplacementUtils.replaceWith(PROPERTY_NAME, items, separator , 5); } @Test(expected = IllegalArgumentException.class) public void separatorNotNull() { List<Object> items = createItems(); String separator = null; ReplacementUtils.replaceWith(PROPERTY_NAME, items, separator, 5); } @Test(expected = IllegalArgumentException.class) public void failZeroValuesToShow() { List<Object> items = createItems(); String separator = ", "; ReplacementUtils.replaceWith(PROPERTY_NAME, items, separator , 0); } @Test(expected = IllegalArgumentException.class) public void failNegativeNumOfValuesToShow() { List<Object> items = createItems(); String separator = ", "; ReplacementUtils.replaceWith(PROPERTY_NAME, items, separator , -5); } @Test public void containBiggerThanDefaultNumberOfElements() { List<Object> items = createItems(); String separator = "sep"; // More than the default number of elements to show. 
int numOfElementsToShow = 8; Collection<String> replacements = ReplacementUtils.replaceWith(PROPERTY_NAME, items, separator , numOfElementsToShow); assertTrue(validateReplacementElementCount(replacements, separator, numOfElementsToShow)); } private boolean validateReplacementElementCount(Collection<String> replacements, String separator, int numOfElementsToShow) { String replacement = replacements.iterator().next(); String[] values = replacement.split(separator); int numOfElementsFound = 0; for (int i = 0; i < values.length; i++) { if (values[i].contains( PROPERTY_NAME )) { numOfElementsFound++; } } return numOfElementsFound == numOfElementsToShow; } private <T> void validateReplacementsContainsExpectedProperties(Collection<String> replacements, List<T> items) { assertNotNull(replacements); assertEquals(2, replacements.size()); assertTrue(validateReplacementContains(replacements, "$" + PROPERTY_NAME + " ")); assertTrue(validateReplacementContains(replacements, "$" + PROPERTY_COUNTER_NAME + " ")); assertTrue(validateReplacementContains(replacements, String.valueOf(items.size()))); } private <T> void validateReplacements(Collection<String> replacements, List<T> items) { validateReplacementsContainsExpectedProperties(replacements, items); assertTrue(validateReplacementContains(replacements, PROPERTY_VALUE)); } private boolean validateReplacementContains(Collection<String> replacements, String property) { Iterator<String> iterator = replacements.iterator(); while (iterator.hasNext()) { String replacement = iterator.next(); if (replacement.contains(property)) { return true; } } return false; } private void validateReplacementsDoNotContainUnexpectedItems(Collection<String> replacements, List<Object> items) { Iterator<String> iterator = replacements.iterator(); while (iterator.hasNext()) { String replacement = iterator.next(); for (int i = ReplacementUtils.DEFAULT_MAX_NUMBER_OF_PRINTED_ITEMS; i < items.size(); i++) { 
assertFalse(replacement.contains(buildPropertyValue(i))); } } } private List<Object> createItems() { List<Object> items = new ArrayList<>(ReplacementUtils.DEFAULT_MAX_NUMBER_OF_PRINTED_ITEMS * 2); for (int i = 0; i < ReplacementUtils.DEFAULT_MAX_NUMBER_OF_PRINTED_ITEMS * 2; i++) { items.add(buildPropertyValue(i)); } return items; } private String buildPropertyValue(int id) { return PROPERTY_NAME + String.valueOf(id); } }
OpenUniversity/ovirt-engine
backend/manager/modules/utils/src/test/java/org/ovirt/engine/core/utils/ReplacementUtilsTest.java
Java
apache-2.0
6,849
/* * Copyright 2014 Miles Chaston * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.chaston.oakfunds.model.ui; import com.google.inject.Inject; import org.chaston.oakfunds.model.Model; import org.chaston.oakfunds.model.ModelManager; import org.chaston.oakfunds.storage.StorageException; import org.chaston.oakfunds.util.JSONUtils; import org.chaston.oakfunds.util.RequestHandler; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; /** * TODO(mchaston): write JavaDocs */ class ModelListServlet extends HttpServlet { private final RequestHandler requestHandler; private final ModelManager modelManager; @Inject ModelListServlet(RequestHandler requestHandler, ModelManager modelManager) { this.requestHandler = requestHandler; this.modelManager = modelManager; } @Override protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { Iterable<Model> models = requestHandler.handle(request, response, new RequestHandler.Action<Iterable<Model>>() { @Override public Iterable<Model> doAction(HttpServletRequest request) throws StorageException, ServletException { return modelManager.getModels(); } }); // Write result to response. response.setContentType("application/json"); JSONUtils.writeJSONString(response.getWriter(), models); } }
mchaston/OakFunds
src/org/chaston/oakfunds/model/ui/ModelListServlet.java
Java
apache-2.0
2,132
from setuptools import setup setup(name='flightPredict', version='0.3', description='Python Library for SPark MLLIB Flight Predict sample application', url='git+https://github.com/ibm-watson-data-lab/simple-data-pipe-connector-flightstats.git', author='David Taieb', author_email='david_taieb@us.ibm.com', license='Apache 2.0', packages=['flightPredict'], zip_safe=False)
ibm-cds-labs/simple-data-pipe-connector-flightstats
setup.py
Python
apache-2.0
420
/* Copyright 2015 Gravitational, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package ui const ( // WebConfigAuthProviderOIDCType is OIDC provider type WebConfigAuthProviderOIDCType = "oidc" // WebConfigAuthProviderOIDCURL is OIDC webapi endpoint WebConfigAuthProviderOIDCURL = "/v1/webapi/oidc/login/web?redirect_url=:redirect&connector_id=:providerName" // WebConfigAuthProviderSAMLType is SAML provider type WebConfigAuthProviderSAMLType = "saml" // WebConfigAuthProviderSAMLURL is SAML webapi endpoint WebConfigAuthProviderSAMLURL = "/v1/webapi/saml/sso?redirect_url=:redirect&connector_id=:providerName" // WebConfigAuthProviderGitHubType is GitHub provider type WebConfigAuthProviderGitHubType = "github" // WebConfigAuthProviderGitHubURL is GitHub webapi endpoint WebConfigAuthProviderGitHubURL = "/v1/webapi/github/login/web?redirect_url=:redirect&connector_id=:providerName" ) // WebConfig is web application configuration type WebConfig struct { // Auth contains Teleport auth. preferences Auth WebConfigAuthSettings `json:"auth,omitempty"` // CanJoinSessions disables joining sessions CanJoinSessions bool `json:"canJoinSessions"` } // WebConfigAuthProvider describes auth. 
provider type WebConfigAuthProvider struct { // Name is this provider ID Name string `json:"name,omitempty"` // DisplayName is this provider display name DisplayName string `json:"displayName,omitempty"` // Type is this provider type Type string `json:"type,omitempty"` // WebAPIURL is this provider webapi URL WebAPIURL string `json:"url,omitempty"` } // WebConfigAuthSettings describes auth configuration type WebConfigAuthSettings struct { // SecondFactor is the type of second factor to use in authentication. SecondFactor string `json:"second_factor,omitempty"` // Providers contains a list of configured auth providers Providers []WebConfigAuthProvider `json:"providers,omitempty"` }
yacloud-io/teleport
lib/web/ui/webconfig.go
GO
apache-2.0
2,398
/** * ASM: a very small and fast Java bytecode manipulation framework * Copyright (c) 2000-2011 INRIA, France Telecom * All rights reserved. * <p/> * Redistribution and use in srccode and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of srccode code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. Neither the className of the copyright holders nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * <p/> * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF * THE POSSIBILITY OF SUCH DAMAGE. */ package act.asm; /** * Information about the input and output stack map frames of a basic block. 
* * @author Eric Bruneton */ final class Frame { /* * Frames are computed in a two steps process: during the visit of each * instruction, the state of the frame at the end of current basic block is * updated by simulating the action of the instruction on the previous state * of this so called "output frame". In visitMaxs, a fix point algorithm is * used to compute the "input frame" of each basic block, i.e. the stack map * frame at the beginning of the basic block, starting from the input frame * of the first basic block (which is computed from the method descriptor), * and by using the previously computed output frames to compute the input * state of the other blocks. * * All output and input frames are stored as arrays of integers. Reference * and array types are represented by an index into a type table (which is * not the same as the constant pool of the class, in order to avoid adding * unnecessary constants in the pool - not all computed frames will end up * being stored in the stack map table). This allows very fast type * comparisons. * * Output stack map frames are computed relatively to the input frame of the * basic block, which is not yet known when output frames are computed. It * is therefore necessary to be able to represent abstract types such as * "the type at position x in the input frame locals" or "the type at * position x from the top of the input frame stack" or even "the type at * position x in the input frame, with y more (or less) array dimensions". * This explains the rather complicated type format used in output frames. * * This format is the following: DIM KIND VALUE (4, 4 and 24 bits). DIM is a * signed number of array dimensions (from -8 to 7). KIND is either BASE, * LOCAL or STACK. BASE is used for types that are not relative to the input * frame. LOCAL is used for types that are relative to the input local * variable types. STACK is used for types that are relative to the input * stack types. VALUE depends on KIND. 
For LOCAL types, it is an index in * the input local variable types. For STACK types, it is a position * relatively to the top of input frame stack. For BASE types, it is either * one of the constants defined below, or for OBJECT and UNINITIALIZED * types, a tag and an index in the type table. * * Output frames can contain types of any kind and with a positive or * negative dimension (and even unassigned types, represented by 0 - which * does not correspond to any valid type value). Input frames can only * contain BASE types of positive or null dimension. In all cases the type * table contains only internal type names (array type descriptors are * forbidden - dimensions must be represented through the DIM field). * * The LONG and DOUBLE types are always represented by using two slots (LONG * + TOP or DOUBLE + TOP), for local variable types as well as in the * operand stack. This is necessary to be able to simulate DUPx_y * instructions, whose effect would be dependent on the actual type values * if types were always represented by a single slot in the stack (and this * is not possible, since actual type values are not always known - cf LOCAL * and STACK type kinds). */ /** * Mask to get the dimension of a frame type. This dimension is a signed * integer between -8 and 7. */ static final int DIM = 0xF0000000; /** * Constant to be added to a type to get a type with one more dimension. */ static final int ARRAY_OF = 0x10000000; /** * Constant to be added to a type to get a type with one less dimension. */ static final int ELEMENT_OF = 0xF0000000; /** * Mask to get the kind of a frame type. * * @see #BASE * @see #LOCAL * @see #STACK */ static final int KIND = 0xF000000; /** * Flag used for LOCAL and STACK types. Indicates that if this type happens * to be a long or double type (during the computations of input frames), * then it must be set to TOP because the second word of this value has been * reused to store other data in the basic block. 
Hence the first word no * longer stores a valid long or double value. */ static final int TOP_IF_LONG_OR_DOUBLE = 0x800000; /** * Mask to get the value of a frame type. */ static final int VALUE = 0x7FFFFF; /** * Mask to get the kind of base types. */ static final int BASE_KIND = 0xFF00000; /** * Mask to get the value of base types. */ static final int BASE_VALUE = 0xFFFFF; /** * Kind of the types that are not relative to an input stack map frame. */ static final int BASE = 0x1000000; /** * Base kind of the base reference types. The BASE_VALUE of such types is an * index into the type table. */ static final int OBJECT = BASE | 0x700000; /** * Base kind of the uninitialized base types. The BASE_VALUE of such types * in an index into the type table (the Item at that index contains both an * instruction offset and an internal class className). */ static final int UNINITIALIZED = BASE | 0x800000; /** * Kind of the types that are relative to the local variable types of an * input stack map frame. The value of such types is a local variable index. */ private static final int LOCAL = 0x2000000; /** * Kind of the the types that are relative to the stack of an input stack * map frame. The value of such types is a position relatively to the top of * this stack. */ private static final int STACK = 0x3000000; /** * The TOP type. This is a BASE type. */ static final int TOP = BASE | 0; /** * The BOOLEAN type. This is a BASE type mainly used for array types. */ static final int BOOLEAN = BASE | 9; /** * The BYTE type. This is a BASE type mainly used for array types. */ static final int BYTE = BASE | 10; /** * The CHAR type. This is a BASE type mainly used for array types. */ static final int CHAR = BASE | 11; /** * The SHORT type. This is a BASE type mainly used for array types. */ static final int SHORT = BASE | 12; /** * The INTEGER type. This is a BASE type. */ static final int INTEGER = BASE | 1; /** * The FLOAT type. This is a BASE type. 
*/ static final int FLOAT = BASE | 2; /** * The DOUBLE type. This is a BASE type. */ static final int DOUBLE = BASE | 3; /** * The LONG type. This is a BASE type. */ static final int LONG = BASE | 4; /** * The NULL type. This is a BASE type. */ static final int NULL = BASE | 5; /** * The UNINITIALIZED_THIS type. This is a BASE type. */ static final int UNINITIALIZED_THIS = BASE | 6; /** * The stack size variation corresponding to each JVM instruction. This * stack variation is equal to the size of the values produced by an * instruction, minus the size of the values consumed by this instruction. */ static final int[] SIZE; /** * Computes the stack size variation corresponding to each JVM instruction. */ static { int i; int[] b = new int[202]; String s = "EFFFFFFFFGGFFFGGFFFEEFGFGFEEEEEEEEEEEEEEEEEEEEDEDEDDDDD" + "CDCDEEEEEEEEEEEEEEEEEEEEBABABBBBDCFFFGGGEDCDCDCDCDCDCDCDCD" + "CDCEEEEDDDDDDDCDCDCEFEFDDEEFFDEDEEEBDDBBDDDDDDCCCCCCCCEFED" + "DDCDCDEEEEEEEEEEFEEEEEEDDEEDDEE"; for (i = 0; i < b.length; ++i) { b[i] = s.charAt(i) - 'E'; } SIZE = b; // code to generate the above string // // int NA = 0; // not applicable (unused opcode or variable size opcode) // // b = new int[] { // 0, //NOP, // visitInsn // 1, //ACONST_NULL, // - // 1, //ICONST_M1, // - // 1, //ICONST_0, // - // 1, //ICONST_1, // - // 1, //ICONST_2, // - // 1, //ICONST_3, // - // 1, //ICONST_4, // - // 1, //ICONST_5, // - // 2, //LCONST_0, // - // 2, //LCONST_1, // - // 1, //FCONST_0, // - // 1, //FCONST_1, // - // 1, //FCONST_2, // - // 2, //DCONST_0, // - // 2, //DCONST_1, // - // 1, //BIPUSH, // visitIntInsn // 1, //SIPUSH, // - // 1, //LDC, // visitLdcInsn // NA, //LDC_W, // - // NA, //LDC2_W, // - // 1, //ILOAD, // visitVarInsn // 2, //LLOAD, // - // 1, //FLOAD, // - // 2, //DLOAD, // - // 1, //ALOAD, // - // NA, //ILOAD_0, // - // NA, //ILOAD_1, // - // NA, //ILOAD_2, // - // NA, //ILOAD_3, // - // NA, //LLOAD_0, // - // NA, //LLOAD_1, // - // NA, //LLOAD_2, // - // NA, //LLOAD_3, // - // NA, 
//FLOAD_0, // - // NA, //FLOAD_1, // - // NA, //FLOAD_2, // - // NA, //FLOAD_3, // - // NA, //DLOAD_0, // - // NA, //DLOAD_1, // - // NA, //DLOAD_2, // - // NA, //DLOAD_3, // - // NA, //ALOAD_0, // - // NA, //ALOAD_1, // - // NA, //ALOAD_2, // - // NA, //ALOAD_3, // - // -1, //IALOAD, // visitInsn // 0, //LALOAD, // - // -1, //FALOAD, // - // 0, //DALOAD, // - // -1, //AALOAD, // - // -1, //BALOAD, // - // -1, //CALOAD, // - // -1, //SALOAD, // - // -1, //ISTORE, // visitVarInsn // -2, //LSTORE, // - // -1, //FSTORE, // - // -2, //DSTORE, // - // -1, //ASTORE, // - // NA, //ISTORE_0, // - // NA, //ISTORE_1, // - // NA, //ISTORE_2, // - // NA, //ISTORE_3, // - // NA, //LSTORE_0, // - // NA, //LSTORE_1, // - // NA, //LSTORE_2, // - // NA, //LSTORE_3, // - // NA, //FSTORE_0, // - // NA, //FSTORE_1, // - // NA, //FSTORE_2, // - // NA, //FSTORE_3, // - // NA, //DSTORE_0, // - // NA, //DSTORE_1, // - // NA, //DSTORE_2, // - // NA, //DSTORE_3, // - // NA, //ASTORE_0, // - // NA, //ASTORE_1, // - // NA, //ASTORE_2, // - // NA, //ASTORE_3, // - // -3, //IASTORE, // visitInsn // -4, //LASTORE, // - // -3, //FASTORE, // - // -4, //DASTORE, // - // -3, //AASTORE, // - // -3, //BASTORE, // - // -3, //CASTORE, // - // -3, //SASTORE, // - // -1, //POP, // - // -2, //POP2, // - // 1, //DUP, // - // 1, //DUP_X1, // - // 1, //DUP_X2, // - // 2, //DUP2, // - // 2, //DUP2_X1, // - // 2, //DUP2_X2, // - // 0, //SWAP, // - // -1, //IADD, // - // -2, //LADD, // - // -1, //FADD, // - // -2, //DADD, // - // -1, //ISUB, // - // -2, //LSUB, // - // -1, //FSUB, // - // -2, //DSUB, // - // -1, //IMUL, // - // -2, //LMUL, // - // -1, //FMUL, // - // -2, //DMUL, // - // -1, //IDIV, // - // -2, //LDIV, // - // -1, //FDIV, // - // -2, //DDIV, // - // -1, //IREM, // - // -2, //LREM, // - // -1, //FREM, // - // -2, //DREM, // - // 0, //INEG, // - // 0, //LNEG, // - // 0, //FNEG, // - // 0, //DNEG, // - // -1, //ISHL, // - // -1, //LSHL, // - // -1, //ISHR, // - // -1, //LSHR, // - // -1, //IUSHR, // 
- // -1, //LUSHR, // - // -1, //IAND, // - // -2, //LAND, // - // -1, //IOR, // - // -2, //LOR, // - // -1, //IXOR, // - // -2, //LXOR, // - // 0, //IINC, // visitIincInsn // 1, //I2L, // visitInsn // 0, //I2F, // - // 1, //I2D, // - // -1, //L2I, // - // -1, //L2F, // - // 0, //L2D, // - // 0, //F2I, // - // 1, //F2L, // - // 1, //F2D, // - // -1, //D2I, // - // 0, //D2L, // - // -1, //D2F, // - // 0, //I2B, // - // 0, //I2C, // - // 0, //I2S, // - // -3, //LCMP, // - // -1, //FCMPL, // - // -1, //FCMPG, // - // -3, //DCMPL, // - // -3, //DCMPG, // - // -1, //IFEQ, // visitJumpInsn // -1, //IFNE, // - // -1, //IFLT, // - // -1, //IFGE, // - // -1, //IFGT, // - // -1, //IFLE, // - // -2, //IF_ICMPEQ, // - // -2, //IF_ICMPNE, // - // -2, //IF_ICMPLT, // - // -2, //IF_ICMPGE, // - // -2, //IF_ICMPGT, // - // -2, //IF_ICMPLE, // - // -2, //IF_ACMPEQ, // - // -2, //IF_ACMPNE, // - // 0, //GOTO, // - // 1, //JSR, // - // 0, //RET, // visitVarInsn // -1, //TABLESWITCH, // visiTableSwitchInsn // -1, //LOOKUPSWITCH, // visitLookupSwitch // -1, //IRETURN, // visitInsn // -2, //LRETURN, // - // -1, //FRETURN, // - // -2, //DRETURN, // - // -1, //ARETURN, // - // 0, //RETURN, // - // NA, //GETSTATIC, // visitFieldInsn // NA, //PUTSTATIC, // - // NA, //GETFIELD, // - // NA, //PUTFIELD, // - // NA, //INVOKEVIRTUAL, // visitMethodInsn // NA, //INVOKESPECIAL, // - // NA, //INVOKESTATIC, // - // NA, //INVOKEINTERFACE, // - // NA, //INVOKEDYNAMIC, // visitInvokeDynamicInsn // 1, //NEW, // visitTypeInsn // 0, //NEWARRAY, // visitIntInsn // 0, //ANEWARRAY, // visitTypeInsn // 0, //ARRAYLENGTH, // visitInsn // NA, //ATHROW, // - // 0, //CHECKCAST, // visitTypeInsn // 0, //INSTANCEOF, // - // -1, //MONITORENTER, // visitInsn // -1, //MONITOREXIT, // - // NA, //WIDE, // NOT VISITED // NA, //MULTIANEWARRAY, // visitMultiANewArrayInsn // -1, //IFNULL, // visitJumpInsn // -1, //IFNONNULL, // - // NA, //GOTO_W, // - // NA, //JSR_W, // - // }; // for (i = 0; i < b.length; ++i) { // 
System.err.print((char)('E' + b[i])); // } // System.err.println(); } /** * The label (i.e. basic block) to which these input and output stack map * frames correspond. */ Label owner; /** * The input stack map frame locals. */ int[] inputLocals; /** * The input stack map frame stack. */ int[] inputStack; /** * The output stack map frame locals. */ private int[] outputLocals; /** * The output stack map frame stack. */ private int[] outputStack; /** * Relative size of the output stack. The exact semantics of this field * depends on the algorithm that is used. * * When only the maximum stack size is computed, this field is the size of * the output stack relatively to the top of the input stack. * * When the stack map frames are completely computed, this field is the * actual number of types in {@link #outputStack}. */ private int outputStackTop; /** * Number of types that are initialized in the basic block. * * @see #initializations */ private int initializationCount; /** * The types that are initialized in the basic block. A constructor * invocation on an UNINITIALIZED or UNINITIALIZED_THIS type must replace * <i>every occurence</i> of this type in the local variables and in the * operand stack. This cannot be done during the first phase of the * algorithm since, during this phase, the local variables and the operand * stack are not completely computed. It is therefore necessary to store the * types on which constructors are invoked in the basic block, in order to * do this replacement during the second phase of the algorithm, where the * frames are fully computed. Note that this array can contain types that * are relative to input locals or to the input stack (see below for the * description of the algorithm). */ private int[] initializations; /** * Returns the output frame local variable type at the given index. * * @param local * the index of the local that must be returned. * @return the output frame local variable type at the given index. 
     */
    private int get(final int local) {
        if (outputLocals == null || local >= outputLocals.length) {
            // this local has never been assigned in this basic block,
            // so it is still equal to its value in the input frame
            return LOCAL | local;
        } else {
            int type = outputLocals[local];
            if (type == 0) {
                // slot exists but was never written (0 is not a valid type):
                // this local has never been assigned in this basic block,
                // so it is still equal to its value in the input frame
                type = outputLocals[local] = LOCAL | local;
            }
            return type;
        }
    }

    /**
     * Sets the output frame local variable type at the given index.
     *
     * @param local
     *            the index of the local that must be set.
     * @param type
     *            the value of the local that must be set.
     */
    private void set(final int local, final int type) {
        // creates and/or resizes the output local variables array if necessary
        if (outputLocals == null) {
            outputLocals = new int[10];
        }
        int n = outputLocals.length;
        if (local >= n) {
            // grow geometrically so repeated writes stay amortized O(1)
            int[] t = new int[Math.max(local + 1, 2 * n)];
            System.arraycopy(outputLocals, 0, t, 0, n);
            outputLocals = t;
        }
        // sets the local variable
        outputLocals[local] = type;
    }

    /**
     * Pushes a new type onto the output frame stack.
     *
     * @param type
     *            the type that must be pushed.
     */
    private void push(final int type) {
        // creates and/or resizes the output stack array if necessary
        if (outputStack == null) {
            outputStack = new int[10];
        }
        int n = outputStack.length;
        if (outputStackTop >= n) {
            int[] t = new int[Math.max(outputStackTop + 1, 2 * n)];
            System.arraycopy(outputStack, 0, t, 0, n);
            outputStack = t;
        }
        // pushes the type on the output stack
        outputStack[outputStackTop++] = type;
        // updates the maximum height reached by the output stack, if needed;
        // heights are counted relatively to the input stack top, since the
        // output frame is relative to the (yet unknown) input frame
        int top = owner.inputStackTop + outputStackTop;
        if (top > owner.outputStackMax) {
            owner.outputStackMax = top;
        }
    }

    /**
     * Pushes a new type onto the output frame stack.
     *
     * @param cw
     *            the ClassWriter to which this label belongs.
     * @param desc
     *            the descriptor of the type to be pushed.
Can also be a method * descriptor (in this case this method pushes its return type * onto the output frame stack). */ private void push(final ClassWriter cw, final String desc) { int type = type(cw, desc); if (type != 0) { push(type); if (type == LONG || type == DOUBLE) { push(TOP); } } } /** * Returns the int encoding of the given type. * * @param cw * the ClassWriter to which this label belongs. * @param desc * a type descriptor. * @return the int encoding of the given type. */ private static int type(final ClassWriter cw, final String desc) { String t; int index = desc.charAt(0) == '(' ? desc.indexOf(')') + 1 : 0; switch (desc.charAt(index)) { case 'V': return 0; case 'Z': case 'C': case 'B': case 'S': case 'I': return INTEGER; case 'F': return FLOAT; case 'J': return LONG; case 'D': return DOUBLE; case 'L': // stores the internal className, not the descriptor! t = desc.substring(index + 1, desc.length() - 1); return OBJECT | cw.addType(t); // case '[': default: // extracts the dimensions and the element type int data; int dims = index + 1; while (desc.charAt(dims) == '[') { ++dims; } switch (desc.charAt(dims)) { case 'Z': data = BOOLEAN; break; case 'C': data = CHAR; break; case 'B': data = BYTE; break; case 'S': data = SHORT; break; case 'I': data = INTEGER; break; case 'F': data = FLOAT; break; case 'J': data = LONG; break; case 'D': data = DOUBLE; break; // case 'L': default: // stores the internal className, not the descriptor t = desc.substring(dims + 1, desc.length() - 1); data = OBJECT | cw.addType(t); } return (dims - index) << 28 | data; } } /** * Pops a type from the output frame stack and returns its value. * * @return the type that has been popped from the output frame stack. */ private int pop() { if (outputStackTop > 0) { return outputStack[--outputStackTop]; } else { // if the output frame stack is empty, pops from the input stack return STACK | -(--owner.inputStackTop); } } /** * Pops the given number of types from the output frame stack. 
     *
     * @param elements
     *            the number of types that must be popped.
     */
    private void pop(final int elements) {
        if (outputStackTop >= elements) {
            outputStackTop -= elements;
        } else {
            // if the number of elements to be popped is greater than the number
            // of elements in the output stack, clear it, and pops the remaining
            // elements from the input stack.
            owner.inputStackTop -= elements - outputStackTop;
            outputStackTop = 0;
        }
    }

    /**
     * Pops a type from the output frame stack.
     *
     * @param desc
     *            the descriptor of the type to be popped. Can also be a method
     *            descriptor (in this case this method pops the types
     *            corresponding to the method arguments).
     */
    private void pop(final String desc) {
        char c = desc.charAt(0);
        if (c == '(') {
            // method descriptor: pop one slot per argument word
            // (getArgumentsAndReturnSizes packs the argument size in the
            // bits above the 2 return-size bits, hence the shift and -1)
            pop((Type.getArgumentsAndReturnSizes(desc) >> 2) - 1);
        } else if (c == 'J' || c == 'D') {
            // long and double occupy two stack slots
            pop(2);
        } else {
            pop(1);
        }
    }

    /**
     * Adds a new type to the list of types on which a constructor is invoked in
     * the basic block.
     *
     * @param var
     *            a type on a which a constructor is invoked.
     */
    private void init(final int var) {
        // creates and/or resizes the initializations array if necessary
        if (initializations == null) {
            initializations = new int[2];
        }
        int n = initializations.length;
        if (initializationCount >= n) {
            int[] t = new int[Math.max(initializationCount + 1, 2 * n)];
            System.arraycopy(initializations, 0, t, 0, n);
            initializations = t;
        }
        // stores the type to be initialized
        initializations[initializationCount++] = var;
    }

    /**
     * Replaces the given type with the appropriate type if it is one of the
     * types on which a constructor is invoked in the basic block.
     *
     * @param cw
     *            the ClassWriter to which this label belongs.
     * @param t
     *            a type
     * @return t or, if t is one of the types on which a constructor is invoked
     *         in the basic block, the type corresponding to this constructor.
     */
    private int init(final ClassWriter cw, final int t) {
        int s;
        if (t == UNINITIALIZED_THIS) {
            // 'this' before the super constructor call: once initialized it
            // becomes an ordinary reference to the current class
            s = OBJECT | cw.addType(cw.thisName);
        } else if ((t & (DIM | BASE_KIND)) == UNINITIALIZED) {
            // an object created by NEW: its concrete class name is stored in
            // the type table entry referenced by the BASE_VALUE bits
            String type = cw.typeTable[t & BASE_VALUE].strVal1;
            s = OBJECT | cw.addType(type);
        } else {
            // not an uninitialized type: nothing to replace
            return t;
        }
        for (int j = 0; j < initializationCount; ++j) {
            int u = initializations[j];
            int dim = u & DIM;
            int kind = u & KIND;
            // resolve relative (LOCAL/STACK) entries against the input frame
            // before comparing them to t
            if (kind == LOCAL) {
                u = dim + inputLocals[u & VALUE];
            } else if (kind == STACK) {
                u = dim + inputStack[inputStack.length - (u & VALUE)];
            }
            if (t == u) {
                // a constructor was invoked on this type in the basic block:
                // every occurrence of it must become the initialized type s
                return s;
            }
        }
        return t;
    }

    /**
     * Initializes the input frame of the first basic block from the method
     * descriptor.
     *
     * @param cw
     *            the ClassWriter to which this label belongs.
     * @param access
     *            the access flags of the method to which this label belongs.
     * @param args
     *            the formal parameter types of this method.
     * @param maxLocals
     *            the maximum number of local variables of this method.
     */
    void initInputFrame(final ClassWriter cw, final int access,
            final Type[] args, final int maxLocals) {
        inputLocals = new int[maxLocals];
        inputStack = new int[0];
        int i = 0;
        if ((access & Opcodes.ACC_STATIC) == 0) {
            // non-static methods: local 0 is 'this' — still uninitialized
            // inside a constructor until the super constructor call
            if ((access & MethodWriter.ACC_CONSTRUCTOR) == 0) {
                inputLocals[i++] = OBJECT | cw.addType(cw.thisName);
            } else {
                inputLocals[i++] = UNINITIALIZED_THIS;
            }
        }
        for (int j = 0; j < args.length; ++j) {
            int t = type(cw, args[j].getDescriptor());
            inputLocals[i++] = t;
            // long and double arguments occupy two local slots
            if (t == LONG || t == DOUBLE) {
                inputLocals[i++] = TOP;
            }
        }
        // remaining locals are undefined at method entry
        while (i < maxLocals) {
            inputLocals[i++] = TOP;
        }
    }

    /**
     * Simulates the action of the given instruction on the output stack frame.
     *
     * @param opcode
     *            the opcode of the instruction.
     * @param arg
     *            the operand of the instruction, if any.
     * @param cw
     *            the class writer to which this label belongs.
     * @param item
     *            the operand of the instructions, if any.
*/ void execute(final int opcode, final int arg, final ClassWriter cw, final Item item) { int t1, t2, t3, t4; switch (opcode) { case Opcodes.NOP: case Opcodes.INEG: case Opcodes.LNEG: case Opcodes.FNEG: case Opcodes.DNEG: case Opcodes.I2B: case Opcodes.I2C: case Opcodes.I2S: case Opcodes.GOTO: case Opcodes.RETURN: break; case Opcodes.ACONST_NULL: push(NULL); break; case Opcodes.ICONST_M1: case Opcodes.ICONST_0: case Opcodes.ICONST_1: case Opcodes.ICONST_2: case Opcodes.ICONST_3: case Opcodes.ICONST_4: case Opcodes.ICONST_5: case Opcodes.BIPUSH: case Opcodes.SIPUSH: case Opcodes.ILOAD: push(INTEGER); break; case Opcodes.LCONST_0: case Opcodes.LCONST_1: case Opcodes.LLOAD: push(LONG); push(TOP); break; case Opcodes.FCONST_0: case Opcodes.FCONST_1: case Opcodes.FCONST_2: case Opcodes.FLOAD: push(FLOAT); break; case Opcodes.DCONST_0: case Opcodes.DCONST_1: case Opcodes.DLOAD: push(DOUBLE); push(TOP); break; case Opcodes.LDC: switch (item.type) { case ClassWriter.INT: push(INTEGER); break; case ClassWriter.LONG: push(LONG); push(TOP); break; case ClassWriter.FLOAT: push(FLOAT); break; case ClassWriter.DOUBLE: push(DOUBLE); push(TOP); break; case ClassWriter.CLASS: push(OBJECT | cw.addType("java/lang/Class")); break; case ClassWriter.STR: push(OBJECT | cw.addType("java/lang/String")); break; case ClassWriter.MTYPE: push(OBJECT | cw.addType("java/lang/invoke/MethodType")); break; // case ClassWriter.HANDLE_BASE + [1..9]: default: push(OBJECT | cw.addType("java/lang/invoke/MethodHandle")); } break; case Opcodes.ALOAD: push(get(arg)); break; case Opcodes.IALOAD: case Opcodes.BALOAD: case Opcodes.CALOAD: case Opcodes.SALOAD: pop(2); push(INTEGER); break; case Opcodes.LALOAD: case Opcodes.D2L: pop(2); push(LONG); push(TOP); break; case Opcodes.FALOAD: pop(2); push(FLOAT); break; case Opcodes.DALOAD: case Opcodes.L2D: pop(2); push(DOUBLE); push(TOP); break; case Opcodes.AALOAD: pop(1); t1 = pop(); push(ELEMENT_OF + t1); break; case Opcodes.ISTORE: case Opcodes.FSTORE: case 
Opcodes.ASTORE: t1 = pop(); set(arg, t1); if (arg > 0) { t2 = get(arg - 1); // if t2 is of kind STACK or LOCAL we cannot know its size! if (t2 == LONG || t2 == DOUBLE) { set(arg - 1, TOP); } else if ((t2 & KIND) != BASE) { set(arg - 1, t2 | TOP_IF_LONG_OR_DOUBLE); } } break; case Opcodes.LSTORE: case Opcodes.DSTORE: pop(1); t1 = pop(); set(arg, t1); set(arg + 1, TOP); if (arg > 0) { t2 = get(arg - 1); // if t2 is of kind STACK or LOCAL we cannot know its size! if (t2 == LONG || t2 == DOUBLE) { set(arg - 1, TOP); } else if ((t2 & KIND) != BASE) { set(arg - 1, t2 | TOP_IF_LONG_OR_DOUBLE); } } break; case Opcodes.IASTORE: case Opcodes.BASTORE: case Opcodes.CASTORE: case Opcodes.SASTORE: case Opcodes.FASTORE: case Opcodes.AASTORE: pop(3); break; case Opcodes.LASTORE: case Opcodes.DASTORE: pop(4); break; case Opcodes.POP: case Opcodes.IFEQ: case Opcodes.IFNE: case Opcodes.IFLT: case Opcodes.IFGE: case Opcodes.IFGT: case Opcodes.IFLE: case Opcodes.IRETURN: case Opcodes.FRETURN: case Opcodes.ARETURN: case Opcodes.TABLESWITCH: case Opcodes.LOOKUPSWITCH: case Opcodes.ATHROW: case Opcodes.MONITORENTER: case Opcodes.MONITOREXIT: case Opcodes.IFNULL: case Opcodes.IFNONNULL: pop(1); break; case Opcodes.POP2: case Opcodes.IF_ICMPEQ: case Opcodes.IF_ICMPNE: case Opcodes.IF_ICMPLT: case Opcodes.IF_ICMPGE: case Opcodes.IF_ICMPGT: case Opcodes.IF_ICMPLE: case Opcodes.IF_ACMPEQ: case Opcodes.IF_ACMPNE: case Opcodes.LRETURN: case Opcodes.DRETURN: pop(2); break; case Opcodes.DUP: t1 = pop(); push(t1); push(t1); break; case Opcodes.DUP_X1: t1 = pop(); t2 = pop(); push(t1); push(t2); push(t1); break; case Opcodes.DUP_X2: t1 = pop(); t2 = pop(); t3 = pop(); push(t1); push(t3); push(t2); push(t1); break; case Opcodes.DUP2: t1 = pop(); t2 = pop(); push(t2); push(t1); push(t2); push(t1); break; case Opcodes.DUP2_X1: t1 = pop(); t2 = pop(); t3 = pop(); push(t2); push(t1); push(t3); push(t2); push(t1); break; case Opcodes.DUP2_X2: t1 = pop(); t2 = pop(); t3 = pop(); t4 = pop(); push(t2); 
push(t1); push(t4); push(t3); push(t2); push(t1); break; case Opcodes.SWAP: t1 = pop(); t2 = pop(); push(t1); push(t2); break; case Opcodes.IADD: case Opcodes.ISUB: case Opcodes.IMUL: case Opcodes.IDIV: case Opcodes.IREM: case Opcodes.IAND: case Opcodes.IOR: case Opcodes.IXOR: case Opcodes.ISHL: case Opcodes.ISHR: case Opcodes.IUSHR: case Opcodes.L2I: case Opcodes.D2I: case Opcodes.FCMPL: case Opcodes.FCMPG: pop(2); push(INTEGER); break; case Opcodes.LADD: case Opcodes.LSUB: case Opcodes.LMUL: case Opcodes.LDIV: case Opcodes.LREM: case Opcodes.LAND: case Opcodes.LOR: case Opcodes.LXOR: pop(4); push(LONG); push(TOP); break; case Opcodes.FADD: case Opcodes.FSUB: case Opcodes.FMUL: case Opcodes.FDIV: case Opcodes.FREM: case Opcodes.L2F: case Opcodes.D2F: pop(2); push(FLOAT); break; case Opcodes.DADD: case Opcodes.DSUB: case Opcodes.DMUL: case Opcodes.DDIV: case Opcodes.DREM: pop(4); push(DOUBLE); push(TOP); break; case Opcodes.LSHL: case Opcodes.LSHR: case Opcodes.LUSHR: pop(3); push(LONG); push(TOP); break; case Opcodes.IINC: set(arg, INTEGER); break; case Opcodes.I2L: case Opcodes.F2L: pop(1); push(LONG); push(TOP); break; case Opcodes.I2F: pop(1); push(FLOAT); break; case Opcodes.I2D: case Opcodes.F2D: pop(1); push(DOUBLE); push(TOP); break; case Opcodes.F2I: case Opcodes.ARRAYLENGTH: case Opcodes.INSTANCEOF: pop(1); push(INTEGER); break; case Opcodes.LCMP: case Opcodes.DCMPL: case Opcodes.DCMPG: pop(4); push(INTEGER); break; case Opcodes.JSR: case Opcodes.RET: throw new RuntimeException( "JSR/RET are not supported with computeFrames option"); case Opcodes.GETSTATIC: push(cw, item.strVal3); break; case Opcodes.PUTSTATIC: pop(item.strVal3); break; case Opcodes.GETFIELD: pop(1); push(cw, item.strVal3); break; case Opcodes.PUTFIELD: pop(item.strVal3); pop(); break; case Opcodes.INVOKEVIRTUAL: case Opcodes.INVOKESPECIAL: case Opcodes.INVOKESTATIC: case Opcodes.INVOKEINTERFACE: pop(item.strVal3); if (opcode != Opcodes.INVOKESTATIC) { t1 = pop(); if (opcode == 
Opcodes.INVOKESPECIAL && item.strVal2.charAt(0) == '<') { init(t1); } } push(cw, item.strVal3); break; case Opcodes.INVOKEDYNAMIC: pop(item.strVal2); push(cw, item.strVal2); break; case Opcodes.NEW: push(UNINITIALIZED | cw.addUninitializedType(item.strVal1, arg)); break; case Opcodes.NEWARRAY: pop(); switch (arg) { case Opcodes.T_BOOLEAN: push(ARRAY_OF | BOOLEAN); break; case Opcodes.T_CHAR: push(ARRAY_OF | CHAR); break; case Opcodes.T_BYTE: push(ARRAY_OF | BYTE); break; case Opcodes.T_SHORT: push(ARRAY_OF | SHORT); break; case Opcodes.T_INT: push(ARRAY_OF | INTEGER); break; case Opcodes.T_FLOAT: push(ARRAY_OF | FLOAT); break; case Opcodes.T_DOUBLE: push(ARRAY_OF | DOUBLE); break; // case Opcodes.T_LONG: default: push(ARRAY_OF | LONG); break; } break; case Opcodes.ANEWARRAY: String s = item.strVal1; pop(); if (s.charAt(0) == '[') { push(cw, '[' + s); } else { push(ARRAY_OF | OBJECT | cw.addType(s)); } break; case Opcodes.CHECKCAST: s = item.strVal1; pop(); if (s.charAt(0) == '[') { push(cw, s); } else { push(OBJECT | cw.addType(s)); } break; // case Opcodes.MULTIANEWARRAY: default: pop(arg); push(cw, item.strVal1); break; } } /** * Merges the input frame of the given basic block with the input and output * frames of this basic block. Returns <tt>true</tt> if the input frame of * the given label has been changed by this operation. * * @param cw * the ClassWriter to which this label belongs. * @param frame * the basic block whose input frame must be updated. * @param edge * the kind of the {@link Edge} between this label and 'label'. * See {@link Edge#info}. * @return <tt>true</tt> if the input frame of the given label has been * changed by this operation. 
     */
    boolean merge(final ClassWriter cw, final Frame frame, final int edge) {
        boolean changed = false;
        int i, s, dim, kind, t;
        int nLocal = inputLocals.length;
        int nStack = inputStack.length;
        if (frame.inputLocals == null) {
            // first time the successor block is reached: allocate its locals
            frame.inputLocals = new int[nLocal];
            changed = true;
        }
        // Phase 1: merge the local variable types. Each output local is first
        // resolved to an absolute type (BASE types are used as is; LOCAL and
        // STACK types are resolved against this block's input frame).
        for (i = 0; i < nLocal; ++i) {
            if (outputLocals != null && i < outputLocals.length) {
                s = outputLocals[i];
                if (s == 0) {
                    // local never assigned in this block: keeps its input value
                    t = inputLocals[i];
                } else {
                    dim = s & DIM;
                    kind = s & KIND;
                    if (kind == BASE) {
                        t = s;
                    } else {
                        if (kind == LOCAL) {
                            t = dim + inputLocals[s & VALUE];
                        } else {
                            t = dim + inputStack[nStack - (s & VALUE)];
                        }
                        // the second slot of this long/double was overwritten
                        // in the block, so the value itself is no longer valid
                        if ((s & TOP_IF_LONG_OR_DOUBLE) != 0
                                && (t == LONG || t == DOUBLE)) {
                            t = TOP;
                        }
                    }
                }
            } else {
                t = inputLocals[i];
            }
            if (initializations != null) {
                t = init(cw, t);
            }
            changed |= merge(cw, t, frame.inputLocals, i);
        }
        if (edge > 0) {
            // NOTE(review): edge > 0 appears to encode the type of a caught
            // exception (see Edge#info): the successor's locals are merged from
            // this block's *input* locals, and its stack is exactly one
            // element, the exception type.
            for (i = 0; i < nLocal; ++i) {
                t = inputLocals[i];
                changed |= merge(cw, t, frame.inputLocals, i);
            }
            if (frame.inputStack == null) {
                frame.inputStack = new int[1];
                changed = true;
            }
            changed |= merge(cw, edge, frame.inputStack, 0);
            return changed;
        }
        // Phase 2: merge the stack — first the part of the input stack that
        // this block did not consume...
        int nInputStack = inputStack.length + owner.inputStackTop;
        if (frame.inputStack == null) {
            frame.inputStack = new int[nInputStack + outputStackTop];
            changed = true;
        }
        for (i = 0; i < nInputStack; ++i) {
            t = inputStack[i];
            if (initializations != null) {
                t = init(cw, t);
            }
            changed |= merge(cw, t, frame.inputStack, i);
        }
        // ...then the types pushed by this block, resolved exactly like the
        // locals above.
        for (i = 0; i < outputStackTop; ++i) {
            s = outputStack[i];
            dim = s & DIM;
            kind = s & KIND;
            if (kind == BASE) {
                t = s;
            } else {
                if (kind == LOCAL) {
                    t = dim + inputLocals[s & VALUE];
                } else {
                    t = dim + inputStack[nStack - (s & VALUE)];
                }
                if ((s & TOP_IF_LONG_OR_DOUBLE) != 0
                        && (t == LONG || t == DOUBLE)) {
                    t = TOP;
                }
            }
            if (initializations != null) {
                t = init(cw, t);
            }
            changed |= merge(cw, t, frame.inputStack, nInputStack + i);
        }
        return changed;
    }

    /**
     * Merges the type at the given index in the given type array with the given
     * type.
     * Returns <tt>true</tt> if the type array has been modified by this
     * operation.
     *
     * @param cw
     *            the ClassWriter to which this label belongs.
     * @param t
     *            the type with which the type array element must be merged.
     * @param types
     *            an array of types.
     * @param index
     *            the index of the type that must be merged in 'types'.
     * @return <tt>true</tt> if the type array has been modified by this
     *         operation.
     */
    private static boolean merge(final ClassWriter cw, int t,
            final int[] types, final int index) {
        int u = types[index];
        if (u == t) {
            // if the types are equal, merge(u,t)=u, so there is no change
            return false;
        }
        if ((t & ~DIM) == NULL) {
            if (u == NULL) {
                return false;
            }
            // NULL with any array dimension is treated as plain NULL
            t = NULL;
        }
        if (u == 0) {
            // if types[index] has never been assigned, merge(u,t)=t
            types[index] = t;
            return true;
        }
        int v;
        if ((u & BASE_KIND) == OBJECT || (u & DIM) != 0) {
            // if u is a reference type of any dimension
            if (t == NULL) {
                // if t is the NULL type, merge(u,t)=u, so there is no change
                return false;
            } else if ((t & (DIM | BASE_KIND)) == (u & (DIM | BASE_KIND))) {
                // if t and u have the same dimension and same base kind
                if ((u & BASE_KIND) == OBJECT) {
                    // if t is also a reference type, and if u and t have the
                    // same dimension merge(u,t) = dim(t) | common parent of the
                    // element types of u and t
                    v = (t & DIM) | OBJECT
                            | cw.getMergedType(t & BASE_VALUE, u & BASE_VALUE);
                } else {
                    // if u and t are array types, but not with the same element
                    // type, merge(u,t) = dim(u) - 1 | java/lang/Object
                    int vdim = ELEMENT_OF + (u & DIM);
                    v = vdim | OBJECT | cw.addType("java/lang/Object");
                }
            } else if ((t & BASE_KIND) == OBJECT || (t & DIM) != 0) {
                // if t is any other reference or array type, the merged type
                // is min(udim, tdim) | java/lang/Object, where udim is the
                // array dimension of u, minus 1 if u is an array type with a
                // primitive element type (and similarly for tdim).
                int tdim = (((t & DIM) == 0 || (t & BASE_KIND) == OBJECT) ? 0
                        : ELEMENT_OF) + (t & DIM);
                int udim = (((u & DIM) == 0 || (u & BASE_KIND) == OBJECT) ? 0
                        : ELEMENT_OF) + (u & DIM);
                v = Math.min(tdim, udim) | OBJECT
                        | cw.addType("java/lang/Object");
            } else {
                // if t is any other type, merge(u,t)=TOP
                v = TOP;
            }
        } else if (u == NULL) {
            // if u is the NULL type, merge(u,t)=t,
            // or TOP if t is not a reference type
            v = (t & BASE_KIND) == OBJECT || (t & DIM) != 0 ? t : TOP;
        } else {
            // if u is any other type, merge(u,t)=TOP whatever t
            v = TOP;
        }
        if (u != v) {
            types[index] = v;
            return true;
        }
        return false;
    }
}
actframework/act-asm
src/main/java/act/asm/Frame.java
Java
apache-2.0
50,500
package origin import ( "crypto/tls" "crypto/x509" "errors" "fmt" "io/ioutil" "net/http" "net/url" "path" "github.com/RangelReale/osin" "github.com/RangelReale/osincli" "github.com/golang/glog" "github.com/pborman/uuid" kapi "k8s.io/kubernetes/pkg/api" kerrs "k8s.io/kubernetes/pkg/api/errors" "k8s.io/kubernetes/pkg/auth/authenticator" kuser "k8s.io/kubernetes/pkg/auth/user" "k8s.io/kubernetes/pkg/client/retry" knet "k8s.io/kubernetes/pkg/util/net" "k8s.io/kubernetes/pkg/util/sets" "k8s.io/kubernetes/plugin/pkg/auth/authenticator/request/union" "github.com/openshift/origin/pkg/auth/authenticator/challenger/passwordchallenger" "github.com/openshift/origin/pkg/auth/authenticator/challenger/placeholderchallenger" "github.com/openshift/origin/pkg/auth/authenticator/password/allowanypassword" "github.com/openshift/origin/pkg/auth/authenticator/password/basicauthpassword" "github.com/openshift/origin/pkg/auth/authenticator/password/denypassword" "github.com/openshift/origin/pkg/auth/authenticator/password/htpasswd" "github.com/openshift/origin/pkg/auth/authenticator/password/keystonepassword" "github.com/openshift/origin/pkg/auth/authenticator/password/ldappassword" "github.com/openshift/origin/pkg/auth/authenticator/redirector" "github.com/openshift/origin/pkg/auth/authenticator/request/basicauthrequest" "github.com/openshift/origin/pkg/auth/authenticator/request/headerrequest" "github.com/openshift/origin/pkg/auth/authenticator/request/x509request" "github.com/openshift/origin/pkg/auth/ldaputil" "github.com/openshift/origin/pkg/auth/oauth/external" "github.com/openshift/origin/pkg/auth/oauth/external/github" "github.com/openshift/origin/pkg/auth/oauth/external/gitlab" "github.com/openshift/origin/pkg/auth/oauth/external/google" "github.com/openshift/origin/pkg/auth/oauth/external/openid" "github.com/openshift/origin/pkg/auth/oauth/handlers" "github.com/openshift/origin/pkg/auth/oauth/registry" "github.com/openshift/origin/pkg/auth/server/csrf" 
"github.com/openshift/origin/pkg/auth/server/errorpage" "github.com/openshift/origin/pkg/auth/server/grant" "github.com/openshift/origin/pkg/auth/server/login" "github.com/openshift/origin/pkg/auth/server/selectprovider" "github.com/openshift/origin/pkg/auth/server/tokenrequest" "github.com/openshift/origin/pkg/auth/userregistry/identitymapper" configapi "github.com/openshift/origin/pkg/cmd/server/api" cmdutil "github.com/openshift/origin/pkg/cmd/util" oauthapi "github.com/openshift/origin/pkg/oauth/api" accesstokenregistry "github.com/openshift/origin/pkg/oauth/registry/oauthaccesstoken" accesstokenetcd "github.com/openshift/origin/pkg/oauth/registry/oauthaccesstoken/etcd" authorizetokenregistry "github.com/openshift/origin/pkg/oauth/registry/oauthauthorizetoken" authorizetokenetcd "github.com/openshift/origin/pkg/oauth/registry/oauthauthorizetoken/etcd" clientregistry "github.com/openshift/origin/pkg/oauth/registry/oauthclient" clientetcd "github.com/openshift/origin/pkg/oauth/registry/oauthclient/etcd" clientauthregistry "github.com/openshift/origin/pkg/oauth/registry/oauthclientauthorization" clientauthetcd "github.com/openshift/origin/pkg/oauth/registry/oauthclientauthorization/etcd" "github.com/openshift/origin/pkg/oauth/server/osinserver" "github.com/openshift/origin/pkg/oauth/server/osinserver/registrystorage" saoauth "github.com/openshift/origin/pkg/serviceaccounts/oauthclient" ) const ( OpenShiftOAuthAPIPrefix = "/oauth" openShiftLoginPrefix = "/login" openShiftApproveSubpath = "approve" OpenShiftOAuthCallbackPrefix = "/oauth2callback" OpenShiftWebConsoleClientID = "openshift-web-console" OpenShiftBrowserClientID = "openshift-browser-client" OpenShiftCLIClientID = "openshift-challenging-client" ) // WithOAuth decorates the given handler by serving the OAuth2 endpoints while // passing through all other requests to the given handler. 
func (c *AuthConfig) WithOAuth(handler http.Handler) (http.Handler, error) { baseMux := http.NewServeMux() mux := c.possiblyWrapMux(baseMux) // pass through all other requests mux.Handle("/", handler) clientStorage, err := clientetcd.NewREST(c.RESTOptionsGetter) if err != nil { return nil, err } clientRegistry := clientregistry.NewRegistry(clientStorage) combinedOAuthClientGetter := saoauth.NewServiceAccountOAuthClientGetter(c.KubeClient.Core(), c.KubeClient.Core(), c.OpenShiftClient, clientRegistry, oauthapi.GrantHandlerType(c.Options.GrantConfig.ServiceAccountMethod)) accessTokenStorage, err := accesstokenetcd.NewREST(c.RESTOptionsGetter, combinedOAuthClientGetter, c.EtcdBackends...) if err != nil { return nil, err } accessTokenRegistry := accesstokenregistry.NewRegistry(accessTokenStorage) authorizeTokenStorage, err := authorizetokenetcd.NewREST(c.RESTOptionsGetter, combinedOAuthClientGetter) if err != nil { return nil, err } authorizeTokenRegistry := authorizetokenregistry.NewRegistry(authorizeTokenStorage) clientAuthStorage, err := clientauthetcd.NewREST(c.RESTOptionsGetter, combinedOAuthClientGetter) if err != nil { return nil, err } clientAuthRegistry := clientauthregistry.NewRegistry(clientAuthStorage) errorPageHandler, err := c.getErrorHandler() if err != nil { glog.Fatal(err) } authRequestHandler, authHandler, authFinalizer, err := c.getAuthorizeAuthenticationHandlers(mux, errorPageHandler) if err != nil { glog.Fatal(err) } storage := registrystorage.New(accessTokenRegistry, authorizeTokenRegistry, combinedOAuthClientGetter, registry.NewUserConversion()) config := osinserver.NewDefaultServerConfig() if c.Options.TokenConfig.AuthorizeTokenMaxAgeSeconds > 0 { config.AuthorizationExpiration = c.Options.TokenConfig.AuthorizeTokenMaxAgeSeconds } if c.Options.TokenConfig.AccessTokenMaxAgeSeconds > 0 { config.AccessExpiration = c.Options.TokenConfig.AccessTokenMaxAgeSeconds } grantChecker := registry.NewClientAuthorizationGrantChecker(clientAuthRegistry) 
grantHandler := c.getGrantHandler(mux, authRequestHandler, combinedOAuthClientGetter, clientAuthRegistry) server := osinserver.New( config, storage, osinserver.AuthorizeHandlers{ handlers.NewAuthorizeAuthenticator( authRequestHandler, authHandler, errorPageHandler, ), handlers.NewGrantCheck( grantChecker, grantHandler, errorPageHandler, ), authFinalizer, }, osinserver.AccessHandlers{ handlers.NewDenyAccessAuthenticator(), }, osinserver.NewDefaultErrorHandler(), ) server.Install(mux, OpenShiftOAuthAPIPrefix) if err := CreateOrUpdateDefaultOAuthClients(c.Options.MasterPublicURL, c.AssetPublicAddresses, clientRegistry); err != nil { glog.Fatal(err) } browserClient, err := clientRegistry.GetClient(kapi.NewContext(), OpenShiftBrowserClientID) if err != nil { glog.Fatal(err) } osOAuthClientConfig := c.NewOpenShiftOAuthClientConfig(browserClient) osOAuthClientConfig.RedirectUrl = c.Options.MasterPublicURL + path.Join(OpenShiftOAuthAPIPrefix, tokenrequest.DisplayTokenEndpoint) osOAuthClient, _ := osincli.NewClient(osOAuthClientConfig) if len(*c.Options.MasterCA) > 0 { rootCAs, err := cmdutil.CertPoolFromFile(*c.Options.MasterCA) if err != nil { glog.Fatal(err) } osOAuthClient.Transport = knet.SetTransportDefaults(&http.Transport{ TLSClientConfig: &tls.Config{RootCAs: rootCAs}, }) } tokenRequestEndpoints := tokenrequest.NewEndpoints(c.Options.MasterPublicURL, osOAuthClient) tokenRequestEndpoints.Install(mux, OpenShiftOAuthAPIPrefix) // glog.Infof("oauth server configured as: %#v", server) // glog.Infof("auth handler: %#v", authHandler) // glog.Infof("auth request handler: %#v", authRequestHandler) // glog.Infof("grant checker: %#v", grantChecker) // glog.Infof("grant handler: %#v", grantHandler) return baseMux, nil } func (c *AuthConfig) possiblyWrapMux(mux cmdutil.Mux) cmdutil.Mux { // Register directly into the given mux if c.HandlerWrapper == nil { return mux } // Wrap all handlers before registering into the container's mux // This lets us do things like defer session 
clearing to the end of a request return &handlerWrapperMux{ mux: mux, wrapper: c.HandlerWrapper, } } func (c *AuthConfig) getErrorHandler() (*errorpage.ErrorPage, error) { errorTemplate := "" if c.Options.Templates != nil { errorTemplate = c.Options.Templates.Error } errorPageRenderer, err := errorpage.NewErrorPageTemplateRenderer(errorTemplate) if err != nil { return nil, err } return errorpage.NewErrorPageHandler(errorPageRenderer), nil } // NewOpenShiftOAuthClientConfig provides config for OpenShift OAuth client func (c *AuthConfig) NewOpenShiftOAuthClientConfig(client *oauthapi.OAuthClient) *osincli.ClientConfig { config := &osincli.ClientConfig{ ClientId: client.Name, ClientSecret: client.Secret, ErrorsInStatusCode: true, SendClientSecretInParams: true, AuthorizeUrl: OpenShiftOAuthAuthorizeURL(c.Options.MasterPublicURL), TokenUrl: OpenShiftOAuthTokenURL(c.Options.MasterURL), Scope: "", } return config } func OpenShiftOAuthAuthorizeURL(masterAddr string) string { return masterAddr + path.Join(OpenShiftOAuthAPIPrefix, osinserver.AuthorizePath) } func OpenShiftOAuthTokenURL(masterAddr string) string { return masterAddr + path.Join(OpenShiftOAuthAPIPrefix, osinserver.TokenPath) } func OpenShiftOAuthTokenRequestURL(masterAddr string) string { return masterAddr + path.Join(OpenShiftOAuthAPIPrefix, tokenrequest.RequestTokenEndpoint) } func ensureOAuthClient(client oauthapi.OAuthClient, clientRegistry clientregistry.Registry, preserveExistingRedirects, preserveExistingSecret bool) error { ctx := kapi.NewContext() _, err := clientRegistry.CreateClient(ctx, &client) if err == nil || !kerrs.IsAlreadyExists(err) { return err } return retry.RetryOnConflict(retry.DefaultRetry, func() error { existing, err := clientRegistry.GetClient(ctx, client.Name) if err != nil { return err } // Ensure the correct challenge setting existing.RespondWithChallenges = client.RespondWithChallenges // Preserve an existing client secret if !preserveExistingSecret || len(existing.Secret) == 0 { 
existing.Secret = client.Secret } // Preserve redirects for clients other than the CLI client // The CLI client doesn't care about the redirect URL, just the token or error fragment if preserveExistingRedirects { // Add in any redirects from the existing one // This preserves any additional customized redirects in the default clients redirects := sets.NewString(client.RedirectURIs...) for _, redirect := range existing.RedirectURIs { if !redirects.Has(redirect) { client.RedirectURIs = append(client.RedirectURIs, redirect) redirects.Insert(redirect) } } } existing.RedirectURIs = client.RedirectURIs // If the GrantMethod is present, keep it for compatibility // If it is empty, assign the requested strategy. if len(existing.GrantMethod) == 0 { existing.GrantMethod = client.GrantMethod } _, err = clientRegistry.UpdateClient(ctx, existing) return err }) } func CreateOrUpdateDefaultOAuthClients(masterPublicAddr string, assetPublicAddresses []string, clientRegistry clientregistry.Registry) error { { webConsoleClient := oauthapi.OAuthClient{ ObjectMeta: kapi.ObjectMeta{Name: OpenShiftWebConsoleClientID}, Secret: "", RespondWithChallenges: false, RedirectURIs: assetPublicAddresses, GrantMethod: oauthapi.GrantHandlerAuto, } if err := ensureOAuthClient(webConsoleClient, clientRegistry, true, false); err != nil { return err } } { browserClient := oauthapi.OAuthClient{ ObjectMeta: kapi.ObjectMeta{Name: OpenShiftBrowserClientID}, Secret: uuid.New(), RespondWithChallenges: false, RedirectURIs: []string{masterPublicAddr + path.Join(OpenShiftOAuthAPIPrefix, tokenrequest.DisplayTokenEndpoint)}, GrantMethod: oauthapi.GrantHandlerAuto, } if err := ensureOAuthClient(browserClient, clientRegistry, true, true); err != nil { return err } } { cliClient := oauthapi.OAuthClient{ ObjectMeta: kapi.ObjectMeta{Name: OpenShiftCLIClientID}, Secret: "", RespondWithChallenges: true, RedirectURIs: []string{masterPublicAddr + path.Join(OpenShiftOAuthAPIPrefix, tokenrequest.ImplicitTokenEndpoint)}, 
GrantMethod: oauthapi.GrantHandlerAuto, } if err := ensureOAuthClient(cliClient, clientRegistry, false, false); err != nil { return err } } return nil } // getCSRF returns the object responsible for generating and checking CSRF tokens func (c *AuthConfig) getCSRF() csrf.CSRF { secure := isHTTPS(c.Options.MasterPublicURL) return csrf.NewCookieCSRF("csrf", "/", "", secure, true) } func (c *AuthConfig) getAuthorizeAuthenticationHandlers(mux cmdutil.Mux, errorHandler handlers.AuthenticationErrorHandler) (authenticator.Request, handlers.AuthenticationHandler, osinserver.AuthorizeHandler, error) { authRequestHandler, err := c.getAuthenticationRequestHandler() if err != nil { return nil, nil, nil, err } authHandler, err := c.getAuthenticationHandler(mux, errorHandler) if err != nil { return nil, nil, nil, err } authFinalizer := c.getAuthenticationFinalizer() return authRequestHandler, authHandler, authFinalizer, nil } // getGrantHandler returns the object that handles approving or rejecting grant requests func (c *AuthConfig) getGrantHandler(mux cmdutil.Mux, auth authenticator.Request, clientregistry clientregistry.Getter, authregistry clientauthregistry.Registry) handlers.GrantHandler { // check that the global default strategy is something we honor if !configapi.ValidGrantHandlerTypes.Has(string(c.Options.GrantConfig.Method)) { glog.Fatalf("No grant handler found that matches %v. The OAuth server cannot start!", c.Options.GrantConfig.Method) } // Since any OAuth client could require prompting, we will unconditionally // start the GrantServer here. grantServer := grant.NewGrant(c.getCSRF(), auth, grant.DefaultFormRenderer, clientregistry, authregistry) grantServer.Install(mux, path.Join(OpenShiftOAuthAPIPrefix, osinserver.AuthorizePath, openShiftApproveSubpath)) // Set defaults for standard clients. These can be overridden. 
return handlers.NewPerClientGrant( handlers.NewRedirectGrant(openShiftApproveSubpath), oauthapi.GrantHandlerType(c.Options.GrantConfig.Method), ) } // getAuthenticationFinalizer returns an authentication finalizer which is called just prior to writing a response to an authorization request func (c *AuthConfig) getAuthenticationFinalizer() osinserver.AuthorizeHandler { if c.SessionAuth != nil { // The session needs to know the authorize flow is done so it can invalidate the session return osinserver.AuthorizeHandlerFunc(func(ar *osin.AuthorizeRequest, resp *osin.Response, w http.ResponseWriter) (bool, error) { _ = c.SessionAuth.InvalidateAuthentication(w, ar.HttpRequest) return false, nil }) } // Otherwise return a no-op finalizer return osinserver.AuthorizeHandlerFunc(func(ar *osin.AuthorizeRequest, resp *osin.Response, w http.ResponseWriter) (bool, error) { return false, nil }) } func (c *AuthConfig) getAuthenticationHandler(mux cmdutil.Mux, errorHandler handlers.AuthenticationErrorHandler) (handlers.AuthenticationHandler, error) { // TODO: make this ordered once we can have more than one challengers := map[string]handlers.AuthenticationChallenger{} redirectors := new(handlers.AuthenticationRedirectors) // Determine if we have more than one password-based Identity Provider multiplePasswordProviders := false passwordProviderCount := 0 for _, identityProvider := range c.Options.IdentityProviders { if configapi.IsPasswordAuthenticator(identityProvider) && identityProvider.UseAsLogin { passwordProviderCount++ if passwordProviderCount > 1 { multiplePasswordProviders = true break } } } for _, identityProvider := range c.Options.IdentityProviders { identityMapper, err := identitymapper.NewIdentityUserMapper(c.IdentityRegistry, c.UserRegistry, identitymapper.MappingMethodType(identityProvider.MappingMethod)) if err != nil { return nil, err } // TODO: refactor handler building per type if configapi.IsPasswordAuthenticator(identityProvider) { passwordAuth, err := 
c.getPasswordAuthenticator(identityProvider) if err != nil { return nil, err } if identityProvider.UseAsLogin { // Password auth requires: // 1. a session success handler (to remember you logged in) // 2. a redirectSuccessHandler (to go back to the "then" param) if c.SessionAuth == nil { return nil, errors.New("SessionAuth is required for password-based login") } passwordSuccessHandler := handlers.AuthenticationSuccessHandlers{c.SessionAuth, redirectSuccessHandler{}} var ( // loginPath is unescaped, the way the mux will see it once URL-decoding is done loginPath = openShiftLoginPrefix // redirectLoginPath is escaped, the way we would need to send a Location redirect to a client redirectLoginPath = openShiftLoginPrefix ) if multiplePasswordProviders { // If there is more than one Identity Provider acting as a login // provider, we need to give each of them their own login path, // to avoid ambiguity. loginPath = path.Join(openShiftLoginPrefix, identityProvider.Name) // url-encode the provider name for redirecting redirectLoginPath = path.Join(openShiftLoginPrefix, (&url.URL{Path: identityProvider.Name}).String()) } // Since we're redirecting to a local login page, we don't need to force absolute URL resolution redirectors.Add(identityProvider.Name, redirector.NewRedirector(nil, redirectLoginPath+"?then=${url}")) var loginTemplateFile string if c.Options.Templates != nil { loginTemplateFile = c.Options.Templates.Login } loginFormRenderer, err := login.NewLoginFormRenderer(loginTemplateFile) if err != nil { return nil, err } login := login.NewLogin(identityProvider.Name, c.getCSRF(), &callbackPasswordAuthenticator{passwordAuth, passwordSuccessHandler}, loginFormRenderer) login.Install(mux, loginPath) } if identityProvider.UseAsChallenger { // For now, all password challenges share a single basic challenger, since they'll all respond to any basic credentials challengers["basic-challenge"] = passwordchallenger.NewBasicAuthChallenger("openshift") } } else if 
configapi.IsOAuthIdentityProvider(identityProvider) { oauthProvider, err := c.getOAuthProvider(identityProvider) if err != nil { return nil, err } // Default state builder, combining CSRF and return URL handling state := external.CSRFRedirectingState(c.getCSRF()) // OAuth auth requires // 1. a session success handler (to remember you logged in) // 2. a state success handler (to go back to the URL encoded in the state) if c.SessionAuth == nil { return nil, errors.New("SessionAuth is required for OAuth-based login") } oauthSuccessHandler := handlers.AuthenticationSuccessHandlers{c.SessionAuth, state} // If the specified errorHandler doesn't handle the login error, let the state error handler attempt to propagate specific errors back to the token requester oauthErrorHandler := handlers.AuthenticationErrorHandlers{errorHandler, state} callbackPath := path.Join(OpenShiftOAuthCallbackPrefix, identityProvider.Name) oauthRedirector, oauthHandler, err := external.NewExternalOAuthRedirector(oauthProvider, state, c.Options.MasterPublicURL+callbackPath, oauthSuccessHandler, oauthErrorHandler, identityMapper) if err != nil { return nil, fmt.Errorf("unexpected error: %v", err) } mux.Handle(callbackPath, oauthHandler) if identityProvider.UseAsLogin { redirectors.Add(identityProvider.Name, oauthRedirector) } if identityProvider.UseAsChallenger { // For now, all password challenges share a single basic challenger, since they'll all respond to any basic credentials challengers["basic-challenge"] = passwordchallenger.NewBasicAuthChallenger("openshift") } } else if requestHeaderProvider, isRequestHeader := identityProvider.Provider.(*configapi.RequestHeaderIdentityProvider); isRequestHeader { // We might be redirecting to an external site, we need to fully resolve the request URL to the public master baseRequestURL, err := url.Parse(c.Options.MasterPublicURL + OpenShiftOAuthAPIPrefix + osinserver.AuthorizePath) if err != nil { return nil, err } if identityProvider.UseAsChallenger { 
challengers["requestheader-"+identityProvider.Name+"-redirect"] = redirector.NewChallenger(baseRequestURL, requestHeaderProvider.ChallengeURL) } if identityProvider.UseAsLogin { redirectors.Add(identityProvider.Name, redirector.NewRedirector(baseRequestURL, requestHeaderProvider.LoginURL)) } } } if redirectors.Count() > 0 && len(challengers) == 0 { // Add a default challenger that will warn and give a link to the web browser token-granting location challengers["placeholder"] = placeholderchallenger.New(OpenShiftOAuthTokenRequestURL(c.Options.MasterPublicURL)) } var selectProviderTemplateFile string if c.Options.Templates != nil { selectProviderTemplateFile = c.Options.Templates.ProviderSelection } selectProviderRenderer, err := selectprovider.NewSelectProviderRenderer(selectProviderTemplateFile) if err != nil { return nil, err } selectProvider := selectprovider.NewSelectProvider(selectProviderRenderer, c.Options.AlwaysShowProviderSelection) authHandler := handlers.NewUnionAuthenticationHandler(challengers, redirectors, errorHandler, selectProvider) return authHandler, nil } func (c *AuthConfig) getOAuthProvider(identityProvider configapi.IdentityProvider) (external.Provider, error) { switch provider := identityProvider.Provider.(type) { case (*configapi.GitHubIdentityProvider): clientSecret, err := configapi.ResolveStringValue(provider.ClientSecret) if err != nil { return nil, err } return github.NewProvider(identityProvider.Name, provider.ClientID, clientSecret, provider.Organizations, provider.Teams), nil case (*configapi.GitLabIdentityProvider): transport, err := cmdutil.TransportFor(provider.CA, "", "") if err != nil { return nil, err } clientSecret, err := configapi.ResolveStringValue(provider.ClientSecret) if err != nil { return nil, err } return gitlab.NewProvider(identityProvider.Name, transport, provider.URL, provider.ClientID, clientSecret) case (*configapi.GoogleIdentityProvider): clientSecret, err := configapi.ResolveStringValue(provider.ClientSecret) 
if err != nil { return nil, err } return google.NewProvider(identityProvider.Name, provider.ClientID, clientSecret, provider.HostedDomain) case (*configapi.OpenIDIdentityProvider): transport, err := cmdutil.TransportFor(provider.CA, "", "") if err != nil { return nil, err } clientSecret, err := configapi.ResolveStringValue(provider.ClientSecret) if err != nil { return nil, err } // OpenID Connect requests MUST contain the openid scope value // http://openid.net/specs/openid-connect-core-1_0.html#AuthRequest scopes := sets.NewString("openid") scopes.Insert(provider.ExtraScopes...) config := openid.Config{ ClientID: provider.ClientID, ClientSecret: clientSecret, Scopes: scopes.List(), ExtraAuthorizeParameters: provider.ExtraAuthorizeParameters, AuthorizeURL: provider.URLs.Authorize, TokenURL: provider.URLs.Token, UserInfoURL: provider.URLs.UserInfo, IDClaims: provider.Claims.ID, PreferredUsernameClaims: provider.Claims.PreferredUsername, EmailClaims: provider.Claims.Email, NameClaims: provider.Claims.Name, } return openid.NewProvider(identityProvider.Name, transport, config) default: return nil, fmt.Errorf("No OAuth provider found that matches %v. 
The OAuth server cannot start!", identityProvider) } } func (c *AuthConfig) getPasswordAuthenticator(identityProvider configapi.IdentityProvider) (authenticator.Password, error) { identityMapper, err := identitymapper.NewIdentityUserMapper(c.IdentityRegistry, c.UserRegistry, identitymapper.MappingMethodType(identityProvider.MappingMethod)) if err != nil { return nil, err } switch provider := identityProvider.Provider.(type) { case (*configapi.AllowAllPasswordIdentityProvider): return allowanypassword.New(identityProvider.Name, identityMapper), nil case (*configapi.DenyAllPasswordIdentityProvider): return denypassword.New(), nil case (*configapi.LDAPPasswordIdentityProvider): url, err := ldaputil.ParseURL(provider.URL) if err != nil { return nil, fmt.Errorf("Error parsing LDAPPasswordIdentityProvider URL: %v", err) } bindPassword, err := configapi.ResolveStringValue(provider.BindPassword) if err != nil { return nil, err } clientConfig, err := ldaputil.NewLDAPClientConfig(provider.URL, provider.BindDN, bindPassword, provider.CA, provider.Insecure) if err != nil { return nil, err } opts := ldappassword.Options{ URL: url, ClientConfig: clientConfig, UserAttributeDefiner: ldaputil.NewLDAPUserAttributeDefiner(provider.Attributes), } return ldappassword.New(identityProvider.Name, opts, identityMapper) case (*configapi.HTPasswdPasswordIdentityProvider): htpasswdFile := provider.File if len(htpasswdFile) == 0 { return nil, fmt.Errorf("HTPasswdFile is required to support htpasswd auth") } if htpasswordAuth, err := htpasswd.New(identityProvider.Name, htpasswdFile, identityMapper); err != nil { return nil, fmt.Errorf("Error loading htpasswd file %s: %v", htpasswdFile, err) } else { return htpasswordAuth, nil } case (*configapi.BasicAuthPasswordIdentityProvider): connectionInfo := provider.RemoteConnectionInfo if len(connectionInfo.URL) == 0 { return nil, fmt.Errorf("URL is required for BasicAuthPasswordIdentityProvider") } transport, err := 
cmdutil.TransportFor(connectionInfo.CA, connectionInfo.ClientCert.CertFile, connectionInfo.ClientCert.KeyFile) if err != nil { return nil, fmt.Errorf("Error building BasicAuthPasswordIdentityProvider client: %v", err) } return basicauthpassword.New(identityProvider.Name, connectionInfo.URL, transport, identityMapper), nil case (*configapi.KeystonePasswordIdentityProvider): connectionInfo := provider.RemoteConnectionInfo if len(connectionInfo.URL) == 0 { return nil, fmt.Errorf("URL is required for KeystonePasswordIdentityProvider") } transport, err := cmdutil.TransportFor(connectionInfo.CA, connectionInfo.ClientCert.CertFile, connectionInfo.ClientCert.KeyFile) if err != nil { return nil, fmt.Errorf("Error building KeystonePasswordIdentityProvider client: %v", err) } return keystonepassword.New(identityProvider.Name, connectionInfo.URL, transport, provider.DomainName, identityMapper), nil default: return nil, fmt.Errorf("No password auth found that matches %v. The OAuth server cannot start!", identityProvider) } } func (c *AuthConfig) getAuthenticationRequestHandler() (authenticator.Request, error) { var authRequestHandlers []authenticator.Request if c.SessionAuth != nil { authRequestHandlers = append(authRequestHandlers, c.SessionAuth) } for _, identityProvider := range c.Options.IdentityProviders { identityMapper, err := identitymapper.NewIdentityUserMapper(c.IdentityRegistry, c.UserRegistry, identitymapper.MappingMethodType(identityProvider.MappingMethod)) if err != nil { return nil, err } if configapi.IsPasswordAuthenticator(identityProvider) { passwordAuthenticator, err := c.getPasswordAuthenticator(identityProvider) if err != nil { return nil, err } authRequestHandlers = append(authRequestHandlers, basicauthrequest.NewBasicAuthAuthentication(identityProvider.Name, passwordAuthenticator, true)) } else if identityProvider.UseAsChallenger && configapi.IsOAuthIdentityProvider(identityProvider) { oauthProvider, err := c.getOAuthProvider(identityProvider) if err != 
nil { return nil, err } oauthPasswordAuthenticator, err := external.NewOAuthPasswordAuthenticator(oauthProvider, identityMapper) if err != nil { return nil, fmt.Errorf("unexpected error: %v", err) } authRequestHandlers = append(authRequestHandlers, basicauthrequest.NewBasicAuthAuthentication(identityProvider.Name, oauthPasswordAuthenticator, true)) } else { switch provider := identityProvider.Provider.(type) { case (*configapi.RequestHeaderIdentityProvider): var authRequestHandler authenticator.Request authRequestConfig := &headerrequest.Config{ IDHeaders: provider.Headers, NameHeaders: provider.NameHeaders, EmailHeaders: provider.EmailHeaders, PreferredUsernameHeaders: provider.PreferredUsernameHeaders, } authRequestHandler = headerrequest.NewAuthenticator(identityProvider.Name, authRequestConfig, identityMapper) // Wrap with an x509 verifier if len(provider.ClientCA) > 0 { caData, err := ioutil.ReadFile(provider.ClientCA) if err != nil { return nil, fmt.Errorf("Error reading %s: %v", provider.ClientCA, err) } opts := x509request.DefaultVerifyOptions() opts.Roots = x509.NewCertPool() if ok := opts.Roots.AppendCertsFromPEM(caData); !ok { return nil, fmt.Errorf("Error loading certs from %s: %v", provider.ClientCA, err) } authRequestHandler = x509request.NewVerifier(opts, authRequestHandler, sets.NewString(provider.ClientCommonNames...)) } authRequestHandlers = append(authRequestHandlers, authRequestHandler) } } } authRequestHandler := union.New(authRequestHandlers...) 
return authRequestHandler, nil } // callbackPasswordAuthenticator combines password auth, successful login callback, // and "then" param redirection type callbackPasswordAuthenticator struct { authenticator.Password handlers.AuthenticationSuccessHandler } // redirectSuccessHandler redirects to the then param on successful authentication type redirectSuccessHandler struct{} // AuthenticationSucceeded informs client when authentication was successful func (redirectSuccessHandler) AuthenticationSucceeded(user kuser.Info, then string, w http.ResponseWriter, req *http.Request) (bool, error) { if len(then) == 0 { return false, fmt.Errorf("Auth succeeded, but no redirect existed - user=%#v", user) } http.Redirect(w, req, then, http.StatusFound) return true, nil }
tdawson/origin
pkg/cmd/server/origin/auth.go
GO
apache-2.0
30,809
/** * Copyright 2009-2017 the original author or authors. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.rabbitframework.dbase; import com.rabbitframework.dbase.builder.Configuration; import com.rabbitframework.dbase.dataaccess.SqlDataAccess; public interface RabbitDbaseFactory { public SqlDataAccess openSqlDataAccess(); public Configuration getConfiguration(); }
xuegongzi/rabbitframework
rabbitframework-dbase/src/main/java/com/rabbitframework/dbase/RabbitDbaseFactory.java
Java
apache-2.0
913
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.trino.plugin.iceberg; import com.google.common.collect.ImmutableList; import io.airlift.units.Duration; import io.trino.plugin.base.classloader.ClassLoaderSafeConnectorSplitSource; import io.trino.spi.connector.ConnectorSession; import io.trino.spi.connector.ConnectorSplitManager; import io.trino.spi.connector.ConnectorSplitSource; import io.trino.spi.connector.ConnectorTableHandle; import io.trino.spi.connector.ConnectorTransactionHandle; import io.trino.spi.connector.Constraint; import io.trino.spi.connector.DynamicFilter; import io.trino.spi.connector.FixedSplitSource; import io.trino.spi.type.TypeManager; import org.apache.iceberg.PartitionField; import org.apache.iceberg.Table; import org.apache.iceberg.TableScan; import javax.inject.Inject; import java.util.Set; import static com.google.common.collect.ImmutableSet.toImmutableSet; import static io.trino.plugin.iceberg.IcebergSessionProperties.getDynamicFilteringWaitTimeout; import static io.trino.plugin.iceberg.IcebergUtil.getColumns; import static io.trino.plugin.iceberg.IcebergUtil.getIdentityPartitions; import static java.util.Objects.requireNonNull; public class IcebergSplitManager implements ConnectorSplitManager { public static final int ICEBERG_DOMAIN_COMPACTION_THRESHOLD = 1000; private final IcebergTransactionManager transactionManager; private final TypeManager typeManager; @Inject public IcebergSplitManager(IcebergTransactionManager transactionManager, 
TypeManager typeManager) { this.transactionManager = requireNonNull(transactionManager, "transactionManager is null"); this.typeManager = requireNonNull(typeManager, "typeManager is null"); } @Override public ConnectorSplitSource getSplits( ConnectorTransactionHandle transaction, ConnectorSession session, ConnectorTableHandle handle, SplitSchedulingStrategy splitSchedulingStrategy, DynamicFilter dynamicFilter, Constraint constraint) { IcebergTableHandle table = (IcebergTableHandle) handle; if (table.getSnapshotId().isEmpty()) { return new FixedSplitSource(ImmutableList.of()); } Table icebergTable = transactionManager.get(transaction).getIcebergTable(session, table.getSchemaTableName()); Duration dynamicFilteringWaitTimeout = getDynamicFilteringWaitTimeout(session); Set<Integer> identityPartitionFieldIds = getIdentityPartitions(icebergTable.spec()).keySet().stream() .map(PartitionField::sourceId) .collect(toImmutableSet()); Set<IcebergColumnHandle> identityPartitionColumns = getColumns(icebergTable.schema(), typeManager).stream() .filter(column -> identityPartitionFieldIds.contains(column.getId())) .collect(toImmutableSet()); TableScan tableScan = icebergTable.newScan() .useSnapshot(table.getSnapshotId().get()); IcebergSplitSource splitSource = new IcebergSplitSource( table, identityPartitionColumns, tableScan, dynamicFilter, dynamicFilteringWaitTimeout, constraint); return new ClassLoaderSafeConnectorSplitSource(splitSource, Thread.currentThread().getContextClassLoader()); } }
ebyhr/presto
plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergSplitManager.java
Java
apache-2.0
3,938
package controllers; import play.*; import play.mvc.*; import java.util.*; import controllers.shib.Check; import controllers.shib.Security; import controllers.shib.Shibboleth; /** * * Example restricted controller. Every action defined in this class * requires authentication before it can be accessed. * * @author Scott Phillips, http://www.scottphillips.com/ */ @With(Shibboleth.class) public class Administrative extends Controller { @Check("isAdmin") public static void restricted() { render(); } }
play1-maven-plugin/play1-maven-test-projects
external-modules/shibboleth/app/controllers/Administrative.java
Java
apache-2.0
530
package com.sankoudai.java.apix.xstream; import com.sankoudai.java.apix.xstream.entity.Address; import com.sankoudai.java.apix.xstream.entity.AnnotatedAddress; import com.sankoudai.java.apix.xstream.entity.AnnotatedStudent; import com.sankoudai.java.apix.xstream.entity.Student; import com.thoughtworks.xstream.XStream; import com.thoughtworks.xstream.converters.SingleValueConverter; import com.thoughtworks.xstream.io.xml.StaxDriver; import junit.framework.TestCase; import org.xml.sax.InputSource; import javax.xml.transform.OutputKeys; import javax.xml.transform.Source; import javax.xml.transform.Transformer; import javax.xml.transform.sax.SAXSource; import javax.xml.transform.sax.SAXTransformerFactory; import javax.xml.transform.stream.StreamResult; import java.io.*; import java.util.Arrays; public class TestXstream extends TestCase { public void testConverter() { AnnotatedStudent student = exampleAnnotatedStudent(); XStream xStream = new XStream(new StaxDriver()); xStream.autodetectAnnotations(true); xStream.registerConverter(new AddressConverter()); String xml = xStream.toXML(student); System.out.println(formatXml(xml)); } public void testAnnotation() { AnnotatedStudent student = exampleAnnotatedStudent(); //显示指定使用的标记 XStream xStream = new XStream(new StaxDriver()); xStream.processAnnotations(AnnotatedStudent.class); xStream.processAnnotations(AnnotatedAddress.class); String xml = xStream.toXML(student); System.out.println(formatXml(xml)); System.out.println(); //默认开启标记侦测 xStream = new XStream(new StaxDriver()); xStream.autodetectAnnotations(true); xml = xStream.toXML(student); System.out.println(formatXml(xml)); System.out.println(); //不开启标记 xStream = new XStream(new StaxDriver()); xml = xStream.toXML(student); System.out.println(formatXml(xml)); } public void testAlias() { Student student = exampleStudent(); //class alias XStream xStream = new XStream(new StaxDriver()); xStream.alias("student", Student.class); String xml = xStream.toXML(student); 
System.out.println(formatXml(xml)); System.out.println(); //field alias xStream = new XStream(new StaxDriver()); xStream.aliasField("row", Student.class, "rowNo"); xml = xStream.toXML(student); System.out.println(formatXml(xml)); System.out.println(); //attribute alias xStream = new XStream(new StaxDriver()); xStream.useAttributeFor(Student.class, "rowNo"); xml = xStream.toXML(student); System.out.println(formatXml(xml)); System.out.println(); } public void testToXml() { XStream xStream = new XStream(new StaxDriver()); Student student = exampleStudent(); String xml = xStream.toXML(student); System.out.println(formatXml(xml)); } public void testFromXml() throws UnsupportedEncodingException { XStream xStream = new XStream(new StaxDriver()); Reader reader = exampleReader(); Student student = (Student) xStream.fromXML(reader); System.out.println(student); } /*----辅助方法----*/ AnnotatedStudent exampleAnnotatedStudent() { AnnotatedStudent student = new AnnotatedStudent(); student.setFirstName("Mahesh"); student.setLastName("Parashar"); student.setRowNo(1); student.setClassName("1st"); AnnotatedAddress address = new AnnotatedAddress(); address.setArea("Preet Vihar."); address.setCity("Delhi"); student.setAddresses(Arrays.asList(address)); return student; } Student exampleStudent() { Student student = new Student(); student.setFirstName("Mahesh"); student.setLastName("Parashar"); student.setRowNo(1); student.setClassName("1st"); Address address = new Address(); address.setArea("H.No. 
16/3, Preet Vihar."); address.setCity("Delhi"); address.setState("Delhi"); address.setCountry("India"); address.setPincode(110012); student.setAddress(address); return student; } private BufferedReader exampleReader() throws UnsupportedEncodingException { InputStream is = getClass().getResourceAsStream("/xstream/student.xml"); return new BufferedReader(new InputStreamReader(is, "utf-8")); } private String formatXml(String xml) { try { Transformer serializer = SAXTransformerFactory.newInstance().newTransformer(); serializer.setOutputProperty(OutputKeys.INDENT, "yes"); serializer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "2"); Source xmlSource = new SAXSource(new InputSource(new ByteArrayInputStream(xml.getBytes()))); StreamResult res = new StreamResult(new ByteArrayOutputStream()); serializer.transform(xmlSource, res); return new String(((ByteArrayOutputStream) res.getOutputStream()).toByteArray()); } catch (Exception e) { return xml; } } public static class AddressConverter implements SingleValueConverter { @Override public String toString(Object o) { if (o == null) { return null; } AnnotatedAddress address = (AnnotatedAddress) o; return address.getCity() + "," + address.getArea(); } @Override public Object fromString(String s) { if(s == null){ return null; } String[] addressDetail = s.split(","); AnnotatedAddress address = new AnnotatedAddress(); address.setCity(addressDetail[0]); address.setArea(addressDetail[1]); return address; } @Override public boolean canConvert(Class type) { return AnnotatedAddress.class.equals(type); } } }
sankoudai/java-knowledge-center
java-knowledge-apix/src/main/java/com/sankoudai/java/apix/xstream/TestXstream.java
Java
apache-2.0
6,215
# Copyright (c) 2013 Altiscale, inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License # Decorators that wrap the benchmark to do additional work before and after module Command attr_reader :description end # SSH and Distcp class RemoteDistCP include Logging include Command HADOOP_FINISHED_STATE = 'FINISHED' JOB_STATUS_SLEEP_INTERNVAL = 15 def initialize(from_dir, to_dir, force = false) @from_dir = from_dir @to_dir = to_dir @force = force @description = "Copy #{@from_dir} => #{@to_dir} (force: #{@force})" end def run(prior_result) ssh_command = SSHRun.new prior_result['host'], prior_result['user'], prior_result['ssh_key'] sleep JOB_STATUS_SLEEP_INTERNVAL until job_finished?(ssh_command, prior_result[:application_num]) dir_exists_status = ssh_command.execute "hadoop fs -test -d #{@to_dir}" ok_to_copy = @force || dir_exists_status[:exit_code] == 1 command = "hadoop distcp #{@from_dir} #{@to_dir}" logger.info "Aborting copy to #{@to_dir}" unless ok_to_copy status = ok_to_copy ? ssh_command.execute(command) : nil status end def job_finished?(ssh, application_num) return true if application_num.nil? 
# make an ssh rest call rest_call = "curl --get \'http://localhost:9026/ws/v1/cluster/apps/#{application_num}\'" state = nil ssh.execute rest_call do |data| begin json = JSON.parse data state = json['app']['state'] rescue JSON::ParserError => e logger.debug "parse error #{e}" end end logger.debug "state = #{state}" state == HADOOP_FINISHED_STATE end end # Does an Scp class RemoteSCP include Command include Logging def initialize(from_dir, to_dir) @from_dir = from_dir @to_dir = to_dir @description = "scp #{@from_dir} => #{@to_dir})" end def run(prior_result) logger.debug "prior_result: #{prior_result.to_s}" scp = SCPUploader.new prior_result['host'], prior_result['user'], prior_result['ssh_key'] scp.upload @from_dir, @to_dir end end # A Command wrapper that executes a list of commands class CommandChain include Logging def initialize(*commands) @commands = *commands end def add(*commands) return if commands.nil? commands = commands.last if commands.last.is_a?(Array) commands.each do |cmd| @commands << cmd unless cmd.nil? end self end def run(prior_result = { exit_code: 0 }) @commands.each do |cmd| logger.info "executing #{cmd.description}" show_wait_spinner do result = cmd.run prior_result prior_result = result.nil? ? prior_result : result.merge(prior_result) end end prior_result end def commands @commands.clone end def show_wait_spinner(fps = 10) chars = %w[| / - \\] delay = 1.0 / fps iter = 0 spinner = Thread.new do while iter print chars[(iter += 1) % chars.length] sleep delay print "\b" end unless logger.level == Logger::DEBUG end yield.tap do # After yielding to the block, save the return value iter = false # Tell the thread to exit, cleaning up after itself spinner.join # and wait for it to do so. end # Use the block's return value as the method's end end
Altiscale/perf-framework
lib/decorators.rb
Ruby
apache-2.0
3,787
package pib.rest; import com.google.gson.Gson; import com.google.gson.JsonObject; import com.google.gson.JsonParser; import org.apache.http.HttpEntity; import org.apache.http.HttpResponse; import org.apache.http.NameValuePair; import org.apache.http.client.entity.UrlEncodedFormEntity; import org.apache.http.client.methods.HttpPost; import org.apache.http.conn.ssl.SSLConnectionSocketFactory; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; import org.apache.http.message.BasicNameValuePair; import org.apache.http.util.EntityUtils; import javax.net.ssl.*; import java.io.IOException; import java.security.KeyManagementException; import java.security.NoSuchAlgorithmException; import java.security.SecureRandom; import java.security.cert.X509Certificate; import java.util.ArrayList; import java.util.List; /** * This class encapsulates all REST specific methods used to talk to the PiBell-Server, e.g. to * register a new Call request to a PiBell-Pi device. 
It can be used in local mode which means * the PiBell-Server is expected to run in the local intranet (for testing), or in remote mode that * expects the PiBell-Server on a static * * @author mdrobek * @version 0.1 * @since 30/12/14 */ public class Rest { // Default server address (TODO: move into external config file) private static final String REMOTE_HOST_ADDRESS = "https://unionwork.org"; // Localhost server for testing purposes private static final String LOCAL_HOST_ADDRESS = "http://192.168.0.3:8080"; // The deployment prefix to access the REST API private static final String REST_PREFIX = "/pibell/v1/rest"; // Backend REST interfaces private static final String CALL_URI = "/call"; private static final String STATUS_URI = "/status"; // Connection factory to initiate SSL calls private SSLConnectionSocketFactory sslsf; // The compiled REST API address private final String HOST_ADDRESS; // The local PiBell-GUI user ID name private final String USER_ID; // The listener to be notified about new changes private RestListener listener; /** * Creates a new REST object with the given 'userID' name which is either set in local or remote * mode. * @param aUserID The local PiBell-GUI name to carry out call requests. * @param local True - The Rest object will be initialised with local server settings that * expect the PiBell-Server to be running in the intranet. * False - All requests will be sent to a remotely running PiBell-Server. 
*/ public Rest(String aUserID, RestListener aListener, boolean local) { // 1) Check for local or remote mode and initialise the HOST address respectively if (local) HOST_ADDRESS = LOCAL_HOST_ADDRESS + REST_PREFIX; else HOST_ADDRESS = REMOTE_HOST_ADDRESS + REST_PREFIX; USER_ID = aUserID; this.listener = aListener; // 2) Initialise a SecurityContext to enable SSL calls SSLContext ctx; try { // FIXME: Initialise a TrustManager, that accepts everything // -> programatically import the certificate to the java keystore // see: // http://stackoverflow.com/questions/6755180/java-ssl-connect-add-server-cert-to-keystore-programatically ctx = SSLContext.getInstance("TLS"); ctx.init(new KeyManager[0], new TrustManager[]{ new DefaultTrustManager() }, new SecureRandom()); SSLContext.setDefault(ctx); this.sslsf = new SSLConnectionSocketFactory(ctx); } catch (NoSuchAlgorithmException | KeyManagementException e) { e.printStackTrace(); } } /** * Carries out a call request to the PiBell-Server with the given recipients name (PiBell-Pi * name). * @param recipient Name of recipient to be called. */ public void performStatusRequest(String recipient) throws IOException { // 1) Create an SSL HttpClient CloseableHttpClient httpClient = HttpClients.custom() .setSSLSocketFactory(this.sslsf) .build(); // 2) Create the call POST request HttpPost statusReq = createStatusRequest(recipient); // 3) Carry out the call request HttpResponse response = httpClient.execute(statusReq); HttpEntity resEntity = response.getEntity(); final String response_str = EntityUtils.toString(resEntity); System.out.println("Response is: " + response_str); if (null != response_str) { JsonObject res = new JsonParser().parse(response_str).getAsJsonObject(); this.listener.statusChange(recipient, res.get("isOnline").getAsBoolean(), res.get("lastSeen").getAsLong()); } } /** * Carries out a call request to the PiBell-Server with the given recipients name (PiBell-Pi * name). * @param recipient Name of recipient to be called. 
*/ public void performCallRequest(String recipient) throws IOException { // 1) Create an SSL HttpClient CloseableHttpClient httpClient = HttpClients.custom() .setSSLSocketFactory(this.sslsf) .build(); // 2) Create the call POST request HttpPost callReq = createCallRequest(USER_ID, recipient); // 3) Carry out the call request HttpResponse response = httpClient.execute(callReq); HttpEntity resEntity = response.getEntity(); final String response_str = EntityUtils.toString(resEntity); // System.out.println("Response is: " + response_str); } /** * Creates a new HTTPPost request with the given userID (local PiBell-GUI name) to check its * online status. * @param userID The local user name (PiBell-GUI name) that is used to indicate the origin of * the registered call. */ private HttpPost createStatusRequest(String userID) { // Create data (payload) List<NameValuePair> reqParams = new ArrayList<>(); reqParams.add(new BasicNameValuePair("target", userID)); return createPostRequest(STATUS_URI, reqParams); } /** * Creates a new HTTPPost request with the given userID (local PiBell-GUI name) and the given * recipients name (PiBell-Pi name) as the requests form parameters. * @param userID The local user name (PiBell-GUI name) that is used to indicate the origin of * the registered call. * @param recipient The name of the recipient for the registered call (PiBell-Pi name). */ private HttpPost createCallRequest(String userID, String recipient) { List<NameValuePair> reqParams = new ArrayList<>(); reqParams.add(new BasicNameValuePair("caller", userID)); reqParams.add(new BasicNameValuePair("rec", recipient)); return createPostRequest(CALL_URI, reqParams); } /** * Creates a new HTTPPost request for the given URI suffix and its request parameters. * @param URI_suffix The last part of the Server URI, e.g., CALL, STATUS and so on * @param reqParams A list of request parameters * @return A created POST request. 
*/ private HttpPost createPostRequest(String URI_suffix, List<NameValuePair> reqParams) { try { // Create the form parameter objects for the POST request UrlEncodedFormEntity reqEntity = new UrlEncodedFormEntity(reqParams); // Create the post request for the given HOST address and the REST API location HttpPost post = new HttpPost(HOST_ADDRESS + URI_suffix); // Add the form parameters to the POST request post.setEntity(reqEntity); return post; } catch (IOException e) { e.printStackTrace(); } return null; } /** * A simple and empty TrustManager implementation. This is not secure and shouldn't be used * in a productive environment, since it merely accepts every certificate. */ private static class DefaultTrustManager implements X509TrustManager { @Override public void checkClientTrusted(X509Certificate[] arg0, String arg1) {} @Override public void checkServerTrusted(X509Certificate[] arg0, String arg1) {} @Override public X509Certificate[] getAcceptedIssuers() { return null; } } }
mdrobek/PiBell
PiBell-GUI/src/main/java/pib/rest/Rest.java
Java
apache-2.0
8,564
import { Directive, forwardRef } from '@angular/core'; import { NG_VALIDATORS, Validator, AbstractControl } from '@angular/forms'; import { base64 } from './validator'; const BASE64_VALIDATOR: any = { provide: NG_VALIDATORS, useExisting: forwardRef(() => Base64Validator), multi: true }; @Directive({ selector: '[base64][formControlName],[base64][formControl],[base64][ngModel]', providers: [BASE64_VALIDATOR] }) export class Base64Validator implements Validator { validate(c: AbstractControl): {[key: string]: any} { return base64(c); } }
1ziton/ng-starter
src/app/shared/validator/base64/directive.ts
TypeScript
apache-2.0
561
package com.hdu.coolweather.model; /** * Created by hdu on 15/4/10. */ public class Province { private int id; private String provinceName; private String provinceCode; public int getId() { return id; } public void setId(int id) { this.id = id; } public String getProvinceName() { return provinceName; } public void setProvinceName(String provinceName) { this.provinceName = provinceName; } public String getProvinceCode() { return provinceCode; } public void setProvinceCode(String provinceCode) { this.provinceCode = provinceCode; } }
duhongjin8/CoolWeather
app/src/main/java/com/hdu/coolweather/model/Province.java
Java
apache-2.0
651
// Code generated by protoc-gen-gogo. DO NOT EDIT. // source: policy/v1beta1/type.proto // Describes the rules used to configure Mixer's policy and telemetry features. package v1beta1 import ( bytes "bytes" fmt "fmt" github_com_gogo_protobuf_jsonpb "github.com/gogo/protobuf/jsonpb" proto "github.com/gogo/protobuf/proto" _ "github.com/gogo/protobuf/types" math "math" ) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf // MarshalJSON is a custom marshaler for Value func (this *Value) MarshalJSON() ([]byte, error) { str, err := TypeMarshaler.MarshalToString(this) return []byte(str), err } // UnmarshalJSON is a custom unmarshaler for Value func (this *Value) UnmarshalJSON(b []byte) error { return TypeUnmarshaler.Unmarshal(bytes.NewReader(b), this) } // MarshalJSON is a custom marshaler for IPAddress func (this *IPAddress) MarshalJSON() ([]byte, error) { str, err := TypeMarshaler.MarshalToString(this) return []byte(str), err } // UnmarshalJSON is a custom unmarshaler for IPAddress func (this *IPAddress) UnmarshalJSON(b []byte) error { return TypeUnmarshaler.Unmarshal(bytes.NewReader(b), this) } // MarshalJSON is a custom marshaler for Duration func (this *Duration) MarshalJSON() ([]byte, error) { str, err := TypeMarshaler.MarshalToString(this) return []byte(str), err } // UnmarshalJSON is a custom unmarshaler for Duration func (this *Duration) UnmarshalJSON(b []byte) error { return TypeUnmarshaler.Unmarshal(bytes.NewReader(b), this) } // MarshalJSON is a custom marshaler for TimeStamp func (this *TimeStamp) MarshalJSON() ([]byte, error) { str, err := TypeMarshaler.MarshalToString(this) return []byte(str), err } // UnmarshalJSON is a custom unmarshaler for TimeStamp func (this *TimeStamp) UnmarshalJSON(b []byte) error { return TypeUnmarshaler.Unmarshal(bytes.NewReader(b), this) } // MarshalJSON is a custom marshaler for DNSName func (this *DNSName) MarshalJSON() ([]byte, error) { str, err := 
TypeMarshaler.MarshalToString(this) return []byte(str), err } // UnmarshalJSON is a custom unmarshaler for DNSName func (this *DNSName) UnmarshalJSON(b []byte) error { return TypeUnmarshaler.Unmarshal(bytes.NewReader(b), this) } // MarshalJSON is a custom marshaler for StringMap func (this *StringMap) MarshalJSON() ([]byte, error) { str, err := TypeMarshaler.MarshalToString(this) return []byte(str), err } // UnmarshalJSON is a custom unmarshaler for StringMap func (this *StringMap) UnmarshalJSON(b []byte) error { return TypeUnmarshaler.Unmarshal(bytes.NewReader(b), this) } // MarshalJSON is a custom marshaler for EmailAddress func (this *EmailAddress) MarshalJSON() ([]byte, error) { str, err := TypeMarshaler.MarshalToString(this) return []byte(str), err } // UnmarshalJSON is a custom unmarshaler for EmailAddress func (this *EmailAddress) UnmarshalJSON(b []byte) error { return TypeUnmarshaler.Unmarshal(bytes.NewReader(b), this) } // MarshalJSON is a custom marshaler for Uri func (this *Uri) MarshalJSON() ([]byte, error) { str, err := TypeMarshaler.MarshalToString(this) return []byte(str), err } // UnmarshalJSON is a custom unmarshaler for Uri func (this *Uri) UnmarshalJSON(b []byte) error { return TypeUnmarshaler.Unmarshal(bytes.NewReader(b), this) } var ( TypeMarshaler = &github_com_gogo_protobuf_jsonpb.Marshaler{} TypeUnmarshaler = &github_com_gogo_protobuf_jsonpb.Unmarshaler{} )
geeknoid/api
policy/v1beta1/type_json.gen.go
GO
apache-2.0
3,458
package orchastack.core.event.adapter.hornetq.adapter; import orchastack.core.event.adapter.hornetq.msg.HornetqMessageUnmarshaller; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import com.espertech.esper.adapter.AdapterSPI; import com.espertech.esper.adapter.AdapterState; import com.espertech.esper.adapter.AdapterStateManager; import com.espertech.esper.adapter.InputAdapter; import com.espertech.esper.client.EPException; import com.espertech.esper.client.EPServiceProvider; import com.espertech.esper.core.service.EPServiceProviderSPI; import com.espertech.esper.util.ExecutionPathDebugLog; /** * Created for ESPER. */ public abstract class HornetqInputAdapter implements InputAdapter, AdapterSPI { private final Log log = LogFactory.getLog(this.getClass()); /** * Manages adapter state. */ protected final AdapterStateManager stateManager = new AdapterStateManager(); /** * Engine services. */ protected EPServiceProviderSPI epServiceProviderSPI; /** * Start time. */ protected long startTime; /** * Unmarshaller for JMS messages. */ protected HornetqMessageUnmarshaller messageUnmarshaller; /** * Returns the unmarshaller. * @return unmarshaller */ public HornetqMessageUnmarshaller getMessageUnmarshaller() { return messageUnmarshaller; } /** * Sets the unmarshaller to use. 
* @param jmsMessageUnmarshaller is the unmarshaller to use */ public void setMessageUnmarshaller( HornetqMessageUnmarshaller messageUnmarshaller) { this.messageUnmarshaller = messageUnmarshaller; } public EPServiceProvider getEPServiceProvider() { return epServiceProviderSPI; } public void setEPServiceProvider(EPServiceProvider epService) { if (epService == null) { throw new IllegalArgumentException("Null service provider"); } if (!(epService instanceof EPServiceProviderSPI)) { throw new IllegalArgumentException("Cannot downcast service provider to SPI"); } epServiceProviderSPI = (EPServiceProviderSPI) epService; } public void start() throws EPException { if ((ExecutionPathDebugLog.isDebugEnabled) && (log.isDebugEnabled())) { log.debug(".start"); } if (epServiceProviderSPI.getEPRuntime() == null) { throw new EPException( "Attempting to start an Adapter that hasn't had the epService provided"); } startTime = System.currentTimeMillis(); if (log.isDebugEnabled()) { log.debug(".start startTime==" + startTime); } stateManager.start(); } public void pause() throws EPException { if ((ExecutionPathDebugLog.isDebugEnabled) && (log.isDebugEnabled())) { log.debug(".pause"); } stateManager.pause(); } public void resume() throws EPException { if ((ExecutionPathDebugLog.isDebugEnabled) && (log.isDebugEnabled())) { log.debug(".resume"); } stateManager.resume(); } public void stop() throws EPException { if ((ExecutionPathDebugLog.isDebugEnabled) && (log.isDebugEnabled())) { log.debug(".stop"); } stateManager.stop(); } public void destroy() throws EPException { if ((ExecutionPathDebugLog.isDebugEnabled) && (log.isDebugEnabled())) { log.debug(".destroy"); } stateManager.destroy(); } public AdapterState getState() { return stateManager.getState(); } }
mathews/orchastack-core
orchastack.core.event/orchastack.core.event.adapter.kafka/src/main/java/orchastack/core/event/adapter/hornetq/adapter/HornetqInputAdapter.java
Java
apache-2.0
3,925
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.ql.exec.vector.expressions; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; /** * Cast a string to a decimal. * * If other functions besides cast need to take a string in and produce a decimal, * you can subclass this class or convert it to a superclass, and * implement different "func()" methods for each operation. */ public class CastStringToDecimal extends VectorExpression { private static final long serialVersionUID = 1L; int inputColumn; int outputColumn; public CastStringToDecimal(int inputColumn, int outputColumn) { this.inputColumn = inputColumn; this.outputColumn = outputColumn; this.outputType = "decimal"; } public CastStringToDecimal() { super(); this.outputType = "decimal"; } /** * Convert input string to a decimal, at position i in the respective vectors. 
*/ protected void func(DecimalColumnVector outV, BytesColumnVector inV, int i) { String s; try { /* If this conversion is frequently used, this should be optimized, * e.g. by converting to decimal from the input bytes directly without * making a new string. */ s = new String(inV.vector[i], inV.start[i], inV.length[i], "UTF-8"); outV.vector[i].set(HiveDecimal.create(s)); } catch (Exception e) { // for any exception in conversion to decimal, produce NULL outV.noNulls = false; outV.isNull[i] = true; } } @Override public void evaluate(VectorizedRowBatch batch) { if (childExpressions != null) { super.evaluateChildren(batch); } BytesColumnVector inV = (BytesColumnVector) batch.cols[inputColumn]; int[] sel = batch.selected; int n = batch.size; DecimalColumnVector outV = (DecimalColumnVector) batch.cols[outputColumn]; if (n == 0) { // Nothing to do return; } if (inV.noNulls) { outV.noNulls = true; if (inV.isRepeating) { outV.isRepeating = true; func(outV, inV, 0); } else if (batch.selectedInUse) { for(int j = 0; j != n; j++) { int i = sel[j]; func(outV, inV, i); } outV.isRepeating = false; } else { for(int i = 0; i != n; i++) { func(outV, inV, i); } outV.isRepeating = false; } } else { // Handle case with nulls. Don't do function if the value is null, // because the data may be undefined for a null value. 
outV.noNulls = false; if (inV.isRepeating) { outV.isRepeating = true; outV.isNull[0] = inV.isNull[0]; if (!inV.isNull[0]) { func(outV, inV, 0); } } else if (batch.selectedInUse) { for(int j = 0; j != n; j++) { int i = sel[j]; outV.isNull[i] = inV.isNull[i]; if (!inV.isNull[i]) { func(outV, inV, i); } } outV.isRepeating = false; } else { System.arraycopy(inV.isNull, 0, outV.isNull, 0, n); for(int i = 0; i != n; i++) { if (!inV.isNull[i]) { func(outV, inV, i); } } outV.isRepeating = false; } } } @Override public int getOutputColumn() { return outputColumn; } public void setOutputColumn(int outputColumn) { this.outputColumn = outputColumn; } public int getInputColumn() { return inputColumn; } public void setInputColumn(int inputColumn) { this.inputColumn = inputColumn; } @Override public String vectorExpressionParameters() { return "col " + inputColumn; } @Override public VectorExpressionDescriptor.Descriptor getDescriptor() { VectorExpressionDescriptor.Builder b = new VectorExpressionDescriptor.Builder(); b.setMode(VectorExpressionDescriptor.Mode.PROJECTION) .setNumArguments(1) .setArgumentTypes( VectorExpressionDescriptor.ArgumentType.STRING_FAMILY) .setInputExpressionTypes( VectorExpressionDescriptor.InputExpressionType.COLUMN); return b.build(); } }
vergilchiu/hive
ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDecimal.java
Java
apache-2.0
5,154
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.xenei.junit.contract.exampleTests;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.runner.RunWith;
import org.xenei.junit.bad.BadNoInject;
import org.xenei.junit.contract.Contract;
import org.xenei.junit.contract.ContractExclude;
import org.xenei.junit.contract.ContractImpl;
import org.xenei.junit.contract.ContractSuite;
import org.xenei.junit.contract.IProducer;

/**
 * Runs the C contract tests through the {@link ContractSuite} runner while
 * excluding one inherited B contract test via {@link ContractExclude}.
 *
 * The suite executes the tests declared in CT plus the inherited AT (A
 * contract) and BT (B contract) tests, except the excluded
 * {@code BT.testGetBInt}. The IProducer declared here is injected into all of
 * those test classes.
 *
 * The Listener bookkeeping in the lifecycle methods exists solely so that
 * junit-contracts' own test suite can verify that the expected tests ran, in
 * the expected producer-wrapped order; it is not a pattern for production
 * tests.
 */
// run as a contract test
@RunWith(ContractSuite.class)
// testing the CImpl class.
@ContractImpl(value = CImpl.class, ignore = { BadNoInject.class })
@ContractExclude(value = BT.class, methods = { "testGetBInt" })
public class CImplContractTestWithExclude {

	// single producer shared by every contract test in the suite
	private final IProducer<CImpl> producer = new IProducer<CImpl>() {

		@Override
		public CImpl newInstance() {
			Listener.add( "CImplContractTest.producer.newInstance()" );
			return new CImpl();
		}

		@Override
		public void cleanUp() {
			Listener.add( "CImplContractTest.producer.cleanUp()" );
		}
	};

	/**
	 * The method to inject the producer into the test classes.
	 *
	 * @return The producer we want to use for the tests.
	 */
	@Contract.Inject
	public IProducer<CImpl> getProducer() {
		return producer;
	}

	/**
	 * Reset the event listener before the suite runs.
	 */
	@BeforeClass
	public static void beforeClass() {
		Listener.clear();
	}

	/**
	 * Checks one newInstance / test / cleanUp triple and consumes the matched
	 * test name from the expectation list.
	 */
	private static void verifyTest(List<String> expectedTests, List<String> results) {
		Assert.assertEquals( "CImplContractTest.producer.newInstance()", results.get( 0 ) );
		final String testName = results.get( 1 );
		Assert.assertTrue( "Missing " + testName, expectedTests.contains( testName ) );
		expectedTests.remove( testName );
		Assert.assertEquals( "CImplContractTest.producer.cleanUp()", results.get( 2 ) );
	}

	/**
	 * Verify that the Listener recorded all the expected events.
	 */
	@AfterClass
	public static void afterClass() {
		final List<String> expectedTests = new ArrayList<String>( Arrays.asList(
				"cname", "cname version of bname", "cname version of aname" ) );
		final int testCount = expectedTests.size();
		final List<String> events = Listener.get();

		// each test contributes exactly three events: newInstance, test, cleanUp
		for (int offset = 0; offset < testCount * 3; offset += 3) {
			verifyTest( expectedTests, events.subList( offset, offset + 3 ) );
		}
		Assert.assertTrue( expectedTests.isEmpty() );
	}
}
Claudenw/junit-contracts
junit/src/test/java/org/xenei/junit/contract/exampleTests/CImplContractTestWithExclude.java
Java
apache-2.0
3,981
// Copyright (c) 2017, Baidu.com, Inc. All Rights Reserved
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

#include <gtest/gtest.h>
#include <string>

#include "olap/column_file/bloom_filter.hpp"
#include "util/logging.h"

using std::string;

namespace palo {
namespace column_file {

// Test fixture for BloomFilter; no shared state is needed, so the
// lifecycle hooks are intentionally empty.
class TestBloomFilter : public testing::Test {
public:
    virtual ~TestBloomFilter() {}
    virtual void SetUp() {}
    virtual void TearDown() {}
};

// Init BloomFilter with different item number and fpp,
// and verify bit_num and hash_function_num calculated by BloomFilter
TEST_F(TestBloomFilter, init_bloom_filter) {
    // default fpp: 1024 items -> 6400 bits / 4 hash functions
    {
        BloomFilter bf;
        bf.init(1024);
        ASSERT_EQ(6400, bf.bit_num());
        ASSERT_EQ(4, bf.hash_function_num());
    }
    // lower fpp (0.01) requires more bits and more hash functions
    {
        BloomFilter bf;
        bf.init(1024, 0.01);
        ASSERT_EQ(9856, bf.bit_num());
        ASSERT_EQ(7, bf.hash_function_num());
    }
    // higher fpp (0.1) with more items: fewer hash functions
    {
        BloomFilter bf;
        bf.init(10240, 0.1);
        ASSERT_EQ(49088, bf.bit_num());
        ASSERT_EQ(3, bf.hash_function_num());
    }
    // init from an externally supplied bit buffer, then reset back to empty
    {
        BloomFilter bf;
        uint32_t data_len = 100;
        uint32_t hash_function_num = 4;
        // NOTE(review): 'data' is handed to bf.init(); whether reset() frees
        // it is not visible here — confirm BloomFilter takes ownership,
        // otherwise this allocation leaks.
        uint64_t* data = new uint64_t[data_len];
        bf.init(data, data_len, hash_function_num);
        ASSERT_EQ(6400, bf.bit_num());
        ASSERT_EQ(4, bf.hash_function_num());
        // the filter must use the caller's buffer directly, not a copy
        ASSERT_EQ(data, bf.bit_set_data());

        bf.reset();
        ASSERT_EQ(0, bf.bit_num());
        ASSERT_EQ(0, bf.hash_function_num());
        ASSERT_EQ(NULL, bf.bit_set_data());
    }
}

// Add different buffer to BloomFilter and verify existence
TEST_F(TestBloomFilter, add_and_test_bytes) {
    string bytes;
    BloomFilter bf;
    bf.init(1024);

    // a NULL/empty buffer must be accepted and later report as present
    bf.add_bytes(NULL, 0);
    ASSERT_TRUE(bf.test_bytes(NULL, 0));

    bytes = "hello";
    bf.add_bytes(bytes.c_str(), bytes.size());
    ASSERT_TRUE(bf.test_bytes(bytes.c_str(), bytes.size()));

    bytes = "palo";
    bf.add_bytes(bytes.c_str(), bytes.size());
    ASSERT_TRUE(bf.test_bytes(bytes.c_str(), bytes.size()));

    // merging a second filter of the same geometry must carry its
    // entries over into 'bf'
    BloomFilter new_bf;
    new_bf.init(1024);

    bytes = "world";
    new_bf.add_bytes(bytes.c_str(), bytes.size());
    ASSERT_TRUE(bf.merge(new_bf));
    ASSERT_TRUE(bf.test_bytes(bytes.c_str(), bytes.size()));
}

// Print bloom filter buffer and points of specified string
TEST_F(TestBloomFilter, bloom_filter_info) {
    string bytes;
    BloomFilter bf;
    bf.init(8, 0.1);

    bytes = "palo";
    bf.add_bytes(bytes.c_str(), bytes.size());
    // golden values: depend on the filter's hash functions staying stable
    string buffer_expect = "bit_num:64 hash_function_num:6 "
            "bit_set:0000000000000000101000000000000000000010100000000000000000101000";
    string buffer = bf.to_string();
    ASSERT_TRUE(buffer_expect == buffer);

    string points_expect = "58-16-38-60-18-40";
    string points = bf.get_bytes_points_string(bytes.c_str(), bytes.size());
    ASSERT_TRUE(points_expect == points);

    // the remaining cases only log the computed bit positions for
    // manual inspection; nothing is asserted
    bytes = "a";
    points = bf.get_bytes_points_string(bytes.c_str(), bytes.size());
    OLAP_LOG_WARNING("bytes=%s points=%s", bytes.c_str(), points.c_str());

    bytes = "ab";
    points = bf.get_bytes_points_string(bytes.c_str(), bytes.size());
    OLAP_LOG_WARNING("bytes=%s points=%s", bytes.c_str(), points.c_str());

    bytes = "b";
    points = bf.get_bytes_points_string(bytes.c_str(), bytes.size());
    OLAP_LOG_WARNING("bytes=%s points=%s", bytes.c_str(), points.c_str());

    bytes = "ba";
    points = bf.get_bytes_points_string(bytes.c_str(), bytes.size());
    OLAP_LOG_WARNING("bytes=%s points=%s", bytes.c_str(), points.c_str());

    bytes = "c";
    points = bf.get_bytes_points_string(bytes.c_str(), bytes.size());
    OLAP_LOG_WARNING("bytes=%s points=%s", bytes.c_str(), points.c_str());

    bytes = "bc";
    points = bf.get_bytes_points_string(bytes.c_str(), bytes.size());
    OLAP_LOG_WARNING("bytes=%s points=%s", bytes.c_str(), points.c_str());

    bytes = "ac";
    points = bf.get_bytes_points_string(bytes.c_str(), bytes.size());
    OLAP_LOG_WARNING("bytes=%s points=%s", bytes.c_str(), points.c_str());

    bytes = "abc";
    points = bf.get_bytes_points_string(bytes.c_str(), bytes.size());
    OLAP_LOG_WARNING("bytes=%s points=%s", bytes.c_str(), points.c_str());
}

} // namespace column_file
} // namespace palo

// Test entry point: loads the BE config (required by the OLAP code under
// test), initializes logging, then runs all registered gtest cases.
int main(int argc, char **argv) {
    // NOTE(review): getenv("PALO_HOME") may return NULL, which makes the
    // std::string construction undefined — confirm the env var is always
    // set by the test harness.
    std::string conffile = std::string(getenv("PALO_HOME")) + "/conf/be.conf";
    if (!palo::config::init(conffile.c_str(), false)) {
        fprintf(stderr, "error read config file. \n");
        return -1;
    }
    palo::init_glog("be-test");
    ::testing::InitGoogleTest(&argc, argv);
    return RUN_ALL_TESTS();
}
lingbin/palo
be/test/olap/bloom_filter_test.cpp
C++
apache-2.0
5,117
<?php class themesModel extends db { function __construct() { parent::__construct(); } public function getThemeInformation($data) { return $this->query($this->selectQuery($data)); } public function addThemeInformation($data) { return $this->query($this->insertQuery($data)); } private function selectQuery($data) { return " SELECT t.themeId as themeId, t.eventId as eventId, t.theme as theme, t.type as type, t.selectable as selectable, ta.themeAwardId as themeAwardId, ta.award as award, ta.place as place FROM themes t, themeAwards ta WHERE t.eventId = '{$this->escapeCharacters($data['eventId'])}' AND t.themeId = ta.themeId ORDER by t.theme, ta.place ; "; } private function insertQuery($data) { return " INSERT INTO themes ( eventId, theme, type, selectable ) VALUES ( '{$this->escapeCharacters($data['eventId'])}', '{$this->escapeCharacters($data['theme'])}', '{$this->escapeCharacters($data['type'])}', '{$this->escapeCharacters($data['selectable'])}' ) ; "; } } ?>
brianpilati/brickslopes
app/php/models/themesModel.php
PHP
apache-2.0
1,542
/*
 * Knetik Platform API Documentation latest
 * This is the spec for the Knetik API. Use this in conjunction with the documentation found at https://knetikcloud.com.
 *
 * OpenAPI spec version: latest
 * Contact: support@knetik.com
 *
 * NOTE: This class is auto generated by the swagger code generator program.
 * https://github.com/swagger-api/swagger-codegen.git
 * Do not edit the class manually.
 */


package com.knetikcloud.model;

import java.util.Objects;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonValue;
import com.knetikcloud.model.SimpleReferenceResourceint;
import com.knetikcloud.model.SimpleUserResource;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;

/**
 * UserItemLogResource
 *
 * Swagger-generated model for one entry of a user's item interaction log.
 * NOTE(review): unlike the other fields, 'id' and 'logDate' have no setter or
 * fluent builder — presumably server-assigned/read-only per the spec; confirm
 * against the OpenAPI definition before relying on it.
 */
@javax.annotation.Generated(value = "io.swagger.codegen.languages.JavaClientCodegen", date = "2018-03-14T12:03:43.231-04:00")
public class UserItemLogResource {
  @JsonProperty("id")
  private Integer id = null;

  @JsonProperty("info")
  private String info = null;

  @JsonProperty("item")
  private SimpleReferenceResourceint item = null;

  @JsonProperty("log_date")
  private Long logDate = null;

  @JsonProperty("type")
  private String type = null;

  @JsonProperty("user")
  private SimpleUserResource user = null;

  @JsonProperty("user_inventory")
  private Integer userInventory = null;

   /**
   * The log entry id
   * @return id
  **/
  @ApiModelProperty(value = "The log entry id")
  public Integer getId() {
    return id;
  }

  public UserItemLogResource info(String info) {
    this.info = info;
    return this;
  }

   /**
   * Additional information defined by the type
   * @return info
  **/
  @ApiModelProperty(value = "Additional information defined by the type")
  public String getInfo() {
    return info;
  }

  public void setInfo(String info) {
    this.info = info;
  }

  public UserItemLogResource item(SimpleReferenceResourceint item) {
    this.item = item;
    return this;
  }

   /**
   * The item interacted with
   * @return item
  **/
  @ApiModelProperty(value = "The item interacted with")
  public SimpleReferenceResourceint getItem() {
    return item;
  }

  public void setItem(SimpleReferenceResourceint item) {
    this.item = item;
  }

   /**
   * The date/time this event occurred in seconds since epoch
   * @return logDate
  **/
  @ApiModelProperty(value = "The date/time this event occurred in seconds since epoch")
  public Long getLogDate() {
    return logDate;
  }

  public UserItemLogResource type(String type) {
    this.type = type;
    return this;
  }

   /**
   * The type of event
   * @return type
  **/
  @ApiModelProperty(value = "The type of event")
  public String getType() {
    return type;
  }

  public void setType(String type) {
    this.type = type;
  }

  public UserItemLogResource user(SimpleUserResource user) {
    this.user = user;
    return this;
  }

   /**
   * The user making the interaction
   * @return user
  **/
  @ApiModelProperty(value = "The user making the interaction")
  public SimpleUserResource getUser() {
    return user;
  }

  public void setUser(SimpleUserResource user) {
    this.user = user;
  }

  public UserItemLogResource userInventory(Integer userInventory) {
    this.userInventory = userInventory;
    return this;
  }

   /**
   * The id of the inventory entry this event is related to, if any
   * @return userInventory
  **/
  @ApiModelProperty(value = "The id of the inventory entry this event is related to, if any")
  public Integer getUserInventory() {
    return userInventory;
  }

  public void setUserInventory(Integer userInventory) {
    this.userInventory = userInventory;
  }


  // Field-wise value equality over all seven properties.
  @Override
  public boolean equals(java.lang.Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    UserItemLogResource userItemLogResource = (UserItemLogResource) o;
    return Objects.equals(this.id, userItemLogResource.id) &&
        Objects.equals(this.info, userItemLogResource.info) &&
        Objects.equals(this.item, userItemLogResource.item) &&
        Objects.equals(this.logDate, userItemLogResource.logDate) &&
        Objects.equals(this.type, userItemLogResource.type) &&
        Objects.equals(this.user, userItemLogResource.user) &&
        Objects.equals(this.userInventory, userItemLogResource.userInventory);
  }

  // Hash over the same fields used by equals(), keeping the contract.
  @Override
  public int hashCode() {
    return Objects.hash(id, info, item, logDate, type, user, userInventory);
  }


  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("class UserItemLogResource {\n");
    
    sb.append("    id: ").append(toIndentedString(id)).append("\n");
    sb.append("    info: ").append(toIndentedString(info)).append("\n");
    sb.append("    item: ").append(toIndentedString(item)).append("\n");
    sb.append("    logDate: ").append(toIndentedString(logDate)).append("\n");
    sb.append("    type: ").append(toIndentedString(type)).append("\n");
    sb.append("    user: ").append(toIndentedString(user)).append("\n");
    sb.append("    userInventory: ").append(toIndentedString(userInventory)).append("\n");
    sb.append("}");
    return sb.toString();
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces
   * (except the first line).
   */
  private String toIndentedString(java.lang.Object o) {
    if (o == null) {
      return "null";
    }
    return o.toString().replace("\n", "\n    ");
  }

}
knetikmedia/knetikcloud-java-client
src/main/java/com/knetikcloud/model/UserItemLogResource.java
Java
apache-2.0
5,608
// Copyright (c) 2015
// Author: Chrono Law

// Demonstration program for Boost.Thread: construction, joining,
// detaching, interruption, guards, thread groups and call_once.
// Each caseN() function shows one feature; main() runs them in order.
#include <std.hpp>
//using namespace std;
using std::cout;
using std::endl;

//#define BOOST_THREAD_VERSION 4
#include <boost/thread.hpp>
using namespace boost;

//////////////////////////////////////////
#include <boost/chrono.hpp>
using namespace boost::chrono;

// User-defined literals so durations read as 1_s / 200_ms below.
seconds operator"" _s(unsigned long long n)
{
    return seconds(n);
}

milliseconds operator"" _ms(unsigned long long n)
{
    return milliseconds(n);
}

//////////////////////////////////////////
// Basic thread properties: joinable(), get_id(), hardware concurrency.
void case1()
{
    {
        // default-constructed thread owns no thread of execution
        thread t1;
        assert(!t1.joinable());

        thread t2([]{cout << "a thread" << endl;});
        assert(t2.joinable());
    }
    // two not-a-thread objects compare equal by id
    thread t1,t2;
    cout << t1.get_id() << endl;
    assert(t1.get_id() == t2.get_id());

    cout << thread::hardware_concurrency() << endl;
    cout << thread::physical_concurrency() << endl;
}

//////////////////////////////////////////
#include <boost/bind.hpp>

// Busy-loops n times then prints n; used as a simple worker body.
void dummy(int n)
{
    for(int i = 0;i < n; ++i);
    cout << n << endl;
}

// Launching via bind vs. lambda; timed join vs. blocking join.
void case2()
{
    //thread t1(dummy, 100);
    //thread t2(dummy, 500);
    thread t1(bind(dummy, 100));
    thread t2([]{dummy(500);});

    //this_thread::sleep_for(200_ms);
    // NOTE(review): try_join_for's bool result is ignored here — fine for a
    // demo, but t1 may still be running when case2 returns.
    t1.try_join_for(100_ms);
    t2.join();
}

//////////////////////////////////////////
// Detaching: the thread object gives up ownership; sleep gives the
// detached workers a chance to finish before the process moves on.
void case3()
{
    thread t1(dummy, 100);
    t1.detach();
    assert(!t1.joinable());

    thread(dummy, 1000).detach();
    this_thread::sleep_for(200_ms);
}

//////////////////////////////////////////
#include <boost/thread/thread_guard.hpp>
#include <boost/thread/scoped_thread.hpp>

// RAII wrappers: thread_guard joins/detaches an existing thread on scope
// exit; scoped_thread owns the thread it launches.
void case4()
{
    thread t1(dummy, 200);
    thread t2(dummy, 300);

    thread_guard<detach> g1(t1);
    thread_guard<> g2(t2);

    {
        scoped_thread<detach> t1(dummy, 10);
        scoped_thread<> t2(dummy, 20);
    }
    this_thread::sleep_for(100_ms);
}

//////////////////////////////////////////
// Worker with an explicit interruption point; the function-try-block
// catches thread_interrupted thrown from interruption_point().
void to_interrupt(int x)
try
{
    for (int i = 0;i < x; ++i)
    {
        //this_thread::sleep_for(400_ms);
        cout << i << endl;
        this_thread::interruption_point();
    }
}
catch(const thread_interrupted& )
{
    cout << "thread_interrupted" << endl;
}

// Requesting interruption from the launching thread.
void case5()
{
    thread t(to_interrupt,10);
    //this_thread::sleep_for(1_s);
    t.interrupt();
    assert(t.interruption_requested());
    t.join();
}

//////////////////////////////////////////
// Temporarily disabling interruption with disable_interruption and
// re-enabling it inside the loop with restore_interruption.
void to_interrupt2(int x)
try
{
    using namespace this_thread;
    assert(interruption_enabled());
    for (int i = 0;i < x; ++i)
    {
        // while 'di' lives, interruption requests are deferred
        disable_interruption di;
        assert(!interruption_enabled());
        cout << i << endl;
        cout << this_thread::interruption_requested() << endl;
        this_thread::interruption_point();

        // 'ri' re-enables interruption for the rest of the iteration
        restore_interruption ri(di);
        assert(interruption_enabled());
        cout << "can interrupted" << endl;
        cout << this_thread::interruption_requested() << endl;
        this_thread::interruption_point();
    }
    assert(interruption_enabled());
}
catch(const thread_interrupted& )
{
    cout << "[thread_interrupted]" << endl;
}

void case6()
{
    thread t(to_interrupt2,10);
    //this_thread::sleep_for(1_s);
    t.interrupt();
    assert(t.interruption_requested());
    t.join();
}

//////////////////////////////////////////
// thread_group: create several threads and join them all at once.
void case7()
{
    thread_group tg;
    tg.create_thread(bind(dummy, 100));
    tg.create_thread(bind(dummy, 200));

    tg.join_all();
}

//////////////////////////////////////////
int g_count;

// Initializer intended to run exactly once via call_once.
void init_count(int x)
{
    cout << "should call once." << endl;
    g_count = x;
}

void call_func()
{
    // static once_flag shared by every caller of call_func
    static once_flag once;
    call_once(once, init_count, 10);
}

// Two threads race into call_func; init_count must still run once.
void case8()
{
    (scoped_thread<>(call_func));
    (scoped_thread<>(call_func));
}

//////////////////////////////////////////
// Naive recursive Fibonacci (fab(0) == fab(1) == 1); defined but not
// exercised by any of the cases above.
int fab(int n)
{
    if(n == 0 || n == 1)
    {
        return 1;
    }
    return fab(n-1) + fab(n-2);
}

//////////////////////////////////////////
int main()
{
    case1();
    case2();
    case3();
    case4();
    case5();
    case6();
    case7();
    case8();
}
chronolaw/boost_guide
concurrency/thread1.cpp
C++
apache-2.0
4,025
/** \file pos_table.cpp
 * Implementation of class POSTable.
 *
 * \author Jun Jiang
 * \version 0.1
 * \date Sep 11, 2009
 */

#include "pos_table.h"
#include "jma_dictionary.h"
#include "ijma/knowledge.h" // Knowledge::encodeStr()
#include "iconv_utils.h" // MeCab::Iconv

#include <cassert>
#include <fstream>
#include <iostream>
#include <sstream>
// NOTE(review): <strstream>/istrstream has been deprecated since C++98;
// consider istringstream over (text_, length_) when this file is modernized.
#include <strstream>

#define JMA_DEBUG_PRINT_COMBINE 0

using namespace std;

namespace jma
{

// Start with an empty string table per POS format and no combine-rule trie.
POSTable::POSTable()
    : strTableVec_(POS_FORMAT_NUM),
      tableSize_(0),
      ruleRoot_(0)
{
}

POSTable::~POSTable()
{
    delete ruleRoot_;
}

// Loads the POS table from an in-memory dictionary entry. Each line is
// expected as "fullPOS index AlphabetPOS"; malformed lines are skipped.
// Returns false if the dictionary entry is missing/unreadable or an
// encoding conversion fails.
bool POSTable::loadConfig(const char* fileName, MeCab::Iconv& iconv)
{
    assert(fileName);

    // remove the previous table if exists
    tableSize_ = 0;
    for(unsigned int i=0; i<strTableVec_.size(); ++i)
        strTableVec_[i].clear();

    // open file (the "file" lives inside the compiled JMA dictionary)
    const DictUnit* dict = JMA_Dictionary::instance()->getDict(fileName);
    if(! dict)
    {
        cerr << "cannot find configuration file: " << fileName << endl;
        return false;
    }
    istrstream from(dict->text_, dict->length_);
    if(! from)
    {
        cerr << "cannot read configuration file: " << fileName << endl;
        return false;
    }

    // read file
    string line, fullPOS, partPOS, alphabetPOS;
    string::size_type i, j, k;
    const char* whitespaces = " \t";

    // to convert from index string to int value
    istringstream iss;
    int indexValue;

#if JMA_DEBUG_PRINT
    cout << "load POS table: " << fileName << endl;
    cout << "src charset: " << src << endl;
    cout << "dest charset: " << dest << endl;
    cout << "fullPOS\t\tindex\tAlphabet partPOS" << endl;
#endif

    // each line is assumed in the format "fullPOS index AlphabetPOS",
    // those lines not in this format would be ignored
    while(getline(from, line))
    {
        // tolerate CRLF line endings
        line = line.substr(0, line.find('\r'));
        if(line.empty())
            continue;

        // set fullPOS
        j = line.find_first_of(whitespaces);
        if(j == string::npos)
            continue;
        fullPOS = line.substr(0, j);

        // convert encoding
        if(! iconv.convert(&fullPOS))
        {
            cerr << "error to convert encoding for POS string " << fullPOS << endl;
            return false;
        }

        // partPOS is fullPOS truncated at the first wildcard component
        k = fullPOS.find('*');
        if(k == string::npos)
            partPOS = fullPOS;
        else if(k == 0)
            partPOS = "";
        else
            partPOS = fullPOS.substr(0, k-1); // get "AAA" from "AAA,*"

        // set index
        i = line.find_first_not_of(whitespaces, j+1);
        if(i == string::npos)
            continue;
        j = line.find_first_of(whitespaces, i);
        if(j == string::npos)
            continue;
        iss.clear();
        iss.str(line.substr(i, j-i));
        iss >> indexValue;

        // set alphabetPOS
        i = line.find_first_not_of(whitespaces, j+1);
        if(i == string::npos)
            continue;
        j = line.find_first_of(whitespaces, i);
        if(j == string::npos)
            alphabetPOS = line.substr(i);
        else
            alphabetPOS = line.substr(i, j-i);

#if JMA_DEBUG_PRINT
        cout << fullPOS << "\t" << indexValue << "\t" << alphabetPOS << "\t" << partPOS << endl;
#endif

        if(indexValue < 0)
        {
            cerr << "the index code of POS (" << fullPOS << ", " << alphabetPOS << ") should not be a negative value: " << indexValue << endl;
            continue;
        }

        // grow every per-format table so indexValue is addressable
        if(indexValue >= tableSize_)
        {
            tableSize_ = indexValue + 1;
            for(unsigned int i=0; i<strTableVec_.size(); ++i)
                strTableVec_[i].resize(tableSize_);
        }

        strTableVec_[POS_FORMAT_DEFAULT][indexValue] = partPOS;
        strTableVec_[POS_FORMAT_ALPHABET][indexValue] = alphabetPOS;
        strTableVec_[POS_FORMAT_FULL_CATEGORY][indexValue] = fullPOS;
        alphaPOSMap_[alphabetPOS] = indexValue;
    }

#if JMA_DEBUG_PRINT
    cout << "total " << tableSize_ << " POS loaded" << endl << endl;
#endif

    return true;
}

// Returns the POS string for 'index' in the requested format, or ""
// when the index is out of range.
const char* POSTable::getPOS(int index, POSFormat format) const
{
    if(index < 0 || index >= tableSize_)
        return "";

    return strTableVec_[format][index].c_str();
}

// Builds the combine-rule trie from a rule file on disk. Each line is
// "source1 source2 ... target" (alphabet POS names); lines starting with
// ';' or '#' are comments. Requires loadConfig() to have run first so the
// alphabet->index map is populated. Returns false only if the file
// cannot be opened; bad rules are reported and skipped.
bool POSTable::loadCombineRule(const char* fileName)
{
    assert(fileName);

    // remove the previous rule Trie if exists
    delete ruleRoot_;
    ruleRoot_ = new RuleNode(0, tableSize_);

    // open file
    ifstream from(fileName);
    if(! from)
    {
        return false;
    }

    // read file
    string line, pos;
    vector<string> posVec;
    vector<string>::const_iterator it;
    istringstream iss;

#if JMA_DEBUG_PRINT
    cout << "load POS rule: " << fileName << endl;
    cout << "source1 source2 ... target" << endl;
#endif

    // each line is assumed in the format "source1 source2 ... target",
    // those lines not in this format would be ignored
    while(getline(from, line))
    {
        line = line.substr(0, line.find('\r'));
        if (line.empty() || line[0] == ';' || line[0] == '#')
            continue;

        iss.clear();
        posVec.clear();
        iss.str(line);
        while(iss >> pos)
        {
            posVec.push_back(pos);
        }
        if(posVec.size() < 2)
        {
            cerr << "ignore invalid rule: " << line << endl;
            cerr << "it should be \"source1 source2 ... target\"." << endl;
            continue;
        }

#if JMA_DEBUG_PRINT
        for(it=posVec.begin(); it!=posVec.end(); ++it)
        {
            cout << *it << "\t";
        }
        cout << endl;
#endif

        // walk/extend the trie along the source POS sequence
        RuleNode* node = ruleRoot_;
        bool isValid = true;
        for(it=posVec.begin(); it!=posVec.end()-1; ++it)
        {
            int posIndex = getIndexFromAlphaPOS(*it);
            if(posIndex < 0 || posIndex >= tableSize_)
            {
                isValid = false;
                break;
            }

            if(! node->children_[posIndex])
                node->children_[posIndex] = new RuleNode(node->level_+1, tableSize_);

            node = node->children_[posIndex];
        }

        // after the loop, 'it' points at the last element: the target POS
        int targetPOS = getIndexFromAlphaPOS(*it);
        if(! isValid || targetPOS < 0)
        {
            cerr << "ignore invalid rule: " << line << endl;
            cerr << "some POS string in this rule is unknown." << endl;
            continue;
        }

        // save target only if not assigned before
        if(node->target_ < 0)
            node->target_ = targetPOS;
    }

#if JMA_DEBUG_PRINT
    cout << endl;
#endif

    return true;
}

// Finds the longest combine rule starting with 'startPOS' that matches the
// POS ids of the node chain beginning at nextNode. Returns the deepest rule
// node carrying a target, or 0 when no rule matches (or no rules loaded).
// NOTE(review): startPOS is asserted non-negative but not checked against
// the trie's fan-out; confirm callers never pass an out-of-table POS id.
const RuleNode* POSTable::getCombineRule(int startPOS, const MeCab::Node* nextNode) const
{
    assert(startPOS >=0 && nextNode);

    const RuleNode* ruleNode = 0;
    const RuleNode* result = 0;

    // check whether "compound.def" is loaded
    if(ruleRoot_)
        ruleNode = ruleRoot_->children_[startPOS];

    const MeCab::Node* tokenNode = nextNode;
    while(ruleNode)
    {
        // remember the deepest node that completes a rule so far
        if(ruleNode->target_ >= 0)
            result = ruleNode;

        // in case of tokenNode is EOS node
        if(! tokenNode->next)
            break;

        ruleNode = ruleNode->children_[(int)tokenNode->posid];
        tokenNode = tokenNode->next;
    }

#if JMA_DEBUG_PRINT_COMBINE
    if(result)
        cout << result->level_ << " POSes are combined into " << strTableVec_[POS_FORMAT_ALPHABET][result->target_] << endl;
    else
        cout << "no rule found" << endl;
#endif

    return result;
}

// Maps an alphabet POS string to its table index, or -1 if unknown.
int POSTable::getIndexFromAlphaPOS(const std::string& posStr) const
{
    map<std::string, int>::const_iterator it = alphaPOSMap_.find(posStr);
    if(it != alphaPOSMap_.end())
        return it->second;

    return -1;
}

} // namespace jma
izenecloud/ijma
source/src/pos_table.cpp
C++
apache-2.0
7,723
using System.Reflection; using System.Runtime.InteropServices; // General Information about an assembly is controlled through the following // set of attributes. Change these attribute values to modify the information // associated with an assembly. [assembly: AssemblyTitle("XAML Magic")] [assembly: AssemblyDescription("XAML Magic is a visual studio extension, which formats XAML source code by sorting the attributes based on their importance. This tool can help you/your team maintain a better XAML coding style as well as a much better XAML readability.")] [assembly: AssemblyConfiguration("")] [assembly: AssemblyCompany("Grochocki")] [assembly: AssemblyProduct("XAML Magic")] [assembly: AssemblyCopyright("Copyright © 2016")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] // Setting ComVisible to false makes the types in this assembly not visible // to COM components. If you need to access a type in this assembly from // COM, set the ComVisible attribute to true on that type. [assembly: ComVisible(false)] // The following GUID is for the ID of the typelib if this project is exposed to COM [assembly: Guid("4b0efa15-b5d1-42bc-bb9f-3c3faf7a04ec")] // Version information for an assembly consists of the following four values: // // Major Version // Minor Version // Build Number // Revision // // You can specify all the values or you can default the Build and Revision Numbers // by using the '*' as shown below: // [assembly: AssemblyVersion("1.0.*")] [assembly: AssemblyVersion("1.2.1.0")] [assembly: AssemblyFileVersion("1.2.1.0")]
grochocki/XamlMagic
Service/Properties/AssemblyInfo.cs
C#
apache-2.0
1,599
namespace MC.MCRF.NLP.DocumentAbstraction { partial class AorticAneurysmAssessment { /// <summary> /// Required designer variable. /// </summary> private System.ComponentModel.IContainer components = null; /// <summary> /// Clean up any resources being used. /// </summary> /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param> protected override void Dispose(bool disposing) { if (disposing && (components != null)) { components.Dispose(); } base.Dispose(disposing); } #region Component Designer generated code /// <summary> /// Required method for Designer support - do not modify /// the contents of this method with the code editor. /// </summary> private void InitializeComponent() { this.chkInvalidKeywords = new System.Windows.Forms.CheckBox(); this.chkNegation = new System.Windows.Forms.CheckBox(); this.chkMeasurementDescriptors = new System.Windows.Forms.CheckBox(); this.chkLocation = new System.Windows.Forms.CheckBox(); this.chkMeasurement = new System.Windows.Forms.CheckBox(); this.chkExplicitMention = new System.Windows.Forms.CheckBox(); this.chkKeywordsRelated = new System.Windows.Forms.CheckBox(); this.lstUnrelatedTerms = new System.Windows.Forms.ListBox(); this.lblUnrelated = new System.Windows.Forms.Label(); this.cmdClearSelection = new System.Windows.Forms.Button(); this.radSubjectPatient = new System.Windows.Forms.RadioButton(); this.lblSubject = new System.Windows.Forms.Label(); this.radSubjectFamilyMember = new System.Windows.Forms.RadioButton(); this.radSubjectNone = new System.Windows.Forms.RadioButton(); this.SuspendLayout(); // // chkInvalidKeywords // this.chkInvalidKeywords.AutoSize = true; this.chkInvalidKeywords.Enabled = false; this.chkInvalidKeywords.Location = new System.Drawing.Point(318, 67); this.chkInvalidKeywords.Name = "chkInvalidKeywords"; this.chkInvalidKeywords.Size = new System.Drawing.Size(294, 17); this.chkInvalidKeywords.TabIndex = 27; this.chkInvalidKeywords.Text = "Not &all of the 
keywords are related to the aortic aneurysm"; this.chkInvalidKeywords.UseVisualStyleBackColor = true; this.chkInvalidKeywords.CheckedChanged += new System.EventHandler(this.chkInvalidKeywords_CheckedChanged); // // chkNegation // this.chkNegation.AutoSize = true; this.chkNegation.Enabled = false; this.chkNegation.Location = new System.Drawing.Point(18, 67); this.chkNegation.Name = "chkNegation"; this.chkNegation.Size = new System.Drawing.Size(294, 17); this.chkNegation.TabIndex = 23; this.chkNegation.Text = "The aortic aneurysm is negated (does not e&xist/ruled out)"; this.chkNegation.UseVisualStyleBackColor = true; // // chkMeasurementDescriptors // this.chkMeasurementDescriptors.AutoSize = true; this.chkMeasurementDescriptors.Enabled = false; this.chkMeasurementDescriptors.Location = new System.Drawing.Point(318, 44); this.chkMeasurementDescriptors.Name = "chkMeasurementDescriptors"; this.chkMeasurementDescriptors.Size = new System.Drawing.Size(311, 17); this.chkMeasurementDescriptors.TabIndex = 26; this.chkMeasurementDescriptors.Text = "&Descriptors about the aneurysm measurement are mentioned"; this.chkMeasurementDescriptors.UseVisualStyleBackColor = true; // // chkLocation // this.chkLocation.AutoSize = true; this.chkLocation.Enabled = false; this.chkLocation.Location = new System.Drawing.Point(18, 44); this.chkLocation.Name = "chkLocation"; this.chkLocation.Size = new System.Drawing.Size(225, 17); this.chkLocation.TabIndex = 24; this.chkLocation.Text = "The &location of the aneurysm is mentioned"; this.chkLocation.UseVisualStyleBackColor = true; // // chkMeasurement // this.chkMeasurement.AutoSize = true; this.chkMeasurement.Enabled = false; this.chkMeasurement.Location = new System.Drawing.Point(318, 23); this.chkMeasurement.Name = "chkMeasurement"; this.chkMeasurement.Size = new System.Drawing.Size(287, 17); this.chkMeasurement.TabIndex = 25; this.chkMeasurement.Text = "A n&umerical measurement of the aneurysm is mentioned"; 
this.chkMeasurement.UseVisualStyleBackColor = true; // // chkExplicitMention // this.chkExplicitMention.AutoSize = true; this.chkExplicitMention.Enabled = false; this.chkExplicitMention.Location = new System.Drawing.Point(18, 23); this.chkExplicitMention.Name = "chkExplicitMention"; this.chkExplicitMention.Size = new System.Drawing.Size(205, 17); this.chkExplicitMention.TabIndex = 22; this.chkExplicitMention.Text = "Aortic aneurysm is explicitly &mentioned"; this.chkExplicitMention.UseVisualStyleBackColor = true; // // chkKeywordsRelated // this.chkKeywordsRelated.AutoSize = true; this.chkKeywordsRelated.Location = new System.Drawing.Point(0, 0); this.chkKeywordsRelated.Name = "chkKeywordsRelated"; this.chkKeywordsRelated.Size = new System.Drawing.Size(367, 17); this.chkKeywordsRelated.TabIndex = 21; this.chkKeywordsRelated.Text = "&Keywords are related to aortic aneurysm (description, measurement, etc.)"; this.chkKeywordsRelated.UseVisualStyleBackColor = true; this.chkKeywordsRelated.CheckedChanged += new System.EventHandler(this.chkKeywordsRelated_CheckedChanged); // // lstUnrelatedTerms // this.lstUnrelatedTerms.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom) | System.Windows.Forms.AnchorStyles.Left))); this.lstUnrelatedTerms.Enabled = false; this.lstUnrelatedTerms.FormattingEnabled = true; this.lstUnrelatedTerms.Location = new System.Drawing.Point(423, 90); this.lstUnrelatedTerms.Name = "lstUnrelatedTerms"; this.lstUnrelatedTerms.SelectionMode = System.Windows.Forms.SelectionMode.MultiSimple; this.lstUnrelatedTerms.Size = new System.Drawing.Size(206, 82); this.lstUnrelatedTerms.TabIndex = 29; // // lblUnrelated // this.lblUnrelated.AutoSize = true; this.lblUnrelated.Enabled = false; this.lblUnrelated.Location = new System.Drawing.Point(337, 91); this.lblUnrelated.Name = "lblUnrelated"; this.lblUnrelated.Size = new System.Drawing.Size(84, 13); this.lblUnrelated.TabIndex = 30; 
this.lblUnrelated.Text = "Unrelated terms:"; // // cmdClearSelection // this.cmdClearSelection.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Left))); this.cmdClearSelection.Enabled = false; this.cmdClearSelection.Location = new System.Drawing.Point(362, 149); this.cmdClearSelection.Name = "cmdClearSelection"; this.cmdClearSelection.Size = new System.Drawing.Size(55, 23); this.cmdClearSelection.TabIndex = 31; this.cmdClearSelection.Text = "&Reset"; this.cmdClearSelection.UseVisualStyleBackColor = true; this.cmdClearSelection.Click += new System.EventHandler(this.cmdClearSelection_Click); // // radSubjectPatient // this.radSubjectPatient.AutoSize = true; this.radSubjectPatient.Checked = true; this.radSubjectPatient.Enabled = false; this.radSubjectPatient.Location = new System.Drawing.Point(31, 113); this.radSubjectPatient.Name = "radSubjectPatient"; this.radSubjectPatient.Size = new System.Drawing.Size(58, 17); this.radSubjectPatient.TabIndex = 32; this.radSubjectPatient.TabStop = true; this.radSubjectPatient.Text = "Patient"; this.radSubjectPatient.UseVisualStyleBackColor = true; // // lblSubject // this.lblSubject.AutoSize = true; this.lblSubject.Enabled = false; this.lblSubject.Location = new System.Drawing.Point(15, 97); this.lblSubject.Name = "lblSubject"; this.lblSubject.Size = new System.Drawing.Size(150, 13); this.lblSubject.TabIndex = 33; this.lblSubject.Text = "Subject of the aortic aneurysm"; // // radSubjectFamilyMember // this.radSubjectFamilyMember.AutoSize = true; this.radSubjectFamilyMember.Enabled = false; this.radSubjectFamilyMember.Location = new System.Drawing.Point(31, 130); this.radSubjectFamilyMember.Name = "radSubjectFamilyMember"; this.radSubjectFamilyMember.Size = new System.Drawing.Size(95, 17); this.radSubjectFamilyMember.TabIndex = 34; this.radSubjectFamilyMember.Text = "Family Member"; this.radSubjectFamilyMember.UseVisualStyleBackColor = true; // // 
radSubjectNone // this.radSubjectNone.AutoSize = true; this.radSubjectNone.Enabled = false; this.radSubjectNone.Location = new System.Drawing.Point(31, 146); this.radSubjectNone.Name = "radSubjectNone"; this.radSubjectNone.Size = new System.Drawing.Size(115, 17); this.radSubjectNone.TabIndex = 35; this.radSubjectNone.Text = "None/Hypothetical"; this.radSubjectNone.UseVisualStyleBackColor = true; // // AorticAneurysmAssessment // this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F); this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font; this.Controls.Add(this.radSubjectNone); this.Controls.Add(this.radSubjectFamilyMember); this.Controls.Add(this.lblSubject); this.Controls.Add(this.radSubjectPatient); this.Controls.Add(this.cmdClearSelection); this.Controls.Add(this.lblUnrelated); this.Controls.Add(this.lstUnrelatedTerms); this.Controls.Add(this.chkInvalidKeywords); this.Controls.Add(this.chkNegation); this.Controls.Add(this.chkMeasurementDescriptors); this.Controls.Add(this.chkLocation); this.Controls.Add(this.chkMeasurement); this.Controls.Add(this.chkExplicitMention); this.Controls.Add(this.chkKeywordsRelated); this.Name = "AorticAneurysmAssessment"; this.Size = new System.Drawing.Size(644, 181); this.ResumeLayout(false); this.PerformLayout(); } #endregion private System.Windows.Forms.CheckBox chkInvalidKeywords; private System.Windows.Forms.CheckBox chkNegation; private System.Windows.Forms.CheckBox chkMeasurementDescriptors; private System.Windows.Forms.CheckBox chkLocation; private System.Windows.Forms.CheckBox chkMeasurement; private System.Windows.Forms.CheckBox chkExplicitMention; private System.Windows.Forms.CheckBox chkKeywordsRelated; private System.Windows.Forms.ListBox lstUnrelatedTerms; private System.Windows.Forms.Label lblUnrelated; private System.Windows.Forms.Button cmdClearSelection; private System.Windows.Forms.RadioButton radSubjectPatient; private System.Windows.Forms.Label lblSubject; private System.Windows.Forms.RadioButton 
radSubjectFamilyMember; private System.Windows.Forms.RadioButton radSubjectNone; } }
lrasmus/DocumentAbstraction
AorticAneurysm/AorticAneurysmAssessment.Designer.cs
C#
apache-2.0
12,098
/* +-----------------------------------------------------------------------------+ @grahamzibar presents: _______ _______ _______ _______ _______ _______ ( )( ___ )( ____ )( ____ )|\ /|( ____ \|\ /|( ____ \ | () () || ( ) || ( )|| ( )|| ) ( || ( \/| ) ( || ( \/ | || || || | | || (____)|| (____)|| (___) || (__ | | | || (_____ | |(_)| || | | || __)| _____)| ___ || __) | | | |(_____ ) | | | || | | || (\ ( | ( | ( ) || ( | | | | ) | | ) ( || (___) || ) \ \__| ) | ) ( || (____/\| (___) |/\____) | |/ \|(_______)|/ \__/|/ |/ \|(_______/(_______)\_______) * version 0.1.0 - ALPHA * https://www.github.com/grahamzibar/Morpheus * What if I told you... this is not an animation library? +-----------------------------------------------------------------------------+ */ (function MorpheusModule(_win) { if (!_win.morpheus) _win.morpheus = new Object(); var Morpheus = _win.morpheus; /* CONSTANTS */ Morpheus.LINEAR = 'linear'; Morpheus.EASE = 'ease'; Morpheus.EASE_IN = 'ease-in'; Morpheus.EASE_OUT = 'ease-out'; Morpheus.EASE_IN_OUT = 'ease-in-out'; Morpheus.EASE_OUT_IN = 'ease-out-in'; Morpheus.LEFT = 'left'; Morpheus.RIGHT = 'right'; Morpheus.CENTER = 'center'; Morpheus.THREE_D = 'preserve-3d'; Morpheus.FLAT = 'flat'; Morpheus.PIXELS = 'px'; Morpheus.PERCENT = '%'; Morpheus.RADIANS = 'rad'; Morpheus.DEGREES = 'deg'; Morpheus.TRANSFORM = '-webkit-transform'; /* CLASSES */ var Vector = function Vector(x, y, z) { this.x = x != null ? x : 0.0; this.y = y != null ? y : 0.0; this.z = z != null ? 
z : 0.0; }; Vector.prototype.set = function(x, y, z) { this.x = x; if (y != null) { this.y = y; if (z != null) this.z = z; } }; var Collection = function Collection() { var _collection = new Array(); var _cache = {}; this.collection = _collection; this.add = function(key, value) { var index = _cache[key]; if (typeof index === 'number') _collection[index] = value; else { index = _cache[key] = _collection.length; _collection[index] = value; } }; this.get = function(key) { var index = _cache[key]; if (!index && index !== 0) return null; return _collection[index]; }; this.remove = function(key) { if (_cache[key]) _collection.splice(_cache[key], 1); }; }; var Style = function Style(_name, _value, _unit) { this.name = _name; this.value = _value; this.unit = _unit != null ? _unit : ''; }; var StyleCollection = function StyleCollection() { this.inheritFrom = Collection; this.inheritFrom(); delete this.inheritFrom; var add = this.add; this.add = function(style) { add(style.name, style); }; }; var Transition = function Transition(_property) { this.property = _property; this.duration = 500; this.delay = 0; this.timing = Morpheus.LINEAR; }; var TransitionCollection = function TransitionCollection() { this.inheritFrom = Collection; this.inheritFrom(); delete this.inheritFrom; var add = this.add; this.add = function(transition) { add(transition.property, transition); }; }; var Transform = function Transform() { this.translate = new Vector(); this.translateUnit = 'px'; this.scale = new Vector(1, 1, 1); this.rotate = new Vector(); this.rotateUnit = 'rad'; //this.skew? this.origin = new Vector(Morpheus.CENTER, Morpheus.CENTER, Morpheus.CENTER); this.style = Morpheus.THREE_D; }; // How should the renderer work? 
Morpheus.Renderer = function Renderer(_el) { var _render = false; var onEnterFrame = function(e) { if (_render) { _render = false; var output = ''; var styles = _el.styles; var transitions = _el.transitions; var transform = _el.transform; if (styles) { for (var i = 0, len = styles.collection.length; i < len; i++) output += renderStyle(styles.collection[i]); } if (transitions) output += renderTransitionCollection(transitions); if (transform) output += renderTransform(transform); _el.css = output; } window.requestAnimationFrame(onEnterFrame); }; var renderStyle = function(style) { var output = style.name; output += ':'; output += style.value; output += style.unit; output += ';'; return output; }; var renderTransitionCollection = function(tc) { var output = 'transition:'; for (var i = 0, ts = tc.collection, len = ts.length; i < len; i++) { if (i) output += ', '; output += renderTransition(ts[i]); } output += ';'; return output; }; var renderTransition = function(transition) { var space = ' '; var t = transition.property; t += space; t += transition.duration; t += 'ms'; t += space; t += transition.timing; t += space; t += transition.delay; t += 'ms'; return t; }; var renderTransform = function(transform) { var space = ' '; var t = '-webkit-transform:translate3d'; t += renderVector(transform.translate, transform.translateUnit); t += space; t += 'rotateX('; t += transform.rotate.x; t += transform.rotateUnit; t += ')'; t += 'rotateY('; t += transform.rotate.y; t += transform.rotateUnit; t += ')'; t += 'rotateZ('; t += transform.rotate.z; t += transform.rotateUnit; t += ')'; t += space; t += 'scale3d'; t += renderVector(transform.scale); t += ';'; t += '-webkit-transform-origin:'; t += transform.origin.x; t += space; t += transform.origin.y; t += space; t += transform.origin.z; t += ';'; t += '-webkit-transform-style:'; t += transform.style; t += ';'; return t; }; var renderVector = function(vector, unit) { unit = unit != null ? 
unit : ''; var coord = '('; coord += vector.x; if (vector.x) coord += unit; coord += ', '; coord += vector.y; if (vector.y) coord += unit; coord += ', '; coord += vector.z; if (vector.z) coord += unit; coord += ')'; return coord; }; this.step = function() { _render = true; }; window.requestAnimationFrame(onEnterFrame); }; /* FUNCTIONS */ var getStyle = function(property) { if (document.defaultView && document.defaultView.getComputedStyle) return document.defaultView.getComputedStyle(this, false).getPropertyValue(property); if (this.currentStyle) { return this.currentStyle[property.replace(/\-(\w)/g, function (strMatch, p1) { return p1.toUpperCase(); })]; } return null; }; var getStyleNumber = function(property, unit) { var value = this.getStyle(property); if (unit) value = value.split(unit)[0]; return Number(value); }; var setStyle = function(property, value, unit) { if (!this.styles) this.styles = new StyleCollection(); this.styles.add(new Style(property, value, unit)); if (!this.renderer) this.renderer = new Morpheus.Renderer(this); this.renderer.step(); }; var setCSS = function(css) { if (typeof this.style.cssText != 'undefined') this.style.cssText = css; else this.setAttribute('style', css); }; var translation = function(x, y, z, unit) { if (!this.transform) this.transform = new Transform(); this.transform.translate.x = x; this.transform.translate.y = y; if (z != null) { if (arguments.length === 3 && typeof z !== 'number') this.transform.translateUnit = z; else this.transform.z = z; } if (unit) this.transform.translateUnit = unit; if (!this.renderer) this.renderer = new Morpheus.Renderer(this); this.renderer.step(); }; var scale = function(x, y, z) { if (!this.transform) this.transform = new Transform(); this.transform.scale.x = x; this.transform.scale.y = y; if (z != null) this.transform.scale.z = z; if (!this.renderer) this.renderer = new Morpheus.Renderer(this); this.renderer.step(); }; var rotate = function(x, y, z, unit) { if (!this.transform) 
this.transform = new Transform(); this.transform.rotate.x = x; this.transform.rotate.y = y; this.transform.rotate.z = z; if (unit) this.transform.rotateUnit = unit; if (!this.renderer) this.renderer = new Morpheus.Renderer(this); this.renderer.step(); }; var setOrigin = function(x, y, z) { if (!this.transform) this.transform = new Transform(); this.transform.origin.x = x; if (y != null) { this.transform.origin.y = y; if (z != null) this.transform.origin.z = z; } if (!this.renderer) this.renderer = new Morpheus.Renderer(this); this.renderer.step(); }; var addTransition = function(property, duration, timing, delay) { if (!this.transitions) this.transitions = new TransitionCollection(); var transition = this.transitions.get(property); if (!transition) transition = new Transition(property); if (duration) transition.duration = duration; if (timing) transition.timing = timing; if (delay) transition.delay = delay; this.transitions.add(transition); }; var removeTransition = function(property) { if (this.transitions) this.transitions.remove(property); }; /* EXTENSION */ var HTMLElement = _win.HTMLElement; HTMLElement.prototype.styles = null; HTMLElement.prototype.transitions = null; HTMLElement.prototype.transform = null; HTMLElement.prototype.__defineSetter__('css', setCSS); HTMLElement.prototype.getStyle = getStyle; HTMLElement.prototype.getStyleNumber = getStyleNumber; HTMLElement.prototype.setStyle = setStyle; HTMLElement.prototype.translation = translation; HTMLElement.prototype.scale = scale; HTMLElement.prototype.rotate = rotate; HTMLElement.prototype.setOrigin = setOrigin; HTMLElement.prototype.addTransition = addTransition; HTMLElement.prototype.removeTransition = removeTransition; if (!HTMLElement.prototype.dispatchEvent) { // implement } // THIS NEEDS TO CHANGE, YO -- should be in the renderer class. 
(function(window) { var time, vendor, vendors, _i, _len; time = 0; vendors = ['ms', 'moz', 'webkit', 'o']; for (_i = 0, _len = vendors.length; _i < _len; _i++) { vendor = vendors[_i]; if (!(!window.requestAnimationFrame)) continue; window.requestAnimationFrame = window[vendor + 'RequestAnimationFrame']; window.cancelAnimationFrame = window[vendor + 'CancelAnimationFrame']; } if (!window.requestAnimationFrame) { window.requestAnimationFrame = function(callback) { var delta, now, old; now = new Date().getTime(); delta = Math.max(0, 16 - (now - old)); setTimeout(function() { return callback(time + delta); }, delta); return old = now + delta; }; } if (!window.cancelAnimationFrame) { window.cancelAnimationFrame = function(id) { return clearTimeout(id); }; } })(_win); })(window); /* +-----------------------------------------------------------------------------+ written & directed by: _ _ _ ___ ___ ___| |_ ___ _____ ___|_| |_ ___ ___ | . | _| .'| | .'| |- _| | . | .'| _| |_ |_| |__,|_|_|__,|_|_|_|___|_|___|__,|_| |___| +-----------------------------------------------------------------------------+ */
grahamzibar/AirChat
src/js/ext/morpheus/Morpheus.js
JavaScript
apache-2.0
11,243
using System.Collections.Generic; using System.Runtime.CompilerServices; using System.Web; using System.Web.Caching; namespace Smart.Core.Caching { /// <summary> /// 基于当前应用程序的 System.Web.Caching.Cache 的缓存服务 /// </summary> public class HttpCache : DisposableObject, ICache { private static readonly Cache _cache = HttpRuntime.Cache; /// <summary> /// 获取缓存 /// </summary> /// <param name="key">缓存键值</param> /// <returns>检索到的缓存项,未找到该键时为 null。</returns> public object Get(string key) { var cache = _cache.Get(key); return cache; } /// <summary> /// 将对象添加到缓存,如果已经存在则更新缓存 /// </summary> /// <param name="key">缓存键值</param> /// <param name="value">缓存信息</param> /// <returns>如果添加的项之前存储在缓存中,则为表示该项的对象;否则为 null。</returns> void ICache.Set(string key, CacheInfo value) { _cache.Add(key, value.Value, null, value.AbsoluteExpiration, value.SlidingExpiration, CacheItemPriority.Default, null); } /// <summary> /// 从缓存中移除指定项 /// </summary> /// <typeparam name="T">缓存数据类型</typeparam> /// <param name="key">缓存键值</param> [MethodImpl(MethodImplOptions.Synchronized)] public void Remove(string key) { _cache.Remove(key); } /// <summary> /// 从缓存中移除全部满足条件的项 /// </summary> /// <param name="match">移除条件</param> [MethodImpl(MethodImplOptions.Synchronized)] public IEnumerable<string> GetAllKeys() { var caches = _cache.GetEnumerator(); while (caches.MoveNext()) { yield return caches.Key.ToString(); } } } }
SmallAnts/Smart
Src/Framework/Smart.Core.Shared/Caching/HttpCache.cs
C#
apache-2.0
2,071
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.shardingsphere.elasticjob.cloud.console; import org.apache.shardingsphere.elasticjob.cloud.console.config.advice.ConsoleExceptionHandler; import org.apache.shardingsphere.elasticjob.cloud.console.controller.CloudAppController; import org.apache.shardingsphere.elasticjob.cloud.console.controller.CloudJobController; import org.apache.shardingsphere.elasticjob.cloud.console.controller.CloudOperationController; import org.apache.shardingsphere.elasticjob.cloud.scheduler.env.RestfulServerConfiguration; import org.apache.shardingsphere.elasticjob.cloud.scheduler.mesos.ReconcileService; import org.apache.shardingsphere.elasticjob.cloud.scheduler.producer.ProducerManager; import org.apache.shardingsphere.elasticjob.reg.base.CoordinatorRegistryCenter; import org.apache.shardingsphere.elasticjob.restful.NettyRestfulService; import org.apache.shardingsphere.elasticjob.restful.NettyRestfulServiceConfiguration; import org.apache.shardingsphere.elasticjob.restful.RestfulService; /** * Console bootstrap for Cloud. 
*/ public class ConsoleBootstrap { private final RestfulService restfulService; public ConsoleBootstrap(final CoordinatorRegistryCenter regCenter, final RestfulServerConfiguration config, final ProducerManager producerManager, final ReconcileService reconcileService) { CloudJobController.init(regCenter, producerManager); CloudAppController.init(regCenter, producerManager); CloudOperationController.init(regCenter, reconcileService); NettyRestfulServiceConfiguration restfulServiceConfiguration = new NettyRestfulServiceConfiguration(config.getPort()); restfulServiceConfiguration.addControllerInstance(new CloudJobController(), new CloudAppController(), new CloudOperationController()); restfulServiceConfiguration.addExceptionHandler(Exception.class, new ConsoleExceptionHandler()); restfulService = new NettyRestfulService(restfulServiceConfiguration); } /** * Startup RESTful server. */ public void start() { restfulService.startup(); } /** * Stop RESTful server. */ public void stop() { restfulService.shutdown(); } }
dangdangdotcom/elastic-job
elasticjob-cloud/elasticjob-cloud-scheduler/src/main/java/org/apache/shardingsphere/elasticjob/cloud/console/ConsoleBootstrap.java
Java
apache-2.0
3,014
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.cache.client.internal.pooling; import static java.util.concurrent.TimeUnit.NANOSECONDS; import static org.apache.geode.distributed.ConfigurationProperties.LOCATORS; import static org.apache.geode.distributed.ConfigurationProperties.MCAST_PORT; import static org.apache.geode.internal.logging.LogWriterLevel.FINEST; import static org.apache.geode.test.awaitility.GeodeAwaitility.await; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.Assert.fail; import java.io.InputStream; import java.io.OutputStream; import java.net.Socket; import java.nio.ByteBuffer; import java.util.Collections; import java.util.Properties; import java.util.Set; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import java.util.function.BooleanSupplier; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; import org.apache.geode.CancelCriterion; import org.apache.geode.cache.CacheClosedException; import 
org.apache.geode.cache.client.AllConnectionsInUseException; import org.apache.geode.cache.client.NoAvailableServersException; import org.apache.geode.cache.client.internal.ClientUpdater; import org.apache.geode.cache.client.internal.Connection; import org.apache.geode.cache.client.internal.ConnectionFactory; import org.apache.geode.cache.client.internal.ConnectionStats; import org.apache.geode.cache.client.internal.Endpoint; import org.apache.geode.cache.client.internal.EndpointManager; import org.apache.geode.cache.client.internal.EndpointManagerImpl; import org.apache.geode.cache.client.internal.Op; import org.apache.geode.cache.client.internal.QueueManager; import org.apache.geode.cache.client.internal.ServerDenyList; import org.apache.geode.distributed.DistributedMember; import org.apache.geode.distributed.DistributedSystem; import org.apache.geode.distributed.internal.ServerLocation; import org.apache.geode.distributed.internal.membership.InternalDistributedMember; import org.apache.geode.internal.cache.PoolStats; import org.apache.geode.internal.cache.tier.sockets.ServerQueueStatus; import org.apache.geode.internal.logging.InternalLogWriter; import org.apache.geode.internal.logging.LocalLogWriter; import org.apache.geode.test.awaitility.GeodeAwaitility; import org.apache.geode.test.dunit.ThreadUtils; import org.apache.geode.test.dunit.WaitCriterion; import org.apache.geode.test.junit.categories.ClientServerTest; @Category({ClientServerTest.class}) public class ConnectionManagerJUnitTest { private static final long TIMEOUT_MILLIS = 30 * 1000; // Some machines do not have a monotonic clock. 
private static final long ALLOWABLE_ERROR_IN_MILLIS = 100; ConnectionManager manager; private InternalLogWriter logger; protected DummyFactory factory; private DistributedSystem ds; private ScheduledExecutorService background; protected EndpointManager endpointManager; private CancelCriterion cancelCriterion; private PoolStats poolStats; @Before public void setUp() { this.logger = new LocalLogWriter(FINEST.intLevel(), System.out); factory = new DummyFactory(); Properties properties = new Properties(); properties.put(MCAST_PORT, "0"); properties.put(LOCATORS, ""); ds = DistributedSystem.connect(properties); background = Executors.newSingleThreadScheduledExecutor(); poolStats = new PoolStats(ds, "connectionManagerJUnitTest"); endpointManager = new EndpointManagerImpl("pool", ds, ds.getCancelCriterion(), poolStats); cancelCriterion = new CancelCriterion() { @Override public String cancelInProgress() { return null; } @Override public RuntimeException generateCancelledException(Throwable e) { return null; } }; } @After public void tearDown() throws InterruptedException { ds.disconnect(); if (manager != null) { manager.close(false); } background.shutdownNow(); } @Test public void testAddVarianceToInterval() { assertThat(ConnectionManagerImpl.addVarianceToInterval(0)).as("Zero gets zero variance") .isEqualTo(0); assertThat(ConnectionManagerImpl.addVarianceToInterval(300000)) .as("Large value gets +/-10% variance").isNotEqualTo(300000).isGreaterThanOrEqualTo(270000) .isLessThanOrEqualTo(330000); assertThat(ConnectionManagerImpl.addVarianceToInterval(9)).as("Small value gets +/-1 variance") .isNotEqualTo(9).isGreaterThanOrEqualTo(8).isLessThanOrEqualTo(10); } @Test public void testGet() throws InterruptedException, AllConnectionsInUseException, NoAvailableServersException { manager = new ConnectionManagerImpl("pool", factory, endpointManager, 3, 0, -1, -1, logger, 60 * 1000, cancelCriterion, poolStats); manager.start(background); Connection conn[] = new Connection[4]; 
conn[0] = manager.borrowConnection(0); Assert.assertEquals(1, factory.creates); manager.returnConnection(conn[0]); conn[0] = manager.borrowConnection(0); Assert.assertEquals(1, factory.creates); conn[1] = manager.borrowConnection(0); manager.returnConnection(conn[0]); manager.returnConnection(conn[1]); Assert.assertEquals(2, factory.creates); conn[0] = manager.borrowConnection(0); conn[1] = manager.borrowConnection(0); conn[2] = manager.borrowConnection(0); Assert.assertEquals(3, factory.creates); try { conn[4] = manager.borrowConnection(10); fail("Should have received an all connections in use exception"); } catch (AllConnectionsInUseException e) { // expected exception } } @Test public void testPrefill() throws InterruptedException { manager = new ConnectionManagerImpl("pool", factory, endpointManager, 10, 2, -1, -1, logger, 60 * 1000, cancelCriterion, poolStats); manager.start(background); final String descrip = manager.toString(); WaitCriterion ev = new WaitCriterion() { @Override public boolean done() { return factory.creates == 2 && factory.destroys == 0; } @Override public String description() { return "waiting for manager " + descrip; } }; GeodeAwaitility.await().untilAsserted(ev); } @Test public void testInvalidateConnection() throws InterruptedException, AllConnectionsInUseException, NoAvailableServersException { manager = new ConnectionManagerImpl("pool", factory, endpointManager, 10, 0, 0L, -1, logger, 60 * 1000, cancelCriterion, poolStats); manager.start(background); Connection conn = manager.borrowConnection(0); Assert.assertEquals(1, factory.creates); Assert.assertEquals(0, factory.destroys); conn.destroy(); manager.returnConnection(conn); Assert.assertEquals(1, factory.creates); Assert.assertEquals(1, factory.destroys); conn = manager.borrowConnection(0); Assert.assertEquals(2, factory.creates); Assert.assertEquals(1, factory.destroys); } @Test public void testInvalidateServer() throws InterruptedException, AllConnectionsInUseException, 
NoAvailableServersException { manager = new ConnectionManagerImpl("pool", factory, endpointManager, 10, 0, -1, -1, logger, 60 * 1000, cancelCriterion, poolStats); manager.start(background); ServerLocation server1 = new ServerLocation("localhost", 1); ServerLocation server2 = new ServerLocation("localhost", 2); factory.nextServer = server1; Connection conn1 = manager.borrowConnection(0); Connection conn2 = manager.borrowConnection(0); Connection conn3 = manager.borrowConnection(0); factory.nextServer = server2; Connection conn4 = manager.borrowConnection(0); Assert.assertEquals(4, factory.creates); Assert.assertEquals(0, factory.destroys); manager.returnConnection(conn2); endpointManager.serverCrashed(conn2.getEndpoint()); Assert.assertEquals(3, factory.destroys); conn1.destroy(); manager.returnConnection(conn1); Assert.assertEquals(3, factory.destroys); manager.returnConnection(conn3); manager.returnConnection(conn4); Assert.assertEquals(3, factory.destroys); manager.borrowConnection(0); Assert.assertEquals(4, factory.creates); Assert.assertEquals(3, factory.destroys); } @Test public void testIdleExpiration() throws InterruptedException, AllConnectionsInUseException, NoAvailableServersException { final long idleTimeoutMillis = 300; manager = new ConnectionManagerImpl("pool", factory, endpointManager, 5, 2, idleTimeoutMillis, -1, logger, 60 * 1000, cancelCriterion, poolStats); manager.start(background); { factory.waitWhile(() -> factory.creates < 2); Assert.assertEquals(2, factory.creates); Assert.assertEquals(0, factory.destroys); Assert.assertEquals(0, factory.closes); Assert.assertEquals(0, poolStats.getIdleExpire()); // no need to wait; dangerous because it gives connections a chance to expire // //wait for prefill task to finish. 
// Thread.sleep(100); } Connection conn1 = manager.borrowConnection(500); Connection conn2 = manager.borrowConnection(500); Connection conn3 = manager.borrowConnection(500); Connection conn4 = manager.borrowConnection(500); Connection conn5 = manager.borrowConnection(500); // wait to make sure checked out connections aren't timed out Thread.sleep(idleTimeoutMillis * 2); Assert.assertEquals(5, factory.creates); Assert.assertEquals(0, factory.destroys); Assert.assertEquals(0, factory.closes); Assert.assertEquals(0, poolStats.getIdleExpire()); { // make sure a connection that has been passivated can idle-expire conn1.passivate(true); long elapsedMillis = factory.waitWhile(() -> factory.destroys < 1); Assert.assertEquals(5, factory.creates); Assert.assertEquals(1, factory.destroys); Assert.assertEquals(1, factory.closes); Assert.assertEquals(1, poolStats.getIdleExpire()); checkIdleTimeout(idleTimeoutMillis, elapsedMillis); } // now return all other connections to pool and verify that just 2 expire manager.returnConnection(conn2); manager.returnConnection(conn3); manager.returnConnection(conn4); manager.returnConnection(conn5); { long elapsedMillis = factory.waitWhile(() -> factory.destroys < 3); Assert.assertEquals(5, factory.creates); Assert.assertEquals(3, factory.destroys); Assert.assertEquals(3, factory.closes); Assert.assertEquals(3, poolStats.getIdleExpire()); checkIdleTimeout(idleTimeoutMillis, elapsedMillis); } // wait to make sure min-connections don't time out Thread.sleep(idleTimeoutMillis * 2); Assert.assertEquals(5, factory.creates); Assert.assertEquals(3, factory.destroys); Assert.assertEquals(3, factory.closes); Assert.assertEquals(3, poolStats.getIdleExpire()); } private void checkIdleTimeout(final long idleTimeoutMillis, long elapsedMillis) { Assert.assertTrue( "Elapsed " + elapsedMillis + " is less than idle timeout " + idleTimeoutMillis, elapsedMillis >= (idleTimeoutMillis - ALLOWABLE_ERROR_IN_MILLIS)); Assert.assertTrue( "Elapsed " + elapsedMillis + 
" is greater than idle timeout " + idleTimeoutMillis, elapsedMillis <= (idleTimeoutMillis + ALLOWABLE_ERROR_IN_MILLIS)); } @Test public void testBug41516() throws InterruptedException, AllConnectionsInUseException, NoAvailableServersException { final long idleTimeoutMillis = 300; final long BORROW_TIMEOUT_MILLIS = 500; manager = new ConnectionManagerImpl("pool", factory, endpointManager, 2, 1, idleTimeoutMillis, -1, logger, 60 * 1000, cancelCriterion, poolStats); manager.start(background); Connection conn1 = manager.borrowConnection(BORROW_TIMEOUT_MILLIS); Connection conn2 = manager.borrowConnection(BORROW_TIMEOUT_MILLIS); // Return some connections, let them idle expire manager.returnConnection(conn1); manager.returnConnection(conn2); { long elapsedMillis = factory.waitWhile(() -> factory.destroys < 1); Assert.assertEquals(1, factory.destroys); Assert.assertEquals(1, factory.closes); Assert.assertEquals(1, poolStats.getIdleExpire()); Assert.assertTrue( "Elapsed " + elapsedMillis + " is less than idle timeout " + idleTimeoutMillis, elapsedMillis + ALLOWABLE_ERROR_IN_MILLIS >= idleTimeoutMillis); } // Ok, now get some connections that fill our queue Connection ping1 = manager.borrowConnection(new ServerLocation("localhost", 5), false); Connection ping2 = manager.borrowConnection(new ServerLocation("localhost", 5), false); manager.returnConnection(ping1); manager.returnConnection(ping2); manager.borrowConnection(BORROW_TIMEOUT_MILLIS); manager.borrowConnection(BORROW_TIMEOUT_MILLIS); long startNanos = nowNanos(); try { manager.borrowConnection(BORROW_TIMEOUT_MILLIS); fail("Didn't get an exception"); } catch (AllConnectionsInUseException e) { // expected } long elapsedMillis = elapsedMillis(startNanos); Assert.assertTrue("Elapsed = " + elapsedMillis, elapsedMillis >= BORROW_TIMEOUT_MILLIS - ALLOWABLE_ERROR_IN_MILLIS); } @Test public void testLifetimeExpiration() throws InterruptedException, AllConnectionsInUseException, NoAvailableServersException, Throwable { int 
lifetimeTimeout = 500; manager = new ConnectionManagerImpl("pool", factory, endpointManager, 2, 2, -1, lifetimeTimeout, logger, 60 * 1000, cancelCriterion, poolStats); manager.start(background); { factory.waitWhile(() -> factory.creates < 2); Assert.assertEquals(2, factory.creates); Assert.assertEquals(0, factory.destroys); Assert.assertEquals(0, factory.finds); } // need to start a thread that keeps the connections busy // so that their last access time keeps changing AtomicReference exception = new AtomicReference(); int updaterCount = 2; UpdaterThread[] updaters = new UpdaterThread[updaterCount]; for (int i = 0; i < updaterCount; i++) { updaters[i] = new UpdaterThread(null, exception, i, (lifetimeTimeout / 10) * 2); } for (int i = 0; i < updaterCount; i++) { updaters[i].start(); } { long durationMillis = factory.waitWhile(() -> factory.finds < 2); Assert.assertEquals(2, factory.finds); // server shouldn't have changed so no increase in creates or destroys Assert.assertEquals(2, factory.creates); Assert.assertEquals(0, factory.destroys); Assert.assertEquals(0, factory.closes); Assert.assertTrue("took too long to expire lifetime; expected=" + lifetimeTimeout + " but took=" + durationMillis, durationMillis < lifetimeTimeout * 5); } for (int i = 0; i < updaterCount; i++) { ThreadUtils.join(updaters[i], 30 * 1000); } if (exception.get() != null) { throw (Throwable) exception.get(); } for (int i = 0; i < updaterCount; i++) { Assert.assertFalse("Updater [" + i + "] is still running", updaters[i].isAlive()); } } @Test public void testExclusiveConnectionAccess() throws Throwable { manager = new ConnectionManagerImpl("pool", factory, endpointManager, 1, 0, -1, -1, logger, 60 * 1000, cancelCriterion, poolStats); manager.start(background); AtomicReference exception = new AtomicReference(); AtomicBoolean haveConnection = new AtomicBoolean(); int updaterCount = 10; UpdaterThread[] updaters = new UpdaterThread[updaterCount]; for (int i = 0; i < updaterCount; i++) { updaters[i] 
= new UpdaterThread(haveConnection, exception, i); } for (int i = 0; i < updaterCount; i++) { updaters[i].start(); } for (int i = 0; i < updaterCount; i++) { ThreadUtils.join(updaters[i], 30 * 1000); } if (exception.get() != null) { throw (Throwable) exception.get(); } for (int i = 0; i < updaterCount; i++) { Assert.assertFalse("Updater [" + i + "] is still running", updaters[i].isAlive()); } } @Test public void testClose() throws AllConnectionsInUseException, NoAvailableServersException, InterruptedException { manager = new ConnectionManagerImpl("pool", factory, endpointManager, 10, 0, -1, -1, logger, 60 * 1000, cancelCriterion, poolStats); manager.start(background); Connection conn1 = manager.borrowConnection(0); manager.borrowConnection(0); manager.returnConnection(conn1); Assert.assertEquals(2, factory.creates); Assert.assertEquals(0, factory.destroys); manager.close(false); Assert.assertEquals(2, factory.closes); Assert.assertEquals(2, factory.destroys); } @Test public void testExchangeConnection() throws Exception { manager = new ConnectionManagerImpl("pool", factory, endpointManager, 2, 0, -1, -1, logger, 60 * 1000, cancelCriterion, poolStats); manager.start(background); Connection conn1 = manager.borrowConnection(10); Connection conn2 = manager.borrowConnection(10); try { manager.borrowConnection(10); fail("Exepected no servers available"); } catch (AllConnectionsInUseException e) { // expected } Assert.assertEquals(2, factory.creates); Assert.assertEquals(0, factory.destroys); Assert.assertEquals(2, manager.getConnectionCount()); Connection conn3 = manager.exchangeConnection(conn1, Collections.emptySet()); Assert.assertEquals(3, factory.creates); Assert.assertEquals(1, factory.destroys); Assert.assertEquals(2, manager.getConnectionCount()); manager.returnConnection(conn2); Assert.assertEquals(3, factory.creates); Assert.assertEquals(1, factory.destroys); Assert.assertEquals(2, manager.getConnectionCount()); Connection conn4 = 
manager.exchangeConnection(conn3, Collections.singleton(conn3.getServer())); Assert.assertEquals(4, factory.creates); Assert.assertEquals(2, factory.destroys); Assert.assertEquals(2, manager.getConnectionCount()); manager.returnConnection(conn4); } /** * This tests that a deadlock between connection formation and connection pool closing has been * fixed. See GEODE-4615 */ @Test public void testThatMapCloseCausesCacheClosedException() throws Exception { final ConnectionManagerImpl connectionManager = new ConnectionManagerImpl("pool", factory, endpointManager, 2, 0, -1, -1, logger, 60 * 1000, cancelCriterion, poolStats); manager = connectionManager; connectionManager.start(background); final ConnectionManagerImpl.ConnectionMap connectionMap = connectionManager.allConnectionsMap; final int thread1 = 0; final int thread2 = 1; final boolean[] ready = new boolean[2]; Thread thread = new Thread("ConnectionManagerJUnitTest thread") { @Override public void run() { setReady(ready, thread1); waitUntilReady(ready, thread2); connectionMap.close(false); } }; thread.setDaemon(true); thread.start(); try { Connection firstConnection = connectionManager.borrowConnection(0); synchronized (firstConnection) { setReady(ready, thread2); waitUntilReady(ready, thread1); // the other thread will now try to close the connection map but it will block // because this thread has locked one of the connections await().until(() -> connectionMap.closing); try { connectionManager.borrowConnection(0); fail("expected a CacheClosedException"); } catch (CacheClosedException e) { // expected } } } finally { if (thread.isAlive()) { System.out.println("stopping background thread"); thread.interrupt(); thread.join(); } } } private void setReady(boolean[] ready, int index) { System.out.println( Thread.currentThread().getName() + ": setting that thread" + (index + 1) + " is ready"); synchronized (ready) { ready[index] = true; } } private void waitUntilReady(boolean[] ready, int index) { System.out.println( 
Thread.currentThread().getName() + ": waiting for thread" + (index + 1) + " to be ready"); await().until(() -> { synchronized (ready) { return (ready[index]); } }); } private long nowNanos() { return System.nanoTime(); } private long elapsedNanos(long startNanos) { return nowNanos() - startNanos; } private long elapsedMillis(long startNanos) { return NANOSECONDS.toMillis(elapsedNanos(startNanos)); } @Test public void testBlocking() throws Throwable { manager = new ConnectionManagerImpl("pool", factory, endpointManager, 1, 0, -1, -1, logger, 60 * 1000, cancelCriterion, poolStats); manager.start(background); final Connection conn1 = manager.borrowConnection(10); long BORROW_TIMEOUT_MILLIS = 300; long startNonos = nowNanos(); try { manager.borrowConnection(BORROW_TIMEOUT_MILLIS); fail("Should have received no servers available"); } catch (AllConnectionsInUseException expected) { } long elapsedMillis = elapsedMillis(startNonos); Assert.assertTrue( "Should have blocked for " + BORROW_TIMEOUT_MILLIS + " millis for a connection", elapsedMillis >= BORROW_TIMEOUT_MILLIS - ALLOWABLE_ERROR_IN_MILLIS); Thread returnThread = new Thread() { @Override public void run() { try { Thread.sleep(50); } catch (InterruptedException e) { fail("interrupted"); } manager.returnConnection(conn1); } }; returnThread.start(); BORROW_TIMEOUT_MILLIS = 5000; startNonos = nowNanos(); Connection conn2 = manager.borrowConnection(BORROW_TIMEOUT_MILLIS); elapsedMillis = elapsedMillis(startNonos); Assert.assertTrue( "Should have blocked for less than " + BORROW_TIMEOUT_MILLIS + " milliseconds", elapsedMillis < BORROW_TIMEOUT_MILLIS + ALLOWABLE_ERROR_IN_MILLIS); manager.returnConnection(conn2); final Connection conn3 = manager.borrowConnection(10); Thread invalidateThread = new Thread() { @Override public void run() { try { Thread.sleep(50); } catch (InterruptedException e) { fail("interrupted"); } conn3.destroy(); manager.returnConnection(conn3); } }; invalidateThread.start(); startNonos = nowNanos(); 
conn2 = manager.borrowConnection(BORROW_TIMEOUT_MILLIS); elapsedMillis = elapsedMillis(startNonos); Assert.assertTrue( "Should have blocked for less than " + BORROW_TIMEOUT_MILLIS + " milliseconds", elapsedMillis < BORROW_TIMEOUT_MILLIS + ALLOWABLE_ERROR_IN_MILLIS); manager.returnConnection(conn2); final Connection conn4 = manager.borrowConnection(10); Thread invalidateThread2 = new Thread() { @Override public void run() { try { Thread.sleep(50); } catch (InterruptedException e) { fail("interrupted"); } endpointManager.serverCrashed(conn4.getEndpoint()); manager.returnConnection(conn4); } }; invalidateThread2.start(); startNonos = nowNanos(); conn2 = manager.borrowConnection(BORROW_TIMEOUT_MILLIS); elapsedMillis = elapsedMillis(startNonos); Assert.assertTrue( "Should have blocked for less than " + BORROW_TIMEOUT_MILLIS + " milliseconds", elapsedMillis < BORROW_TIMEOUT_MILLIS + ALLOWABLE_ERROR_IN_MILLIS); manager.returnConnection(conn2); } @Test public void testExplicitServer() throws Exception { manager = new ConnectionManagerImpl("pool", factory, endpointManager, 1, 0, -1, -1, logger, 60 * 1000, cancelCriterion, poolStats); manager.start(background); Connection conn1 = manager.borrowConnection(0); try { manager.borrowConnection(10); fail("Should have received an error"); } catch (AllConnectionsInUseException expected) { // do nothing } Connection conn3 = manager.borrowConnection(new ServerLocation("localhost", -2), false); Assert.assertEquals(2, factory.creates); Assert.assertEquals(0, factory.destroys); Assert.assertEquals(0, factory.closes); manager.returnConnection(conn3); Assert.assertEquals(2, factory.creates); Assert.assertEquals(1, factory.destroys); Assert.assertEquals(1, factory.closes); manager.returnConnection(conn1); Assert.assertEquals(2, factory.creates); Assert.assertEquals(1, factory.destroys); Assert.assertEquals(1, factory.closes); } private class UpdaterThread extends Thread { private AtomicReference exception; private final AtomicBoolean 
haveConnection; private int id; private final int iterations; public UpdaterThread(AtomicBoolean haveConnection, AtomicReference exception, int id) { this(haveConnection, exception, id, 10); } public UpdaterThread(AtomicBoolean haveConnection, AtomicReference exception, int id, int iterations) { this.haveConnection = haveConnection; this.exception = exception; this.id = id; this.iterations = iterations; } private Connection borrow(int i) { long startNanos = nowNanos(); long BORROW_TIMEOUT_MILLIS = 2000; Connection conn = manager.borrowConnection(BORROW_TIMEOUT_MILLIS); if (haveConnection != null) { Assert.assertTrue("Updater[" + id + "] loop[" + i + "] Someone else has the connection!", haveConnection.compareAndSet(false, true)); } long elapsedMillis = elapsedMillis(startNanos); Assert.assertTrue("Elapsed time (" + elapsedMillis + ") >= " + BORROW_TIMEOUT_MILLIS, elapsedMillis < BORROW_TIMEOUT_MILLIS + ALLOWABLE_ERROR_IN_MILLIS); return conn; } @Override public void run() { int i = 0; Connection conn = null; try { for (i = 0; i < iterations; i++) { conn = borrow(i); try { Thread.sleep(10); if (haveConnection != null) { Assert.assertTrue( "Updater[" + id + "] loop[" + i + "] Someone else changed the connection flag", haveConnection.compareAndSet(true, false)); } } finally { manager.returnConnection(conn); } } } catch (Throwable t) { this.exception.compareAndSet(null, new Exception("ERROR Updater[" + id + "] loop[" + i + "]", t)); } } } public class DummyFactory implements ConnectionFactory { public ServerLocation nextServer = new ServerLocation("localhost", -1); protected volatile int creates; protected volatile int destroys; protected volatile int closes; protected volatile int finds; /** * Wait as long as "whileCondition" is true. * The wait will timeout after TIMEOUT_MILLIS has elapsed. * * @return the elapsed time in milliseconds. 
*/ public synchronized long waitWhile(BooleanSupplier whileCondition) throws InterruptedException { final long startNanos = nowNanos(); long remainingMillis = TIMEOUT_MILLIS; while (whileCondition.getAsBoolean() && remainingMillis > 0) { wait(remainingMillis); remainingMillis = TIMEOUT_MILLIS - elapsedMillis(startNanos); } return elapsedMillis(startNanos); } @Override public ServerDenyList getDenyList() { return new ServerDenyList(1); } @Override public ServerLocation findBestServer(ServerLocation currentServer, Set excludedServers) { synchronized (this) { finds++; this.notifyAll(); } if (excludedServers != null) { if (excludedServers.contains(nextServer)) { return null; } } return nextServer; } @Override public Connection createClientToServerConnection(Set excluded) { return createClientToServerConnection(nextServer, true); } /* * (non-Javadoc) * * @see * org.apache.geode.cache.client.internal.ConnectionFactory#createClientToServerConnection(org. * apache.geode.distributed.internal.ServerLocation) */ @Override public Connection createClientToServerConnection(final ServerLocation location, boolean forQueue) { synchronized (this) { creates++; this.notifyAll(); } DistributedMember fakeMember = null; fakeMember = new InternalDistributedMember("localhost", 555); final DistributedMember member = fakeMember; return new Connection() { private Endpoint endpoint = endpointManager.referenceEndpoint(location, member); @Override public void destroy() { synchronized (DummyFactory.this) { destroys++; DummyFactory.this.notifyAll(); } } @Override public ServerLocation getServer() { return location; } @Override public ByteBuffer getCommBuffer() { return null; } @Override public Socket getSocket() { return null; } @Override public ConnectionStats getStats() { return null; } @Override public void close(boolean keepAlive) throws Exception { synchronized (DummyFactory.this) { closes++; DummyFactory.this.notifyAll(); } } @Override public Endpoint getEndpoint() { return endpoint; } 
@Override public ServerQueueStatus getQueueStatus() { return null; } @Override public Object execute(Op op) throws Exception { return op.attempt(this); } @Override public boolean isDestroyed() { return false; } @Override public void emergencyClose() {} @Override public short getWanSiteVersion() { return -1; } @Override public int getDistributedSystemId() { return -1; } @Override public void setWanSiteVersion(short wanSiteVersion) {} @Override public InputStream getInputStream() { return null; } @Override public OutputStream getOutputStream() { return null; } @Override public void setConnectionID(long id) {} @Override public long getConnectionID() { return 0; } }; } @Override public ClientUpdater createServerToClientConnection(Endpoint endpoint, QueueManager manager, boolean isPrimary, ClientUpdater failedUpdater) { return null; } } }
PurelyApplied/geode
geode-core/src/integrationTest/java/org/apache/geode/cache/client/internal/pooling/ConnectionManagerJUnitTest.java
Java
apache-2.0
32,100
# coding: utf-8 """ Kubernetes No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 The version of the OpenAPI document: release-1.23 Generated by: https://openapi-generator.tech """ import pprint import re # noqa: F401 import six from kubernetes.client.configuration import Configuration class V1beta1PolicyRulesWithSubjects(object): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually. """ """ Attributes: openapi_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ openapi_types = { 'non_resource_rules': 'list[V1beta1NonResourcePolicyRule]', 'resource_rules': 'list[V1beta1ResourcePolicyRule]', 'subjects': 'list[V1beta1Subject]' } attribute_map = { 'non_resource_rules': 'nonResourceRules', 'resource_rules': 'resourceRules', 'subjects': 'subjects' } def __init__(self, non_resource_rules=None, resource_rules=None, subjects=None, local_vars_configuration=None): # noqa: E501 """V1beta1PolicyRulesWithSubjects - a model defined in OpenAPI""" # noqa: E501 if local_vars_configuration is None: local_vars_configuration = Configuration() self.local_vars_configuration = local_vars_configuration self._non_resource_rules = None self._resource_rules = None self._subjects = None self.discriminator = None if non_resource_rules is not None: self.non_resource_rules = non_resource_rules if resource_rules is not None: self.resource_rules = resource_rules self.subjects = subjects @property def non_resource_rules(self): """Gets the non_resource_rules of this V1beta1PolicyRulesWithSubjects. # noqa: E501 `nonResourceRules` is a list of NonResourcePolicyRules that identify matching requests according to their verb and the target non-resource URL. # noqa: E501 :return: The non_resource_rules of this V1beta1PolicyRulesWithSubjects. 
# noqa: E501 :rtype: list[V1beta1NonResourcePolicyRule] """ return self._non_resource_rules @non_resource_rules.setter def non_resource_rules(self, non_resource_rules): """Sets the non_resource_rules of this V1beta1PolicyRulesWithSubjects. `nonResourceRules` is a list of NonResourcePolicyRules that identify matching requests according to their verb and the target non-resource URL. # noqa: E501 :param non_resource_rules: The non_resource_rules of this V1beta1PolicyRulesWithSubjects. # noqa: E501 :type: list[V1beta1NonResourcePolicyRule] """ self._non_resource_rules = non_resource_rules @property def resource_rules(self): """Gets the resource_rules of this V1beta1PolicyRulesWithSubjects. # noqa: E501 `resourceRules` is a slice of ResourcePolicyRules that identify matching requests according to their verb and the target resource. At least one of `resourceRules` and `nonResourceRules` has to be non-empty. # noqa: E501 :return: The resource_rules of this V1beta1PolicyRulesWithSubjects. # noqa: E501 :rtype: list[V1beta1ResourcePolicyRule] """ return self._resource_rules @resource_rules.setter def resource_rules(self, resource_rules): """Sets the resource_rules of this V1beta1PolicyRulesWithSubjects. `resourceRules` is a slice of ResourcePolicyRules that identify matching requests according to their verb and the target resource. At least one of `resourceRules` and `nonResourceRules` has to be non-empty. # noqa: E501 :param resource_rules: The resource_rules of this V1beta1PolicyRulesWithSubjects. # noqa: E501 :type: list[V1beta1ResourcePolicyRule] """ self._resource_rules = resource_rules @property def subjects(self): """Gets the subjects of this V1beta1PolicyRulesWithSubjects. # noqa: E501 subjects is the list of normal user, serviceaccount, or group that this rule cares about. There must be at least one member in this slice. A slice that includes both the system:authenticated and system:unauthenticated user groups matches every request. Required. 
# noqa: E501 :return: The subjects of this V1beta1PolicyRulesWithSubjects. # noqa: E501 :rtype: list[V1beta1Subject] """ return self._subjects @subjects.setter def subjects(self, subjects): """Sets the subjects of this V1beta1PolicyRulesWithSubjects. subjects is the list of normal user, serviceaccount, or group that this rule cares about. There must be at least one member in this slice. A slice that includes both the system:authenticated and system:unauthenticated user groups matches every request. Required. # noqa: E501 :param subjects: The subjects of this V1beta1PolicyRulesWithSubjects. # noqa: E501 :type: list[V1beta1Subject] """ if self.local_vars_configuration.client_side_validation and subjects is None: # noqa: E501 raise ValueError("Invalid value for `subjects`, must not be `None`") # noqa: E501 self._subjects = subjects def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.openapi_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value return result def to_str(self): """Returns the string representation of the model""" return pprint.pformat(self.to_dict()) def __repr__(self): """For `print` and `pprint`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, V1beta1PolicyRulesWithSubjects): return False return self.to_dict() == other.to_dict() def __ne__(self, other): """Returns true if both objects are not equal""" if not isinstance(other, V1beta1PolicyRulesWithSubjects): return True return self.to_dict() != other.to_dict()
kubernetes-client/python
kubernetes/client/models/v1beta1_policy_rules_with_subjects.py
Python
apache-2.0
7,013
package org.hl7.fhir.instance.model; /* Copyright (c) 2011+, HL7, Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of HL7 nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ // Generated on Sat, Aug 22, 2015 23:00-0400 for FHIR v0.5.0 import java.util.*; import org.hl7.fhir.utilities.Utilities; import org.hl7.fhir.instance.model.annotations.ResourceDef; import org.hl7.fhir.instance.model.annotations.SearchParamDefinition; import org.hl7.fhir.instance.model.annotations.Child; import org.hl7.fhir.instance.model.annotations.Description; import org.hl7.fhir.instance.model.annotations.Block; import org.hl7.fhir.instance.model.api.*; /** * An occurrence of information being transmitted. E.g., an alert that was sent to a responsible provider, a public health agency was notified about a reportable condition. */ @ResourceDef(name="Communication", profile="http://hl7.org/fhir/Profile/Communication") public class Communication extends DomainResource { public enum CommunicationStatus { /** * The communication transmission is ongoing. */ INPROGRESS, /** * The message transmission is complete, i.e., delivered to the recipient's destination. */ COMPLETED, /** * The communication transmission has been held by originating system/user request. */ SUSPENDED, /** * The receiving system has declined to accept the message. */ REJECTED, /** * There was a failure in transmitting the message out. 
*/ FAILED, /** * added to help the parsers */ NULL; public static CommunicationStatus fromCode(String codeString) throws Exception { if (codeString == null || "".equals(codeString)) return null; if ("in-progress".equals(codeString)) return INPROGRESS; if ("completed".equals(codeString)) return COMPLETED; if ("suspended".equals(codeString)) return SUSPENDED; if ("rejected".equals(codeString)) return REJECTED; if ("failed".equals(codeString)) return FAILED; throw new Exception("Unknown CommunicationStatus code '"+codeString+"'"); } public String toCode() { switch (this) { case INPROGRESS: return "in-progress"; case COMPLETED: return "completed"; case SUSPENDED: return "suspended"; case REJECTED: return "rejected"; case FAILED: return "failed"; default: return "?"; } } public String getSystem() { switch (this) { case INPROGRESS: return "http://hl7.org/fhir/communication-status"; case COMPLETED: return "http://hl7.org/fhir/communication-status"; case SUSPENDED: return "http://hl7.org/fhir/communication-status"; case REJECTED: return "http://hl7.org/fhir/communication-status"; case FAILED: return "http://hl7.org/fhir/communication-status"; default: return "?"; } } public String getDefinition() { switch (this) { case INPROGRESS: return "The communication transmission is ongoing."; case COMPLETED: return "The message transmission is complete, i.e., delivered to the recipient's destination."; case SUSPENDED: return "The communication transmission has been held by originating system/user request."; case REJECTED: return "The receiving system has declined to accept the message."; case FAILED: return "There was a failure in transmitting the message out."; default: return "?"; } } public String getDisplay() { switch (this) { case INPROGRESS: return "In Progress"; case COMPLETED: return "Completed"; case SUSPENDED: return "Suspended"; case REJECTED: return "Rejected"; case FAILED: return "Failed"; default: return "?"; } } } public static class CommunicationStatusEnumFactory 
implements EnumFactory<CommunicationStatus> { public CommunicationStatus fromCode(String codeString) throws IllegalArgumentException { if (codeString == null || "".equals(codeString)) if (codeString == null || "".equals(codeString)) return null; if ("in-progress".equals(codeString)) return CommunicationStatus.INPROGRESS; if ("completed".equals(codeString)) return CommunicationStatus.COMPLETED; if ("suspended".equals(codeString)) return CommunicationStatus.SUSPENDED; if ("rejected".equals(codeString)) return CommunicationStatus.REJECTED; if ("failed".equals(codeString)) return CommunicationStatus.FAILED; throw new IllegalArgumentException("Unknown CommunicationStatus code '"+codeString+"'"); } public String toCode(CommunicationStatus code) { if (code == CommunicationStatus.INPROGRESS) return "in-progress"; if (code == CommunicationStatus.COMPLETED) return "completed"; if (code == CommunicationStatus.SUSPENDED) return "suspended"; if (code == CommunicationStatus.REJECTED) return "rejected"; if (code == CommunicationStatus.FAILED) return "failed"; return "?"; } } @Block() public static class CommunicationPayloadComponent extends BackboneElement implements IBaseBackboneElement { /** * An individual message part for multi-part messages. */ @Child(name = "content", type = {StringType.class, Attachment.class}, order=1, min=1, max=1, modifier=false, summary=true) @Description(shortDefinition="Message part content", formalDefinition="An individual message part for multi-part messages." ) protected Type content; private static final long serialVersionUID = -1763459053L; /* * Constructor */ public CommunicationPayloadComponent() { super(); } /* * Constructor */ public CommunicationPayloadComponent(Type content) { super(); this.content = content; } /** * @return {@link #content} (An individual message part for multi-part messages.) */ public Type getContent() { return this.content; } /** * @return {@link #content} (An individual message part for multi-part messages.) 
*/ public StringType getContentStringType() throws Exception { if (!(this.content instanceof StringType)) throw new Exception("Type mismatch: the type StringType was expected, but "+this.content.getClass().getName()+" was encountered"); return (StringType) this.content; } public boolean hasContentStringType() throws Exception { return this.content instanceof StringType; } /** * @return {@link #content} (An individual message part for multi-part messages.) */ public Attachment getContentAttachment() throws Exception { if (!(this.content instanceof Attachment)) throw new Exception("Type mismatch: the type Attachment was expected, but "+this.content.getClass().getName()+" was encountered"); return (Attachment) this.content; } public boolean hasContentAttachment() throws Exception { return this.content instanceof Attachment; } /** * @return {@link #content} (An individual message part for multi-part messages.) */ public Reference getContentReference() throws Exception { if (!(this.content instanceof Reference)) throw new Exception("Type mismatch: the type Reference was expected, but "+this.content.getClass().getName()+" was encountered"); return (Reference) this.content; } public boolean hasContentReference() throws Exception { return this.content instanceof Reference; } public boolean hasContent() { return this.content != null && !this.content.isEmpty(); } /** * @param value {@link #content} (An individual message part for multi-part messages.) 
*/ public CommunicationPayloadComponent setContent(Type value) { this.content = value; return this; } protected void listChildren(List<Property> childrenList) { super.listChildren(childrenList); childrenList.add(new Property("content[x]", "string|Attachment|Reference(Any)", "An individual message part for multi-part messages.", 0, java.lang.Integer.MAX_VALUE, content)); } public CommunicationPayloadComponent copy() { CommunicationPayloadComponent dst = new CommunicationPayloadComponent(); copyValues(dst); dst.content = content == null ? null : content.copy(); return dst; } @Override public boolean equalsDeep(Base other) { if (!super.equalsDeep(other)) return false; if (!(other instanceof CommunicationPayloadComponent)) return false; CommunicationPayloadComponent o = (CommunicationPayloadComponent) other; return compareDeep(content, o.content, true); } @Override public boolean equalsShallow(Base other) { if (!super.equalsShallow(other)) return false; if (!(other instanceof CommunicationPayloadComponent)) return false; CommunicationPayloadComponent o = (CommunicationPayloadComponent) other; return true; } public boolean isEmpty() { return super.isEmpty() && (content == null || content.isEmpty()); } } /** * Identifiers associated with this Communication that are defined by business processes and/ or used to refer to it when a direct URL reference to the resource itself is not appropriate (e.g. in CDA documents, or in written / printed documentation). */ @Child(name = "identifier", type = {Identifier.class}, order=0, min=0, max=Child.MAX_UNLIMITED, modifier=false, summary=true) @Description(shortDefinition="Unique identifier", formalDefinition="Identifiers associated with this Communication that are defined by business processes and/ or used to refer to it when a direct URL reference to the resource itself is not appropriate (e.g. in CDA documents, or in written / printed documentation)." 
) protected List<Identifier> identifier; /** * The type of message conveyed such as alert, notification, reminder, instruction, etc. */ @Child(name = "category", type = {CodeableConcept.class}, order=1, min=0, max=1, modifier=false, summary=true) @Description(shortDefinition="Message category", formalDefinition="The type of message conveyed such as alert, notification, reminder, instruction, etc." ) protected CodeableConcept category; /** * The entity (e.g., person, organization, clinical information system, or device) which was the source of the communication. */ @Child(name = "sender", type = {Device.class, Organization.class, Patient.class, Practitioner.class, RelatedPerson.class}, order=2, min=0, max=1, modifier=false, summary=true) @Description(shortDefinition="Message sender", formalDefinition="The entity (e.g., person, organization, clinical information system, or device) which was the source of the communication." ) protected Reference sender; /** * The actual object that is the target of the reference (The entity (e.g., person, organization, clinical information system, or device) which was the source of the communication.) */ protected Resource senderTarget; /** * The entity (e.g., person, organization, clinical information system, or device) which was the target of the communication. */ @Child(name = "recipient", type = {Device.class, Organization.class, Patient.class, Practitioner.class, RelatedPerson.class}, order=3, min=0, max=Child.MAX_UNLIMITED, modifier=false, summary=true) @Description(shortDefinition="Message recipient", formalDefinition="The entity (e.g., person, organization, clinical information system, or device) which was the target of the communication." ) protected List<Reference> recipient; /** * The actual objects that are the target of the reference (The entity (e.g., person, organization, clinical information system, or device) which was the target of the communication.) 
*/ protected List<Resource> recipientTarget; /** * Text, attachment(s), or resource(s) that was communicated to the recipient. */ @Child(name = "payload", type = {}, order=4, min=0, max=Child.MAX_UNLIMITED, modifier=false, summary=true) @Description(shortDefinition="Message payload", formalDefinition="Text, attachment(s), or resource(s) that was communicated to the recipient." ) protected List<CommunicationPayloadComponent> payload; /** * The communication medium, e.g., email, fax. */ @Child(name = "medium", type = {CodeableConcept.class}, order=5, min=0, max=Child.MAX_UNLIMITED, modifier=false, summary=true) @Description(shortDefinition="Communication medium", formalDefinition="The communication medium, e.g., email, fax." ) protected List<CodeableConcept> medium; /** * The status of the transmission. */ @Child(name = "status", type = {CodeType.class}, order=6, min=0, max=1, modifier=true, summary=true) @Description(shortDefinition="in-progress | completed | suspended | rejected | failed", formalDefinition="The status of the transmission." ) protected Enumeration<CommunicationStatus> status; /** * The encounter within which the communication was sent. */ @Child(name = "encounter", type = {Encounter.class}, order=7, min=0, max=1, modifier=false, summary=true) @Description(shortDefinition="Encounter leading to message", formalDefinition="The encounter within which the communication was sent." ) protected Reference encounter; /** * The actual object that is the target of the reference (The encounter within which the communication was sent.) */ protected Encounter encounterTarget; /** * The time when this communication was sent. */ @Child(name = "sent", type = {DateTimeType.class}, order=8, min=0, max=1, modifier=false, summary=true) @Description(shortDefinition="When sent", formalDefinition="The time when this communication was sent." ) protected DateTimeType sent; /** * The time when this communication arrived at the destination. 
*/ @Child(name = "received", type = {DateTimeType.class}, order=9, min=0, max=1, modifier=false, summary=true) @Description(shortDefinition="When received", formalDefinition="The time when this communication arrived at the destination." ) protected DateTimeType received; /** * The reason or justification for the communication. */ @Child(name = "reason", type = {CodeableConcept.class}, order=10, min=0, max=Child.MAX_UNLIMITED, modifier=false, summary=true) @Description(shortDefinition="Indication for message", formalDefinition="The reason or justification for the communication." ) protected List<CodeableConcept> reason; /** * The patient who was the focus of this communication. */ @Child(name = "subject", type = {Patient.class}, order=11, min=0, max=1, modifier=false, summary=true) @Description(shortDefinition="Focus of message", formalDefinition="The patient who was the focus of this communication." ) protected Reference subject; /** * The actual object that is the target of the reference (The patient who was the focus of this communication.) */ protected Patient subjectTarget; private static final long serialVersionUID = -744574729L; /* * Constructor */ public Communication() { super(); } /** * @return {@link #identifier} (Identifiers associated with this Communication that are defined by business processes and/ or used to refer to it when a direct URL reference to the resource itself is not appropriate (e.g. in CDA documents, or in written / printed documentation).) 
*/ public List<Identifier> getIdentifier() { if (this.identifier == null) this.identifier = new ArrayList<Identifier>(); return this.identifier; } public boolean hasIdentifier() { if (this.identifier == null) return false; for (Identifier item : this.identifier) if (!item.isEmpty()) return true; return false; } /** * @return {@link #identifier} (Identifiers associated with this Communication that are defined by business processes and/ or used to refer to it when a direct URL reference to the resource itself is not appropriate (e.g. in CDA documents, or in written / printed documentation).) */ // syntactic sugar public Identifier addIdentifier() { //3 Identifier t = new Identifier(); if (this.identifier == null) this.identifier = new ArrayList<Identifier>(); this.identifier.add(t); return t; } // syntactic sugar public Communication addIdentifier(Identifier t) { //3 if (t == null) return this; if (this.identifier == null) this.identifier = new ArrayList<Identifier>(); this.identifier.add(t); return this; } /** * @return {@link #category} (The type of message conveyed such as alert, notification, reminder, instruction, etc.) */ public CodeableConcept getCategory() { if (this.category == null) if (Configuration.errorOnAutoCreate()) throw new Error("Attempt to auto-create Communication.category"); else if (Configuration.doAutoCreate()) this.category = new CodeableConcept(); // cc return this.category; } public boolean hasCategory() { return this.category != null && !this.category.isEmpty(); } /** * @param value {@link #category} (The type of message conveyed such as alert, notification, reminder, instruction, etc.) */ public Communication setCategory(CodeableConcept value) { this.category = value; return this; } /** * @return {@link #sender} (The entity (e.g., person, organization, clinical information system, or device) which was the source of the communication.) 
*/ public Reference getSender() { if (this.sender == null) if (Configuration.errorOnAutoCreate()) throw new Error("Attempt to auto-create Communication.sender"); else if (Configuration.doAutoCreate()) this.sender = new Reference(); // cc return this.sender; } public boolean hasSender() { return this.sender != null && !this.sender.isEmpty(); } /** * @param value {@link #sender} (The entity (e.g., person, organization, clinical information system, or device) which was the source of the communication.) */ public Communication setSender(Reference value) { this.sender = value; return this; } /** * @return {@link #sender} The actual object that is the target of the reference. The reference library doesn't populate this, but you can use it to hold the resource if you resolve it. (The entity (e.g., person, organization, clinical information system, or device) which was the source of the communication.) */ public Resource getSenderTarget() { return this.senderTarget; } /** * @param value {@link #sender} The actual object that is the target of the reference. The reference library doesn't use these, but you can use it to hold the resource if you resolve it. (The entity (e.g., person, organization, clinical information system, or device) which was the source of the communication.) */ public Communication setSenderTarget(Resource value) { this.senderTarget = value; return this; } /** * @return {@link #recipient} (The entity (e.g., person, organization, clinical information system, or device) which was the target of the communication.) 
*/ public List<Reference> getRecipient() { if (this.recipient == null) this.recipient = new ArrayList<Reference>(); return this.recipient; } public boolean hasRecipient() { if (this.recipient == null) return false; for (Reference item : this.recipient) if (!item.isEmpty()) return true; return false; } /** * @return {@link #recipient} (The entity (e.g., person, organization, clinical information system, or device) which was the target of the communication.) */ // syntactic sugar public Reference addRecipient() { //3 Reference t = new Reference(); if (this.recipient == null) this.recipient = new ArrayList<Reference>(); this.recipient.add(t); return t; } // syntactic sugar public Communication addRecipient(Reference t) { //3 if (t == null) return this; if (this.recipient == null) this.recipient = new ArrayList<Reference>(); this.recipient.add(t); return this; } /** * @return {@link #recipient} (The actual objects that are the target of the reference. The reference library doesn't populate this, but you can use this to hold the resources if you resolvethemt. The entity (e.g., person, organization, clinical information system, or device) which was the target of the communication.) */ public List<Resource> getRecipientTarget() { if (this.recipientTarget == null) this.recipientTarget = new ArrayList<Resource>(); return this.recipientTarget; } /** * @return {@link #payload} (Text, attachment(s), or resource(s) that was communicated to the recipient.) */ public List<CommunicationPayloadComponent> getPayload() { if (this.payload == null) this.payload = new ArrayList<CommunicationPayloadComponent>(); return this.payload; } public boolean hasPayload() { if (this.payload == null) return false; for (CommunicationPayloadComponent item : this.payload) if (!item.isEmpty()) return true; return false; } /** * @return {@link #payload} (Text, attachment(s), or resource(s) that was communicated to the recipient.) 
*/ // syntactic sugar public CommunicationPayloadComponent addPayload() { //3 CommunicationPayloadComponent t = new CommunicationPayloadComponent(); if (this.payload == null) this.payload = new ArrayList<CommunicationPayloadComponent>(); this.payload.add(t); return t; } // syntactic sugar public Communication addPayload(CommunicationPayloadComponent t) { //3 if (t == null) return this; if (this.payload == null) this.payload = new ArrayList<CommunicationPayloadComponent>(); this.payload.add(t); return this; } /** * @return {@link #medium} (The communication medium, e.g., email, fax.) */ public List<CodeableConcept> getMedium() { if (this.medium == null) this.medium = new ArrayList<CodeableConcept>(); return this.medium; } public boolean hasMedium() { if (this.medium == null) return false; for (CodeableConcept item : this.medium) if (!item.isEmpty()) return true; return false; } /** * @return {@link #medium} (The communication medium, e.g., email, fax.) */ // syntactic sugar public CodeableConcept addMedium() { //3 CodeableConcept t = new CodeableConcept(); if (this.medium == null) this.medium = new ArrayList<CodeableConcept>(); this.medium.add(t); return t; } // syntactic sugar public Communication addMedium(CodeableConcept t) { //3 if (t == null) return this; if (this.medium == null) this.medium = new ArrayList<CodeableConcept>(); this.medium.add(t); return this; } /** * @return {@link #status} (The status of the transmission.). This is the underlying object with id, value and extensions. 
The accessor "getStatus" gives direct access to the value */ public Enumeration<CommunicationStatus> getStatusElement() { if (this.status == null) if (Configuration.errorOnAutoCreate()) throw new Error("Attempt to auto-create Communication.status"); else if (Configuration.doAutoCreate()) this.status = new Enumeration<CommunicationStatus>(new CommunicationStatusEnumFactory()); // bb return this.status; } public boolean hasStatusElement() { return this.status != null && !this.status.isEmpty(); } public boolean hasStatus() { return this.status != null && !this.status.isEmpty(); } /** * @param value {@link #status} (The status of the transmission.). This is the underlying object with id, value and extensions. The accessor "getStatus" gives direct access to the value */ public Communication setStatusElement(Enumeration<CommunicationStatus> value) { this.status = value; return this; } /** * @return The status of the transmission. */ public CommunicationStatus getStatus() { return this.status == null ? null : this.status.getValue(); } /** * @param value The status of the transmission. */ public Communication setStatus(CommunicationStatus value) { if (value == null) this.status = null; else { if (this.status == null) this.status = new Enumeration<CommunicationStatus>(new CommunicationStatusEnumFactory()); this.status.setValue(value); } return this; } /** * @return {@link #encounter} (The encounter within which the communication was sent.) */ public Reference getEncounter() { if (this.encounter == null) if (Configuration.errorOnAutoCreate()) throw new Error("Attempt to auto-create Communication.encounter"); else if (Configuration.doAutoCreate()) this.encounter = new Reference(); // cc return this.encounter; } public boolean hasEncounter() { return this.encounter != null && !this.encounter.isEmpty(); } /** * @param value {@link #encounter} (The encounter within which the communication was sent.) 
*/ public Communication setEncounter(Reference value) { this.encounter = value; return this; } /** * @return {@link #encounter} The actual object that is the target of the reference. The reference library doesn't populate this, but you can use it to hold the resource if you resolve it. (The encounter within which the communication was sent.) */ public Encounter getEncounterTarget() { if (this.encounterTarget == null) if (Configuration.errorOnAutoCreate()) throw new Error("Attempt to auto-create Communication.encounter"); else if (Configuration.doAutoCreate()) this.encounterTarget = new Encounter(); // aa return this.encounterTarget; } /** * @param value {@link #encounter} The actual object that is the target of the reference. The reference library doesn't use these, but you can use it to hold the resource if you resolve it. (The encounter within which the communication was sent.) */ public Communication setEncounterTarget(Encounter value) { this.encounterTarget = value; return this; } /** * @return {@link #sent} (The time when this communication was sent.). This is the underlying object with id, value and extensions. The accessor "getSent" gives direct access to the value */ public DateTimeType getSentElement() { if (this.sent == null) if (Configuration.errorOnAutoCreate()) throw new Error("Attempt to auto-create Communication.sent"); else if (Configuration.doAutoCreate()) this.sent = new DateTimeType(); // bb return this.sent; } public boolean hasSentElement() { return this.sent != null && !this.sent.isEmpty(); } public boolean hasSent() { return this.sent != null && !this.sent.isEmpty(); } /** * @param value {@link #sent} (The time when this communication was sent.). This is the underlying object with id, value and extensions. The accessor "getSent" gives direct access to the value */ public Communication setSentElement(DateTimeType value) { this.sent = value; return this; } /** * @return The time when this communication was sent. 
*/ public Date getSent() { return this.sent == null ? null : this.sent.getValue(); } /** * @param value The time when this communication was sent. */ public Communication setSent(Date value) { if (value == null) this.sent = null; else { if (this.sent == null) this.sent = new DateTimeType(); this.sent.setValue(value); } return this; } /** * @return {@link #received} (The time when this communication arrived at the destination.). This is the underlying object with id, value and extensions. The accessor "getReceived" gives direct access to the value */ public DateTimeType getReceivedElement() { if (this.received == null) if (Configuration.errorOnAutoCreate()) throw new Error("Attempt to auto-create Communication.received"); else if (Configuration.doAutoCreate()) this.received = new DateTimeType(); // bb return this.received; } public boolean hasReceivedElement() { return this.received != null && !this.received.isEmpty(); } public boolean hasReceived() { return this.received != null && !this.received.isEmpty(); } /** * @param value {@link #received} (The time when this communication arrived at the destination.). This is the underlying object with id, value and extensions. The accessor "getReceived" gives direct access to the value */ public Communication setReceivedElement(DateTimeType value) { this.received = value; return this; } /** * @return The time when this communication arrived at the destination. */ public Date getReceived() { return this.received == null ? null : this.received.getValue(); } /** * @param value The time when this communication arrived at the destination. */ public Communication setReceived(Date value) { if (value == null) this.received = null; else { if (this.received == null) this.received = new DateTimeType(); this.received.setValue(value); } return this; } /** * @return {@link #reason} (The reason or justification for the communication.) 
*/ public List<CodeableConcept> getReason() { if (this.reason == null) this.reason = new ArrayList<CodeableConcept>(); return this.reason; } public boolean hasReason() { if (this.reason == null) return false; for (CodeableConcept item : this.reason) if (!item.isEmpty()) return true; return false; } /** * @return {@link #reason} (The reason or justification for the communication.) */ // syntactic sugar public CodeableConcept addReason() { //3 CodeableConcept t = new CodeableConcept(); if (this.reason == null) this.reason = new ArrayList<CodeableConcept>(); this.reason.add(t); return t; } // syntactic sugar public Communication addReason(CodeableConcept t) { //3 if (t == null) return this; if (this.reason == null) this.reason = new ArrayList<CodeableConcept>(); this.reason.add(t); return this; } /** * @return {@link #subject} (The patient who was the focus of this communication.) */ public Reference getSubject() { if (this.subject == null) if (Configuration.errorOnAutoCreate()) throw new Error("Attempt to auto-create Communication.subject"); else if (Configuration.doAutoCreate()) this.subject = new Reference(); // cc return this.subject; } public boolean hasSubject() { return this.subject != null && !this.subject.isEmpty(); } /** * @param value {@link #subject} (The patient who was the focus of this communication.) */ public Communication setSubject(Reference value) { this.subject = value; return this; } /** * @return {@link #subject} The actual object that is the target of the reference. The reference library doesn't populate this, but you can use it to hold the resource if you resolve it. (The patient who was the focus of this communication.) 
*/ public Patient getSubjectTarget() { if (this.subjectTarget == null) if (Configuration.errorOnAutoCreate()) throw new Error("Attempt to auto-create Communication.subject"); else if (Configuration.doAutoCreate()) this.subjectTarget = new Patient(); // aa return this.subjectTarget; } /** * @param value {@link #subject} The actual object that is the target of the reference. The reference library doesn't use these, but you can use it to hold the resource if you resolve it. (The patient who was the focus of this communication.) */ public Communication setSubjectTarget(Patient value) { this.subjectTarget = value; return this; } protected void listChildren(List<Property> childrenList) { super.listChildren(childrenList); childrenList.add(new Property("identifier", "Identifier", "Identifiers associated with this Communication that are defined by business processes and/ or used to refer to it when a direct URL reference to the resource itself is not appropriate (e.g. in CDA documents, or in written / printed documentation).", 0, java.lang.Integer.MAX_VALUE, identifier)); childrenList.add(new Property("category", "CodeableConcept", "The type of message conveyed such as alert, notification, reminder, instruction, etc.", 0, java.lang.Integer.MAX_VALUE, category)); childrenList.add(new Property("sender", "Reference(Device|Organization|Patient|Practitioner|RelatedPerson)", "The entity (e.g., person, organization, clinical information system, or device) which was the source of the communication.", 0, java.lang.Integer.MAX_VALUE, sender)); childrenList.add(new Property("recipient", "Reference(Device|Organization|Patient|Practitioner|RelatedPerson)", "The entity (e.g., person, organization, clinical information system, or device) which was the target of the communication.", 0, java.lang.Integer.MAX_VALUE, recipient)); childrenList.add(new Property("payload", "", "Text, attachment(s), or resource(s) that was communicated to the recipient.", 0, java.lang.Integer.MAX_VALUE, 
payload)); childrenList.add(new Property("medium", "CodeableConcept", "The communication medium, e.g., email, fax.", 0, java.lang.Integer.MAX_VALUE, medium)); childrenList.add(new Property("status", "code", "The status of the transmission.", 0, java.lang.Integer.MAX_VALUE, status)); childrenList.add(new Property("encounter", "Reference(Encounter)", "The encounter within which the communication was sent.", 0, java.lang.Integer.MAX_VALUE, encounter)); childrenList.add(new Property("sent", "dateTime", "The time when this communication was sent.", 0, java.lang.Integer.MAX_VALUE, sent)); childrenList.add(new Property("received", "dateTime", "The time when this communication arrived at the destination.", 0, java.lang.Integer.MAX_VALUE, received)); childrenList.add(new Property("reason", "CodeableConcept", "The reason or justification for the communication.", 0, java.lang.Integer.MAX_VALUE, reason)); childrenList.add(new Property("subject", "Reference(Patient)", "The patient who was the focus of this communication.", 0, java.lang.Integer.MAX_VALUE, subject)); } public Communication copy() { Communication dst = new Communication(); copyValues(dst); if (identifier != null) { dst.identifier = new ArrayList<Identifier>(); for (Identifier i : identifier) dst.identifier.add(i.copy()); }; dst.category = category == null ? null : category.copy(); dst.sender = sender == null ? null : sender.copy(); if (recipient != null) { dst.recipient = new ArrayList<Reference>(); for (Reference i : recipient) dst.recipient.add(i.copy()); }; if (payload != null) { dst.payload = new ArrayList<CommunicationPayloadComponent>(); for (CommunicationPayloadComponent i : payload) dst.payload.add(i.copy()); }; if (medium != null) { dst.medium = new ArrayList<CodeableConcept>(); for (CodeableConcept i : medium) dst.medium.add(i.copy()); }; dst.status = status == null ? null : status.copy(); dst.encounter = encounter == null ? null : encounter.copy(); dst.sent = sent == null ? 
null : sent.copy(); dst.received = received == null ? null : received.copy(); if (reason != null) { dst.reason = new ArrayList<CodeableConcept>(); for (CodeableConcept i : reason) dst.reason.add(i.copy()); }; dst.subject = subject == null ? null : subject.copy(); return dst; } protected Communication typedCopy() { return copy(); } @Override public boolean equalsDeep(Base other) { if (!super.equalsDeep(other)) return false; if (!(other instanceof Communication)) return false; Communication o = (Communication) other; return compareDeep(identifier, o.identifier, true) && compareDeep(category, o.category, true) && compareDeep(sender, o.sender, true) && compareDeep(recipient, o.recipient, true) && compareDeep(payload, o.payload, true) && compareDeep(medium, o.medium, true) && compareDeep(status, o.status, true) && compareDeep(encounter, o.encounter, true) && compareDeep(sent, o.sent, true) && compareDeep(received, o.received, true) && compareDeep(reason, o.reason, true) && compareDeep(subject, o.subject, true) ; } @Override public boolean equalsShallow(Base other) { if (!super.equalsShallow(other)) return false; if (!(other instanceof Communication)) return false; Communication o = (Communication) other; return compareValues(status, o.status, true) && compareValues(sent, o.sent, true) && compareValues(received, o.received, true) ; } public boolean isEmpty() { return super.isEmpty() && (identifier == null || identifier.isEmpty()) && (category == null || category.isEmpty()) && (sender == null || sender.isEmpty()) && (recipient == null || recipient.isEmpty()) && (payload == null || payload.isEmpty()) && (medium == null || medium.isEmpty()) && (status == null || status.isEmpty()) && (encounter == null || encounter.isEmpty()) && (sent == null || sent.isEmpty()) && (received == null || received.isEmpty()) && (reason == null || reason.isEmpty()) && (subject == null || subject.isEmpty()); } @Override public ResourceType getResourceType() { return ResourceType.Communication; } 
@SearchParamDefinition(name="identifier", path="Communication.identifier", description="Unique identifier", type="token" ) public static final String SP_IDENTIFIER = "identifier"; @SearchParamDefinition(name="sender", path="Communication.sender", description="Message sender", type="reference" ) public static final String SP_SENDER = "sender"; @SearchParamDefinition(name="subject", path="Communication.subject", description="Focus of message", type="reference" ) public static final String SP_SUBJECT = "subject"; @SearchParamDefinition(name="patient", path="Communication.subject", description="Focus of message", type="reference" ) public static final String SP_PATIENT = "patient"; @SearchParamDefinition(name="recipient", path="Communication.recipient", description="Message recipient", type="reference" ) public static final String SP_RECIPIENT = "recipient"; @SearchParamDefinition(name="received", path="Communication.received", description="When received", type="date" ) public static final String SP_RECEIVED = "received"; @SearchParamDefinition(name="medium", path="Communication.medium", description="Communication medium", type="token" ) public static final String SP_MEDIUM = "medium"; @SearchParamDefinition(name="encounter", path="Communication.encounter", description="Encounter leading to message", type="reference" ) public static final String SP_ENCOUNTER = "encounter"; @SearchParamDefinition(name="category", path="Communication.category", description="Message category", type="token" ) public static final String SP_CATEGORY = "category"; @SearchParamDefinition(name="sent", path="Communication.sent", description="When sent", type="date" ) public static final String SP_SENT = "sent"; @SearchParamDefinition(name="status", path="Communication.status", description="in-progress | completed | suspended | rejected | failed", type="token" ) public static final String SP_STATUS = "status"; }
Nodstuff/hapi-fhir
hapi-fhir-structures-hl7org-dstu2/src/main/java/org/hl7/fhir/instance/model/Communication.java
Java
apache-2.0
44,529
module Dsc
  class Resource

    attr_accessor :resource_cim_class

    # Represents a single DSC resource backed by a CIM class parsed from a
    # MOF file. Exposes the class name, friendly name, grouped property
    # views, and the PowerShell module the resource ships in.
    #
    # mof_class - parsed CIM class (must respond to #name, #qualifiers, #features)
    # mof_path  - filesystem path of the .mof file the class came from
    def initialize(mof_class, mof_path)
      @resource_mof_path = mof_path
      @resource_cim_class = mof_class
      @name = nil
      @friendlyname = nil
      @properties = nil
      @array_properties = nil
      @valuated_properties = nil
      @required_properties = nil
      @filtered_properties = nil
      @embedded_properties = nil
      @dsc_module = nil
      @ps_module = nil
    end

    # MOF path expressed relative to the directory one level above LIB_PATH.
    def relative_mof_path
      Pathname.new(@resource_mof_path).relative_path_from(Pathname.new(File.expand_path("#{LIB_PATH}/.."))).to_s
    end

    # Value of the CIM 'Friendlyname' qualifier, or nil when absent (memoized).
    def friendlyname
      @friendlyname ||= @resource_cim_class.qualifiers['Friendlyname'].value if @resource_cim_class.qualifiers['Friendlyname']
    end

    # CIM class name (memoized).
    # BUGFIX: previously memoized into a misspelled @ame ivar, so the @name
    # ivar initialized in #initialize was never populated.
    def name
      @name ||= @resource_cim_class.name
    end

    # Puppet type name derived from the friendly name, e.g. "Dsc_file".
    def instance_name
      "Dsc_#{self.friendlyname.downcase}"
    end

    # All CIM features wrapped as Dsc::Property objects (memoized).
    def properties
      @properties ||= @resource_cim_class.features.collect { |cim_feature| Dsc::Property.new(cim_feature) }
    end

    # Properties whose type is an array (memoized).
    def array_properties
      @array_properties ||= properties.select { |rp| rp.array? }
    end

    # Properties that declare a value map (memoized).
    def valuated_properties
      @valuated_properties ||= properties.select { |rp| rp.values }
    end

    # Properties marked as required (memoized).
    def required_properties
      @required_properties ||= properties.select { |rp| rp.required? }
    end

    # Non-dependable, non-ensure properties sorted with required ones first,
    # then alphabetically by downcased name (memoized).
    def filtered_properties
      @filtered_properties ||= properties.select { |p| !p.dependable? && !p.is_ensure? }.sort_by { |p| [p.required? ? 0 : 1, p.name.downcase] }
    end

    # Properties whose type is an embedded instance (memoized).
    def embedded_properties
      @embedded_properties ||= properties.select { |rp| rp.embeddedinstance? }
    end

    # True when the resource exposes an Ensure property.
    def ensurable?
      properties.detect { |p| p.is_ensure? } ? true : false
    end

    # Truthy when the Ensure property accepts an 'absent' value
    # (case-insensitive); nil otherwise.
    def absentable?
      properties.detect do |p|
        p.is_ensure? && p.values.any? { |v| v.casecmp('absent') == 0 }
      end
    end

    # True when the resource has a Name key property.
    def has_name?
      properties.detect { |p| p.is_name? } ? true : false
    end

    # True when any property is an embedded instance.
    def has_embeddedinstances?
      properties.detect { |p| p.embeddedinstance? } ? true : false
    end

    # Locates the PowerShell module that owns this resource by walking up
    # from the MOF path to the 'dscresources' directory and loading the
    # adjacent <module>.psd1 manifest (memoized).
    #
    # Raises RuntimeError when the module directory or its manifest is missing.
    def ps_module
      unless @ps_module
        path_array = @resource_mof_path.split('/')
        revert_array = path_array.reverse
        downcased_array = revert_array.collect { |p| p.downcase }
        index = downcased_array.index('dscresources')
        raise "module for #{self.name} not found" if index == nil
        module_name = revert_array[index + 1] rescue nil
        module_dir = path_array[0..(path_array.count - (index + 2))].join('/')
        module_manifest_path = "#{module_dir}/#{module_name}.psd1"
        # BUGFIX: File.exists? is deprecated (removed in Ruby 3.2); use File.exist?.
        raise "module manifest #{module_manifest_path} not found" unless File.exist?(module_manifest_path)
        @ps_module = Dsc::Psmodule.new(module_name, module_manifest_path)
      end
      @ps_module
    end

  end
end
cyberious/puppetlabs-dsc
build/dsc/resource.rb
Ruby
apache-2.0
3,270
/**
 * P6Spy
 *
 * Copyright (C) 2002 P6Spy
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.p6spy.engine.logging;

import com.p6spy.engine.event.JdbcEventListener;
import com.p6spy.engine.spy.P6Factory;
import com.p6spy.engine.spy.P6LoadableOptions;
import com.p6spy.engine.spy.option.P6OptionsRepository;

import java.util.Iterator;
import java.util.ServiceLoader;

/**
 * {@link P6Factory} for the logging module. Resolves the JDBC event listener
 * via {@link ServiceLoader}, preferring a user-registered implementation of
 * {@link LoggingEventListener} over the built-in default.
 */
public class P6LogFactory implements P6Factory {

  private static ServiceLoader<LoggingEventListener> customLoggingEventListener = ServiceLoader
      .load(LoggingEventListener.class, P6LogFactory.class.getClassLoader());

  @Override
  public JdbcEventListener getJdbcEventListener() {
    // Prefer the first custom implementation discovered on the classpath;
    // fall back to the default singleton when none is registered.
    Iterator<LoggingEventListener> listeners = customLoggingEventListener.iterator();
    return listeners.hasNext() ? listeners.next() : LoggingEventListener.INSTANCE;
  }

  @Override
  public P6LoadableOptions getOptions(P6OptionsRepository optionsRepository) {
    return new P6LogOptions(optionsRepository);
  }
}
p6spy/p6spy
src/main/java/com/p6spy/engine/logging/P6LogFactory.java
Java
apache-2.0
1,609
<?php

/**
 * Migration: creates the `buildings` table with an auto-increment primary
 * key and foreign-key columns referencing directions and users.
 */
class Migration_Create_buildings extends CI_Migration {

    public function up()
    {
        $this->dbforge->add_field(array(
            'id' => array(
                'type' => 'INT',
                'constraint' => 11,
                'unsigned' => TRUE,
                'auto_increment' => TRUE
            ),
            'name' => array(
                'type' => 'VARCHAR',
                'constraint' => '150',
            ),
            'idDirection' => array(
                'type' => 'INT',
                'constraint' => 11
            ),
            'idUsers' => array(
                'type' => 'INT',
                'constraint' => 11
            )
        ));
        $this->dbforge->add_key('id');
        $this->dbforge->create_table('buildings');
    }

    public function down()
    {
        $this->dbforge->drop_table('buildings');
    }
}

/*
 * Reference query kept for documentation:
 *
 * select nb.url, nb.nombreNavBar, nb.idMenuSubMenu, nb.iconClass, nb.status
 * from navbars nb
 * INNER join navbars_profiles nbp on nb.id = nbp.idMenu
 * INNER join profiles p on p.idProfile = nbp.idProfile
 * where p.idProfile = 1 and nb.status = 1
 *
 * BUGFIX: this trailing comment previously ended with a lone '/', leaving the
 * block comment unterminated — a PHP parse error ("Unterminated comment").
 */
jossenino/Roinn
application/migrations/008_Create_buildings.php
PHP
apache-2.0
958
package com.libreworks.stellarbase.persistence.criteria;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import java.util.Arrays;

import org.junit.Test;

/**
 * Unit tests for {@link LikePredicate} evaluation against SQL-style
 * wildcard patterns ('%' for any run of characters, '_' for one character).
 */
public class LikePredicateTest
{
	// Every pattern here matches "Spatula" and none of them matches "Knife".
	private static final Iterable<String> PATTERNS =
		Arrays.asList("Spat%", "%ula", "%atul%", "Sp%la", "Spa_ula");

	@Test
	public void testEvaluateLike()
	{
		ValueExpression<String> spatula = ValueExpression.of("Spatula");
		ValueExpression<String> knife = ValueExpression.of("Knife");
		// Non-negated predicate: true for the matching value...
		for (String pattern : PATTERNS) {
			assertTrue("Doesn't match pattern " + pattern,
				new LikePredicate(spatula, ValueExpression.of(pattern), false).evaluate(null));
		}
		// ...and false for a value that matches none of the patterns.
		for (String pattern : PATTERNS) {
			assertFalse("Matches pattern " + pattern,
				new LikePredicate(knife, ValueExpression.of(pattern), false).evaluate(null));
		}
	}

	@Test
	public void testEvaluateNotLike()
	{
		ValueExpression<String> spatula = ValueExpression.of("Spatula");
		ValueExpression<String> knife = ValueExpression.of("Knife");
		// Negated predicate inverts the result: false when the value matches...
		for (String pattern : PATTERNS) {
			assertFalse("Matches pattern " + pattern,
				new LikePredicate(spatula, ValueExpression.of(pattern), true).evaluate(null));
		}
		// ...and true when it does not.
		for (String pattern : PATTERNS) {
			assertTrue("Doesn't match pattern " + pattern,
				new LikePredicate(knife, ValueExpression.of(pattern), true).evaluate(null));
		}
	}
}
libreworks/stellarbase
stellarbase-dao/src/test/java/com/libreworks/stellarbase/persistence/criteria/LikePredicateTest.java
Java
apache-2.0
1,547
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.sql.analyzer; import com.facebook.presto.Session; import com.facebook.presto.block.BlockEncodingManager; import com.facebook.presto.connector.ConnectorId; import com.facebook.presto.connector.informationSchema.InformationSchemaConnector; import com.facebook.presto.connector.system.SystemConnector; import com.facebook.presto.metadata.Catalog; import com.facebook.presto.metadata.CatalogManager; import com.facebook.presto.metadata.InMemoryNodeManager; import com.facebook.presto.metadata.InternalNodeManager; import com.facebook.presto.metadata.Metadata; import com.facebook.presto.metadata.MetadataManager; import com.facebook.presto.metadata.QualifiedObjectName; import com.facebook.presto.metadata.SchemaPropertyManager; import com.facebook.presto.metadata.SessionPropertyManager; import com.facebook.presto.metadata.TablePropertyManager; import com.facebook.presto.metadata.TestingMetadata; import com.facebook.presto.metadata.ViewDefinition; import com.facebook.presto.security.AccessControl; import com.facebook.presto.security.AccessControlManager; import com.facebook.presto.security.AllowAllAccessControl; import com.facebook.presto.spi.ColumnMetadata; import com.facebook.presto.spi.ConnectorTableMetadata; import com.facebook.presto.spi.SchemaTableName; import com.facebook.presto.spi.connector.Connector; import com.facebook.presto.spi.connector.ConnectorMetadata; import 
com.facebook.presto.spi.connector.ConnectorSplitManager; import com.facebook.presto.spi.connector.ConnectorTransactionHandle; import com.facebook.presto.spi.transaction.IsolationLevel; import com.facebook.presto.spi.type.TypeManager; import com.facebook.presto.sql.parser.SqlParser; import com.facebook.presto.sql.tree.Statement; import com.facebook.presto.transaction.TransactionManager; import com.facebook.presto.type.ArrayType; import com.facebook.presto.type.TypeRegistry; import com.google.common.collect.ImmutableList; import io.airlift.json.JsonCodec; import org.intellij.lang.annotations.Language; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; import java.util.Optional; import java.util.function.Consumer; import static com.facebook.presto.connector.ConnectorId.createInformationSchemaConnectorId; import static com.facebook.presto.connector.ConnectorId.createSystemTablesConnectorId; import static com.facebook.presto.metadata.ViewDefinition.ViewColumn; import static com.facebook.presto.operator.scalar.ApplyFunction.APPLY_FUNCTION; import static com.facebook.presto.spi.type.BigintType.BIGINT; import static com.facebook.presto.spi.type.DoubleType.DOUBLE; import static com.facebook.presto.spi.type.VarcharType.VARCHAR; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.AMBIGUOUS_ATTRIBUTE; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.CANNOT_HAVE_AGGREGATIONS_OR_WINDOWS; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.CATALOG_NOT_SPECIFIED; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.COLUMN_NAME_NOT_SPECIFIED; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.COLUMN_TYPE_UNKNOWN; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.DUPLICATE_COLUMN_NAME; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.DUPLICATE_RELATION; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.INVALID_LITERAL; import static 
com.facebook.presto.sql.analyzer.SemanticErrorCode.INVALID_ORDINAL; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.INVALID_SCHEMA_NAME; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.INVALID_WINDOW_FRAME; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.MISMATCHED_COLUMN_ALIASES; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.MISMATCHED_SET_COLUMN_TYPES; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.MISSING_ATTRIBUTE; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.MISSING_CATALOG; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.MISSING_COLUMN; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.MISSING_SCHEMA; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.MISSING_TABLE; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.MULTIPLE_FIELDS_FROM_SUBQUERY; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.MUST_BE_AGGREGATE_OR_GROUP_BY; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.NESTED_AGGREGATION; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.NESTED_WINDOW; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.NON_NUMERIC_SAMPLE_PERCENTAGE; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.NOT_SUPPORTED; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.ORDER_BY_MUST_BE_IN_SELECT; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.SAMPLE_PERCENTAGE_OUT_OF_RANGE; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.SCHEMA_NOT_SPECIFIED; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.STANDALONE_LAMBDA; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.TYPE_MISMATCH; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.VIEW_IS_STALE; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.WILDCARD_WITHOUT_FROM; import static 
com.facebook.presto.sql.analyzer.SemanticErrorCode.WINDOW_REQUIRES_OVER; import static com.facebook.presto.testing.TestingSession.testSessionBuilder; import static com.facebook.presto.transaction.TransactionBuilder.transaction; import static com.facebook.presto.transaction.TransactionManager.createTestTransactionManager; import static java.lang.String.format; import static java.util.Collections.emptyList; import static org.testng.Assert.fail; @Test(singleThreaded = true) public class TestAnalyzer { private static final String TPCH_CATALOG = "tpch"; private static final ConnectorId TPCH_CONNECTOR_ID = new ConnectorId(TPCH_CATALOG); private static final String SECOND_CATALOG = "c2"; private static final ConnectorId SECOND_CONNECTOR_ID = new ConnectorId(SECOND_CATALOG); private static final String THIRD_CATALOG = "c3"; private static final ConnectorId THIRD_CONNECTOR_ID = new ConnectorId(THIRD_CATALOG); private static final Session SETUP_SESSION = testSessionBuilder() .setCatalog("c1") .setSchema("s1") .build(); private static final Session CLIENT_SESSION = testSessionBuilder() .setCatalog(TPCH_CATALOG) .setSchema("s1") .build(); private static final SqlParser SQL_PARSER = new SqlParser(); private TransactionManager transactionManager; private AccessControl accessControl; private Metadata metadata; @Test public void testNonComparableGroupBy() throws Exception { assertFails(TYPE_MISMATCH, "SELECT * FROM (SELECT approx_set(1)) GROUP BY 1"); } @Test public void testNonComparableWindowPartition() throws Exception { assertFails(TYPE_MISMATCH, "SELECT row_number() OVER (PARTITION BY t.x) FROM (VALUES(CAST (NULL AS HyperLogLog))) AS t(x)"); } @Test public void testNonComparableWindowOrder() throws Exception { assertFails(TYPE_MISMATCH, "SELECT row_number() OVER (ORDER BY t.x) FROM (VALUES(color('red'))) AS t(x)"); } @Test public void testNonComparableDistinctAggregation() throws Exception { assertFails(TYPE_MISMATCH, "SELECT count(DISTINCT x) FROM (SELECT approx_set(1) x)"); 
} @Test public void testNonComparableDistinct() throws Exception { assertFails(TYPE_MISMATCH, "SELECT DISTINCT * FROM (SELECT approx_set(1) x)"); assertFails(TYPE_MISMATCH, "SELECT DISTINCT x FROM (SELECT approx_set(1) x)"); } @Test public void testInSubqueryTypes() throws Exception { assertFails(TYPE_MISMATCH, "SELECT * FROM (VALUES 'a') t(y) WHERE y IN (VALUES 1)"); assertFails(TYPE_MISMATCH, "SELECT (VALUES true) IN (VALUES 1)"); } @Test public void testScalarSubQuery() throws Exception { analyze("SELECT 'a', (VALUES 1) GROUP BY 1"); analyze("SELECT 'a', (SELECT (1))"); analyze("SELECT * FROM t1 WHERE (VALUES 1) = 2"); analyze("SELECT * FROM t1 WHERE (VALUES 1) IN (VALUES 1)"); analyze("SELECT * FROM t1 WHERE (VALUES 1) IN (2)"); analyze("SELECT * FROM (SELECT 1) t1(x) WHERE x IN (SELECT 1)"); } @Test public void testHavingReferencesOutputAlias() throws Exception { assertFails(MISSING_ATTRIBUTE, "SELECT sum(a) x FROM t1 HAVING x > 5"); } @Test public void testWildcardWithInvalidPrefix() throws Exception { assertFails(MISSING_TABLE, "SELECT foo.* FROM t1"); } @Test public void testGroupByWithWildcard() throws Exception { assertFails(MUST_BE_AGGREGATE_OR_GROUP_BY, "SELECT * FROM t1 GROUP BY 1"); assertFails(MUST_BE_AGGREGATE_OR_GROUP_BY, "SELECT u1.*, u2.* FROM (select a, b + 1 from t1) u1 JOIN (select a, b + 2 from t1) u2 USING (a) GROUP BY u1.a, u2.a, 3"); } @Test public void testGroupByInvalidOrdinal() throws Exception { assertFails(INVALID_ORDINAL, "SELECT * FROM t1 GROUP BY 10"); assertFails(INVALID_ORDINAL, "SELECT * FROM t1 GROUP BY 0"); } @Test public void testOrderByInvalidOrdinal() throws Exception { assertFails(INVALID_ORDINAL, "SELECT * FROM t1 ORDER BY 10"); assertFails(INVALID_ORDINAL, "SELECT * FROM t1 ORDER BY 0"); } @Test public void testOrderByNonComparable() throws Exception { assertFails(TYPE_MISMATCH, "SELECT x FROM (SELECT approx_set(1) x) ORDER BY 1"); assertFails(TYPE_MISMATCH, "SELECT * FROM (SELECT approx_set(1) x) ORDER BY 1"); 
assertFails(TYPE_MISMATCH, "SELECT x FROM (SELECT approx_set(1) x) ORDER BY x"); } @Test public void testNestedAggregation() throws Exception { assertFails(NESTED_AGGREGATION, "SELECT sum(count(*)) FROM t1"); } @Test public void testAggregationsNotAllowed() throws Exception { assertFails(CANNOT_HAVE_AGGREGATIONS_OR_WINDOWS, "SELECT * FROM t1 WHERE sum(a) > 1"); assertFails(CANNOT_HAVE_AGGREGATIONS_OR_WINDOWS, "SELECT * FROM t1 GROUP BY sum(a)"); assertFails(CANNOT_HAVE_AGGREGATIONS_OR_WINDOWS, "SELECT * FROM t1 JOIN t2 ON sum(t1.a) = t2.a"); } @Test public void testWindowsNotAllowed() throws Exception { assertFails(CANNOT_HAVE_AGGREGATIONS_OR_WINDOWS, "SELECT * FROM t1 WHERE foo() over () > 1"); assertFails(CANNOT_HAVE_AGGREGATIONS_OR_WINDOWS, "SELECT * FROM t1 GROUP BY rank() over ()"); assertFails(CANNOT_HAVE_AGGREGATIONS_OR_WINDOWS, "SELECT * FROM t1 JOIN t2 ON sum(t1.a) over () = t2.a"); } @Test public void testInvalidTable() throws Exception { assertFails(MISSING_CATALOG, "SELECT * FROM foo.bar.t"); assertFails(MISSING_SCHEMA, "SELECT * FROM foo.t"); assertFails(MISSING_TABLE, "SELECT * FROM foo"); } @Test public void testInvalidSchema() throws Exception { assertFails(MISSING_SCHEMA, "SHOW TABLES FROM NONEXISTENT_SCHEMA"); assertFails(MISSING_SCHEMA, "SHOW TABLES IN NONEXISTENT_SCHEMA LIKE '%'"); } @Test public void testNonAggregate() throws Exception { assertFails(MUST_BE_AGGREGATE_OR_GROUP_BY, "SELECT 'a', array[b][1] FROM t1 GROUP BY 1"); assertFails(MUST_BE_AGGREGATE_OR_GROUP_BY, "SELECT a, sum(b) FROM t1"); assertFails(MUST_BE_AGGREGATE_OR_GROUP_BY, "SELECT sum(b) / a FROM t1"); assertFails(MUST_BE_AGGREGATE_OR_GROUP_BY, "SELECT sum(b) / a FROM t1 GROUP BY c"); assertFails(MUST_BE_AGGREGATE_OR_GROUP_BY, "SELECT sum(b) FROM t1 ORDER BY a + 1"); assertFails(MUST_BE_AGGREGATE_OR_GROUP_BY, "SELECT a, sum(b) FROM t1 GROUP BY a HAVING c > 5"); assertFails(MUST_BE_AGGREGATE_OR_GROUP_BY, "SELECT count(*) over (PARTITION BY a) FROM t1 GROUP BY b"); 
assertFails(MUST_BE_AGGREGATE_OR_GROUP_BY, "SELECT count(*) over (ORDER BY a) FROM t1 GROUP BY b"); assertFails(MUST_BE_AGGREGATE_OR_GROUP_BY, "SELECT count(*) over (ORDER BY count(*) ROWS a PRECEDING) FROM t1 GROUP BY b"); assertFails(MUST_BE_AGGREGATE_OR_GROUP_BY, "SELECT count(*) over (ORDER BY count(*) ROWS BETWEEN b PRECEDING AND a PRECEDING) FROM t1 GROUP BY b"); assertFails(MUST_BE_AGGREGATE_OR_GROUP_BY, "SELECT count(*) over (ORDER BY count(*) ROWS BETWEEN a PRECEDING AND UNBOUNDED PRECEDING) FROM t1 GROUP BY b"); } @Test public void testInvalidAttribute() throws Exception { assertFails(MISSING_ATTRIBUTE, "SELECT f FROM t1"); assertFails(MISSING_ATTRIBUTE, "SELECT * FROM t1 ORDER BY f"); assertFails(MISSING_ATTRIBUTE, "SELECT count(*) FROM t1 GROUP BY f"); assertFails(MISSING_ATTRIBUTE, "SELECT * FROM t1 WHERE f > 1"); } @Test(expectedExceptions = SemanticException.class, expectedExceptionsMessageRegExp = "line 1:8: Column 't.y' cannot be resolved") public void testInvalidAttributeCorrectErrorMessage() throws Exception { analyze("SELECT t.y FROM (VALUES 1) t(x)"); } @Test public void testOrderByMustAppearInSelectWithDistinct() throws Exception { assertFails(ORDER_BY_MUST_BE_IN_SELECT, "SELECT DISTINCT a FROM t1 ORDER BY b"); } @Test public void testNonBooleanWhereClause() throws Exception { assertFails(TYPE_MISMATCH, "SELECT * FROM t1 WHERE a"); } @Test public void testDistinctAggregations() throws Exception { analyze("SELECT COUNT(DISTINCT a), SUM(a) FROM t1"); } @Test public void testMultipleDistinctAggregations() throws Exception { analyze("SELECT COUNT(DISTINCT a), COUNT(DISTINCT b) FROM t1"); } @Test public void testOrderByExpressionOnOutputColumn() throws Exception { assertFails(MISSING_ATTRIBUTE, "SELECT a x FROM t1 ORDER BY x + 1"); } @Test public void testOrderByExpressionOnOutputColumn2() throws Exception { // TODO: validate output analyze("SELECT a x FROM t1 ORDER BY a + 1"); } @Test public void testOrderByWithWildcard() throws Exception { // 
TODO: validate output analyze("SELECT a, t1.* FROM t1 ORDER BY a"); } @Test public void testMismatchedColumnAliasCount() throws Exception { assertFails(MISMATCHED_COLUMN_ALIASES, "SELECT * FROM t1 u (x, y)"); } @Test public void testJoinOnConstantExpression() throws Exception { analyze("SELECT * FROM t1 JOIN t2 ON 1 = 1"); } @Test public void testJoinOnNonBooleanExpression() throws Exception { assertFails(TYPE_MISMATCH, "SELECT * FROM t1 JOIN t2 ON 5"); } @Test public void testJoinOnAmbiguousName() throws Exception { assertFails(AMBIGUOUS_ATTRIBUTE, "SELECT * FROM t1 JOIN t2 ON a = a"); } @Test public void testNonEquiOuterJoin() throws Exception { analyze("SELECT * FROM t1 LEFT JOIN t2 ON t1.a + t2.a = 1"); analyze("SELECT * FROM t1 RIGHT JOIN t2 ON t1.a + t2.a = 1"); analyze("SELECT * FROM t1 LEFT JOIN t2 ON t1.a = t2.a OR t1.b = t2.b"); } @Test public void testNonBooleanHaving() throws Exception { assertFails(TYPE_MISMATCH, "SELECT sum(a) FROM t1 HAVING sum(a)"); } @Test public void testAmbiguousReferenceInOrderBy() throws Exception { assertFails(AMBIGUOUS_ATTRIBUTE, "SELECT a x, b x FROM t1 ORDER BY x"); } @Test public void testImplicitCrossJoin() { // TODO: validate output analyze("SELECT * FROM t1, t2"); } @Test public void testNaturalJoinNotSupported() throws Exception { assertFails(NOT_SUPPORTED, "SELECT * FROM t1 NATURAL JOIN t2"); } @Test public void testNestedWindowFunctions() throws Exception { assertFails(NESTED_WINDOW, "SELECT avg(sum(a) OVER ()) FROM t1"); assertFails(NESTED_WINDOW, "SELECT sum(sum(a) OVER ()) OVER () FROM t1"); assertFails(NESTED_WINDOW, "SELECT avg(a) OVER (PARTITION BY sum(b) OVER ()) FROM t1"); assertFails(NESTED_WINDOW, "SELECT avg(a) OVER (ORDER BY sum(b) OVER ()) FROM t1"); } @Test public void testWindowFunctionWithoutOverClause() { assertFails(WINDOW_REQUIRES_OVER, "SELECT row_number()"); assertFails(WINDOW_REQUIRES_OVER, "SELECT coalesce(lead(a), 0) from (values(0)) t(a)"); } @Test public void testInvalidWindowFrame() throws 
Exception { assertFails(INVALID_WINDOW_FRAME, "SELECT rank() OVER (ROWS UNBOUNDED FOLLOWING)"); assertFails(INVALID_WINDOW_FRAME, "SELECT rank() OVER (ROWS 2 FOLLOWING)"); assertFails(INVALID_WINDOW_FRAME, "SELECT rank() OVER (ROWS BETWEEN UNBOUNDED FOLLOWING AND CURRENT ROW)"); assertFails(INVALID_WINDOW_FRAME, "SELECT rank() OVER (ROWS BETWEEN CURRENT ROW AND UNBOUNDED PRECEDING)"); assertFails(INVALID_WINDOW_FRAME, "SELECT rank() OVER (ROWS BETWEEN CURRENT ROW AND 5 PRECEDING)"); assertFails(INVALID_WINDOW_FRAME, "SELECT rank() OVER (ROWS BETWEEN 2 FOLLOWING AND 5 PRECEDING)"); assertFails(INVALID_WINDOW_FRAME, "SELECT rank() OVER (ROWS BETWEEN 2 FOLLOWING AND CURRENT ROW)"); assertFails(INVALID_WINDOW_FRAME, "SELECT rank() OVER (RANGE 2 PRECEDING)"); assertFails(INVALID_WINDOW_FRAME, "SELECT rank() OVER (RANGE BETWEEN 2 PRECEDING AND CURRENT ROW)"); assertFails(INVALID_WINDOW_FRAME, "SELECT rank() OVER (RANGE BETWEEN CURRENT ROW AND 5 FOLLOWING)"); assertFails(INVALID_WINDOW_FRAME, "SELECT rank() OVER (RANGE BETWEEN 2 PRECEDING AND 5 FOLLOWING)"); assertFails(TYPE_MISMATCH, "SELECT rank() OVER (ROWS 0.5 PRECEDING)"); assertFails(TYPE_MISMATCH, "SELECT rank() OVER (ROWS 'foo' PRECEDING)"); assertFails(TYPE_MISMATCH, "SELECT rank() OVER (ROWS BETWEEN CURRENT ROW AND 0.5 FOLLOWING)"); assertFails(TYPE_MISMATCH, "SELECT rank() OVER (ROWS BETWEEN CURRENT ROW AND 'foo' FOLLOWING)"); } @Test public void testDistinctInWindowFunctionParameter() throws Exception { assertFails(NOT_SUPPORTED, "SELECT a, count(DISTINCT b) OVER () FROM t1"); } @Test public void testGroupByOrdinalsWithWildcard() throws Exception { // TODO: verify output analyze("SELECT t1.*, a FROM t1 GROUP BY 1,2,c,d"); } @Test public void testGroupByWithQualifiedName() throws Exception { // TODO: verify output analyze("SELECT a FROM t1 GROUP BY t1.a"); } @Test public void testGroupByWithQualifiedName2() throws Exception { // TODO: verify output analyze("SELECT t1.a FROM t1 GROUP BY a"); } @Test public void 
testGroupByWithQualifiedName3() throws Exception { // TODO: verify output analyze("SELECT * FROM t1 GROUP BY t1.a, t1.b, t1.c, t1.d"); } @Test public void testGroupByWithRowExpression() throws Exception { // TODO: verify output analyze("SELECT (a, b) FROM t1 GROUP BY a, b"); } @Test public void testHaving() throws Exception { // TODO: verify output analyze("SELECT sum(a) FROM t1 HAVING avg(a) - avg(b) > 10"); } @Test public void testWithCaseInsensitiveResolution() throws Exception { // TODO: verify output analyze("WITH AB AS (SELECT * FROM t1) SELECT * FROM ab"); } @Test public void testStartTransaction() throws Exception { analyze("START TRANSACTION"); analyze("START TRANSACTION ISOLATION LEVEL READ UNCOMMITTED"); analyze("START TRANSACTION ISOLATION LEVEL READ COMMITTED"); analyze("START TRANSACTION ISOLATION LEVEL REPEATABLE READ"); analyze("START TRANSACTION ISOLATION LEVEL SERIALIZABLE"); analyze("START TRANSACTION READ ONLY"); analyze("START TRANSACTION READ WRITE"); analyze("START TRANSACTION ISOLATION LEVEL READ COMMITTED, READ ONLY"); analyze("START TRANSACTION READ ONLY, ISOLATION LEVEL READ COMMITTED"); analyze("START TRANSACTION READ WRITE, ISOLATION LEVEL SERIALIZABLE"); } @Test public void testCommit() throws Exception { analyze("COMMIT"); analyze("COMMIT WORK"); } @Test public void testRollback() throws Exception { analyze("ROLLBACK"); analyze("ROLLBACK WORK"); } @Test public void testExplainAnalyze() throws Exception { analyze("EXPLAIN ANALYZE SELECT * FROM t1"); } @Test public void testInsert() throws Exception { assertFails(MISMATCHED_SET_COLUMN_TYPES, "INSERT INTO t6 (a) SELECT b from t6"); analyze("INSERT INTO t1 SELECT * FROM t1"); analyze("INSERT INTO t3 SELECT * FROM t3"); analyze("INSERT INTO t3 SELECT a, b FROM t3"); assertFails(MISMATCHED_SET_COLUMN_TYPES, "INSERT INTO t1 VALUES (1, 2)"); analyze("INSERT INTO t5 (a) VALUES(null)"); // ignore t5 hidden column analyze("INSERT INTO t5 VALUES (1)"); // fail if hidden column provided 
assertFails(MISMATCHED_SET_COLUMN_TYPES, "INSERT INTO t5 VALUES (1, 2)"); // note b is VARCHAR, while a,c,d are BIGINT analyze("INSERT INTO t6 (a) SELECT a from t6"); analyze("INSERT INTO t6 (a) SELECT c from t6"); analyze("INSERT INTO t6 (a,b,c,d) SELECT * from t6"); analyze("INSERT INTO t6 (A,B,C,D) SELECT * from t6"); analyze("INSERT INTO t6 (a,b,c,d) SELECT d,b,c,a from t6"); assertFails(MISMATCHED_SET_COLUMN_TYPES, "INSERT INTO t6 (a) SELECT b from t6"); assertFails(MISSING_COLUMN, "INSERT INTO t6 (unknown) SELECT * FROM t6"); assertFails(DUPLICATE_COLUMN_NAME, "INSERT INTO t6 (a, a) SELECT * FROM t6"); assertFails(DUPLICATE_COLUMN_NAME, "INSERT INTO t6 (a, A) SELECT * FROM t6"); // b is bigint, while a is double, coercion from b to a is possible analyze("INSERT INTO t7 (b) SELECT (a) FROM t7 "); assertFails(MISMATCHED_SET_COLUMN_TYPES, "INSERT INTO t7 (a) SELECT (b) FROM t7"); // d is array of bigints, while c is array of doubles, coercion from d to c is possible analyze("INSERT INTO t7 (d) SELECT (c) FROM t7 "); assertFails(MISMATCHED_SET_COLUMN_TYPES, "INSERT INTO t7 (c) SELECT (d) FROM t7 "); analyze("INSERT INTO t7 (d) VALUES (ARRAY[null])"); analyze("INSERT INTO t6 (d) VALUES (1), (2), (3)"); analyze("INSERT INTO t6 (a,b,c,d) VALUES (1, 'a', 1, 1), (2, 'b', 2, 2), (3, 'c', 3, 3), (4, 'd', 4, 4)"); } @Test public void testInvalidInsert() throws Exception { assertFails(MISSING_TABLE, "INSERT INTO foo VALUES (1)"); assertFails(NOT_SUPPORTED, "INSERT INTO v1 VALUES (1)"); // fail if inconsistent fields count assertFails(MISMATCHED_SET_COLUMN_TYPES, "INSERT INTO t1 (a) VALUES (1), (1, 2)"); assertFails(MISMATCHED_SET_COLUMN_TYPES, "INSERT INTO t1 (a, b) VALUES (1), (1, 2)"); assertFails(MISMATCHED_SET_COLUMN_TYPES, "INSERT INTO t1 (a, b) VALUES (1, 2), (1, 2), (1, 2, 3)"); assertFails(MISMATCHED_SET_COLUMN_TYPES, "INSERT INTO t1 (a, b) VALUES ('a', 'b'), ('a', 'b', 'c')"); // fail if mismatched column types assertFails(MISMATCHED_SET_COLUMN_TYPES, "INSERT 
INTO t1 (a, b) VALUES ('a', 'b'), (1, 'b')"); assertFails(MISMATCHED_SET_COLUMN_TYPES, "INSERT INTO t1 (a, b) VALUES ('a', 'b'), ('a', 'b'), (1, 'b')"); } @Test public void testDuplicateWithQuery() throws Exception { assertFails(DUPLICATE_RELATION, "WITH a AS (SELECT * FROM t1)," + " a AS (SELECT * FROM t1)" + "SELECT * FROM a"); } @Test public void testCaseInsensitiveDuplicateWithQuery() throws Exception { assertFails(DUPLICATE_RELATION, "WITH a AS (SELECT * FROM t1)," + " A AS (SELECT * FROM t1)" + "SELECT * FROM a"); } @Test public void testWithForwardReference() throws Exception { assertFails(MISSING_TABLE, "WITH a AS (SELECT * FROM b)," + " b AS (SELECT * FROM t1)" + "SELECT * FROM a"); } @Test public void testExpressions() throws Exception { // logical not assertFails(TYPE_MISMATCH, "SELECT NOT 1 FROM t1"); // logical and/or assertFails(TYPE_MISMATCH, "SELECT 1 AND TRUE FROM t1"); assertFails(TYPE_MISMATCH, "SELECT TRUE AND 1 FROM t1"); assertFails(TYPE_MISMATCH, "SELECT 1 OR TRUE FROM t1"); assertFails(TYPE_MISMATCH, "SELECT TRUE OR 1 FROM t1"); // comparison assertFails(TYPE_MISMATCH, "SELECT 1 = 'a' FROM t1"); // nullif assertFails(TYPE_MISMATCH, "SELECT NULLIF(1, 'a') FROM t1"); // case assertFails(TYPE_MISMATCH, "SELECT CASE WHEN TRUE THEN 'a' ELSE 1 END FROM t1"); assertFails(TYPE_MISMATCH, "SELECT CASE WHEN '1' THEN 1 ELSE 2 END FROM t1"); assertFails(TYPE_MISMATCH, "SELECT CASE 1 WHEN 'a' THEN 2 END FROM t1"); assertFails(TYPE_MISMATCH, "SELECT CASE 1 WHEN 1 THEN 2 ELSE 'a' END FROM t1"); // coalesce assertFails(TYPE_MISMATCH, "SELECT COALESCE(1, 'a') FROM t1"); // cast assertFails(TYPE_MISMATCH, "SELECT CAST(date '2014-01-01' AS bigint)"); assertFails(TYPE_MISMATCH, "SELECT TRY_CAST(date '2014-01-01' AS bigint)"); assertFails(TYPE_MISMATCH, "SELECT CAST(null AS UNKNOWN)"); assertFails(TYPE_MISMATCH, "SELECT CAST(1 AS MAP)"); assertFails(TYPE_MISMATCH, "SELECT CAST(1 AS ARRAY)"); assertFails(TYPE_MISMATCH, "SELECT CAST(1 AS ROW)"); // arithmetic unary 
assertFails(TYPE_MISMATCH, "SELECT -'a' FROM t1"); assertFails(TYPE_MISMATCH, "SELECT +'a' FROM t1"); // arithmetic addition/subtraction assertFails(TYPE_MISMATCH, "SELECT 'a' + 1 FROM t1"); assertFails(TYPE_MISMATCH, "SELECT 1 + 'a' FROM t1"); assertFails(TYPE_MISMATCH, "SELECT 'a' - 1 FROM t1"); assertFails(TYPE_MISMATCH, "SELECT 1 - 'a' FROM t1"); // like assertFails(TYPE_MISMATCH, "SELECT 1 LIKE 'a' FROM t1"); assertFails(TYPE_MISMATCH, "SELECT 'a' LIKE 1 FROM t1"); assertFails(TYPE_MISMATCH, "SELECT 'a' LIKE 'b' ESCAPE 1 FROM t1"); // extract assertFails(TYPE_MISMATCH, "SELECT EXTRACT(DAY FROM 'a') FROM t1"); // between assertFails(TYPE_MISMATCH, "SELECT 1 BETWEEN 'a' AND 2 FROM t1"); assertFails(TYPE_MISMATCH, "SELECT 1 BETWEEN 0 AND 'b' FROM t1"); assertFails(TYPE_MISMATCH, "SELECT 1 BETWEEN 'a' AND 'b' FROM t1"); // in assertFails(TYPE_MISMATCH, "SELECT * FROM t1 WHERE 1 IN ('a')"); assertFails(TYPE_MISMATCH, "SELECT * FROM t1 WHERE 'a' IN (1)"); assertFails(TYPE_MISMATCH, "SELECT * FROM t1 WHERE 'a' IN (1, 'b')"); // row type assertFails(TYPE_MISMATCH, "SELECT t.x.f1 FROM (VALUES 1) t(x)"); assertFails(TYPE_MISMATCH, "SELECT x.f1 FROM (VALUES 1) t(x)"); } @Test(enabled = false) // TODO: need to support widening conversion for numbers public void testInWithNumericTypes() throws Exception { analyze("SELECT * FROM t1 WHERE 1 IN (1, 2, 3.5)"); } @Test public void testWildcardWithoutFrom() throws Exception { assertFails(WILDCARD_WITHOUT_FROM, "SELECT *"); } @Test public void testReferenceWithoutFrom() throws Exception { assertFails(MISSING_ATTRIBUTE, "SELECT dummy"); } @Test public void testGroupBy() throws Exception { // TODO: validate output analyze("SELECT a, SUM(b) FROM t1 GROUP BY a"); } @Test public void testGroupByEmpty() throws Exception { assertFails(MUST_BE_AGGREGATE_OR_GROUP_BY, "SELECT a FROM t1 GROUP BY ()"); } @Test public void testSingleGroupingSet() throws Exception { // TODO: validate output analyze("SELECT SUM(b) FROM t1 GROUP BY ()"); 
analyze("SELECT SUM(b) FROM t1 GROUP BY GROUPING SETS (())"); analyze("SELECT a, SUM(b) FROM t1 GROUP BY GROUPING SETS (a)"); analyze("SELECT a, SUM(b) FROM t1 GROUP BY GROUPING SETS (a)"); analyze("SELECT a, SUM(b) FROM t1 GROUP BY GROUPING SETS ((a, b))"); } @Test public void testMultipleGroupingSetMultipleColumns() throws Exception { // TODO: validate output analyze("SELECT a, SUM(b) FROM t1 GROUP BY GROUPING SETS ((a, b), (c, d))"); analyze("SELECT a, SUM(b) FROM t1 GROUP BY a, b, GROUPING SETS ((c, d))"); analyze("SELECT a, SUM(b) FROM t1 GROUP BY GROUPING SETS ((a), (c, d))"); analyze("SELECT a, SUM(b) FROM t1 GROUP BY GROUPING SETS ((a, b)), ROLLUP (c, d)"); analyze("SELECT a, SUM(b) FROM t1 GROUP BY GROUPING SETS ((a, b)), CUBE (c, d)"); } @Test public void testAggregateWithWildcard() throws Exception { assertFails(MUST_BE_AGGREGATE_OR_GROUP_BY, "Column 1 not in GROUP BY clause", "SELECT * FROM (SELECT a + 1, b FROM t1) t GROUP BY b ORDER BY 1"); assertFails(MUST_BE_AGGREGATE_OR_GROUP_BY, "Column 't.a' not in GROUP BY clause", "SELECT * FROM (SELECT a, b FROM t1) t GROUP BY b ORDER BY 1"); assertFails(MUST_BE_AGGREGATE_OR_GROUP_BY, "Column 'a' not in GROUP BY clause", "SELECT * FROM (SELECT a, b FROM t1) GROUP BY b ORDER BY 1"); assertFails(MUST_BE_AGGREGATE_OR_GROUP_BY, "Column 1 not in GROUP BY clause", "SELECT * FROM (SELECT a + 1, b FROM t1) GROUP BY b ORDER BY 1"); } @Test public void testGroupByCase() throws Exception { assertFails(MUST_BE_AGGREGATE_OR_GROUP_BY, "SELECT CASE a WHEN 1 THEN 'a' ELSE 'b' END, count(*) FROM t1"); assertFails(MUST_BE_AGGREGATE_OR_GROUP_BY, "SELECT CASE 1 WHEN 2 THEN a ELSE 0 END, count(*) FROM t1"); assertFails(MUST_BE_AGGREGATE_OR_GROUP_BY, "SELECT CASE 1 WHEN 2 THEN 0 ELSE a END, count(*) FROM t1"); assertFails(MUST_BE_AGGREGATE_OR_GROUP_BY, "SELECT CASE WHEN a = 1 THEN 'a' ELSE 'b' END, count(*) FROM t1"); assertFails(MUST_BE_AGGREGATE_OR_GROUP_BY, "SELECT CASE WHEN true THEN a ELSE 0 END, count(*) FROM t1"); 
assertFails(MUST_BE_AGGREGATE_OR_GROUP_BY, "SELECT CASE WHEN true THEN 0 ELSE a END, count(*) FROM t1"); } @Test public void testMismatchedUnionQueries() throws Exception { assertFails(TYPE_MISMATCH, "SELECT 1 UNION SELECT 'a'"); assertFails(TYPE_MISMATCH, "SELECT a FROM t1 UNION SELECT 'a'"); assertFails(TYPE_MISMATCH, "(SELECT 1) UNION SELECT 'a'"); assertFails(MISMATCHED_SET_COLUMN_TYPES, "SELECT 1, 2 UNION SELECT 1"); assertFails(MISMATCHED_SET_COLUMN_TYPES, "SELECT 'a' UNION SELECT 'b', 'c'"); assertFails(MISMATCHED_SET_COLUMN_TYPES, "TABLE t2 UNION SELECT 'a'"); } @Test public void testUnionUnmatchedOrderByAttribute() throws Exception { assertFails(MISSING_ATTRIBUTE, "TABLE t2 UNION ALL SELECT c, d FROM t1 ORDER BY c"); } @Test public void testGroupByComplexExpressions() throws Exception { assertFails(MUST_BE_AGGREGATE_OR_GROUP_BY, "SELECT IF(a IS NULL, 1, 0) FROM t1 GROUP BY b"); assertFails(MUST_BE_AGGREGATE_OR_GROUP_BY, "SELECT IF(a IS NOT NULL, 1, 0) FROM t1 GROUP BY b"); assertFails(MUST_BE_AGGREGATE_OR_GROUP_BY, "SELECT IF(CAST(a AS VARCHAR) LIKE 'a', 1, 0) FROM t1 GROUP BY b"); assertFails(MUST_BE_AGGREGATE_OR_GROUP_BY, "SELECT a IN (1, 2, 3) FROM t1 GROUP BY b"); assertFails(MUST_BE_AGGREGATE_OR_GROUP_BY, "SELECT 1 IN (a, 2, 3) FROM t1 GROUP BY b"); } @Test public void testNonNumericTableSamplePercentage() throws Exception { assertFails(NON_NUMERIC_SAMPLE_PERCENTAGE, "SELECT * FROM t1 TABLESAMPLE BERNOULLI ('a')"); assertFails(NON_NUMERIC_SAMPLE_PERCENTAGE, "SELECT * FROM t1 TABLESAMPLE BERNOULLI (a + 1)"); } @Test public void testTableSampleOutOfRange() throws Exception { assertFails(SAMPLE_PERCENTAGE_OUT_OF_RANGE, "SELECT * FROM t1 TABLESAMPLE BERNOULLI (-1)"); assertFails(SAMPLE_PERCENTAGE_OUT_OF_RANGE, "SELECT * FROM t1 TABLESAMPLE BERNOULLI (-101)"); } @Test public void testCreateTableAsColumns() throws Exception { assertFails(COLUMN_NAME_NOT_SPECIFIED, "CREATE TABLE test AS SELECT 123"); assertFails(DUPLICATE_COLUMN_NAME, "CREATE TABLE test AS 
SELECT 1 a, 2 a"); assertFails(COLUMN_TYPE_UNKNOWN, "CREATE TABLE test AS SELECT null a"); } @Test public void testCreateViewColumns() throws Exception { assertFails(COLUMN_NAME_NOT_SPECIFIED, "CREATE VIEW test AS SELECT 123"); assertFails(DUPLICATE_COLUMN_NAME, "CREATE VIEW test AS SELECT 1 a, 2 a"); assertFails(COLUMN_TYPE_UNKNOWN, "CREATE VIEW test AS SELECT null a"); } @Test public void testShowCreateView() { analyze("SHOW CREATE VIEW v1"); analyze("SHOW CREATE VIEW v2"); assertFails(NOT_SUPPORTED, "SHOW CREATE VIEW t1"); assertFails(MISSING_TABLE, "SHOW CREATE VIEW none"); } @Test public void testStaleView() throws Exception { assertFails(VIEW_IS_STALE, "SELECT * FROM v2"); } @Test public void testStoredViewAnalysisScoping() throws Exception { // the view must not be analyzed using the query context analyze("WITH t1 AS (SELECT 123 x) SELECT * FROM v1"); } @Test public void testStoredViewResolution() throws Exception { // the view must be analyzed relative to its own catalog/schema analyze("SELECT * FROM c3.s3.v3"); } @Test public void testQualifiedViewColumnResolution() throws Exception { // it should be possible to qualify the column reference with the view name analyze("SELECT v1.a FROM v1"); } @Test public void testViewWithUppercaseColumn() throws Exception { analyze("SELECT * FROM v4"); } @Test public void testUse() throws Exception { assertFails(NOT_SUPPORTED, "USE foo"); } @Test public void testNotNullInJoinClause() throws Exception { analyze("SELECT * FROM (VALUES (1)) a (x) JOIN (VALUES (2)) b ON a.x IS NOT NULL"); } @Test public void testIfInJoinClause() throws Exception { analyze("SELECT * FROM (VALUES (1)) a (x) JOIN (VALUES (2)) b ON IF(a.x = 1, true, false)"); } @Test public void testLiteral() throws Exception { assertFails(INVALID_LITERAL, "SELECT TIMESTAMP '2012-10-31 01:00:00 PT'"); } @Test public void testLambda() throws Exception { analyze("SELECT apply(5, x -> abs(x)) from t1"); assertFails(STANDALONE_LAMBDA, "SELECT x -> abs(x) from t1"); } 
@Test public void testInvalidDelete() throws Exception { assertFails(MISSING_TABLE, "DELETE FROM foo"); assertFails(NOT_SUPPORTED, "DELETE FROM v1"); assertFails(NOT_SUPPORTED, "DELETE FROM v1 WHERE a = 1"); } @Test public void testInvalidShowTables() { assertFails(INVALID_SCHEMA_NAME, "SHOW TABLES FROM a.b.c"); Session session = testSessionBuilder() .setCatalog(null) .setSchema(null) .build(); assertFails(session, CATALOG_NOT_SPECIFIED, "SHOW TABLES"); assertFails(session, CATALOG_NOT_SPECIFIED, "SHOW TABLES FROM a"); assertFails(session, MISSING_SCHEMA, "SHOW TABLES FROM c2.unknown"); session = testSessionBuilder() .setCatalog(SECOND_CATALOG) .setSchema(null) .build(); assertFails(session, SCHEMA_NOT_SPECIFIED, "SHOW TABLES"); assertFails(session, MISSING_SCHEMA, "SHOW TABLES FROM unknown"); } @Test public void testInvalidAtTimeZone() throws Exception { assertFails(TYPE_MISMATCH, "SELECT 'abc' AT TIME ZONE 'America/Los_Angeles'"); } @Test public void testValidJoinOnClause() throws Exception { analyze("SELECT * FROM (VALUES (2, 2)) a(x,y) JOIN (VALUES (2, 2)) b(x,y) ON TRUE"); analyze("SELECT * FROM (VALUES (2, 2)) a(x,y) JOIN (VALUES (2, 2)) b(x,y) ON 1=1"); analyze("SELECT * FROM (VALUES (2, 2)) a(x,y) JOIN (VALUES (2, 2)) b(x,y) ON a.x=b.x AND a.y=b.y"); analyze("SELECT * FROM (VALUES (2, 2)) a(x,y) JOIN (VALUES (2, 2)) b(x,y) ON NULL"); } @Test public void testInValidJoinOnClause() throws Exception { assertFails(TYPE_MISMATCH, "SELECT * FROM (VALUES (2, 2)) a(x,y) JOIN (VALUES (2, 2)) b(x,y) ON 1"); assertFails(TYPE_MISMATCH, "SELECT * FROM (VALUES (2, 2)) a(x,y) JOIN (VALUES (2, 2)) b(x,y) ON a.x + b.x"); assertFails(TYPE_MISMATCH, "SELECT * FROM (VALUES (2, 2)) a(x,y) JOIN (VALUES (2, 2)) b(x,y) ON ROW (TRUE)"); assertFails(TYPE_MISMATCH, "SELECT * FROM (VALUES (2, 2)) a(x,y) JOIN (VALUES (2, 2)) b(x,y) ON (a.x=b.x, a.y=b.y)"); } @Test public void testInvalidAggregationFilter() throws Exception { assertFails(NOT_SUPPORTED, "SELECT sum(x) FILTER (WHERE x > 1) 
OVER (PARTITION BY x) FROM (VALUES (1), (2), (2), (4)) t (x)"); assertFails(NOT_SUPPORTED, "SELECT count(DISTINCT x) FILTER (where y = 1) FROM (VALUES (1, 1)) t(x, y)"); } @Test public void testQuantifiedComparisonExpression() throws Exception { analyze("SELECT * FROM t1 WHERE t1.a <= ALL (VALUES 10, 20)"); assertFails(MULTIPLE_FIELDS_FROM_SUBQUERY, "SELECT * FROM t1 WHERE t1.a = ANY (SELECT 1, 2)"); assertFails(TYPE_MISMATCH, "SELECT * FROM t1 WHERE t1.a = SOME (VALUES ('abc'))"); // map is not orderable assertFails(TYPE_MISMATCH, ("SELECT map(ARRAY[1], ARRAY['hello']) < ALL (VALUES map(ARRAY[1], ARRAY['hello']))")); // but map is comparable analyze(("SELECT map(ARRAY[1], ARRAY['hello']) = ALL (VALUES map(ARRAY[1], ARRAY['hello']))")); // HLL is neither orderable nor comparable assertFails(TYPE_MISMATCH, "SELECT cast(NULL AS HyperLogLog) < ALL (VALUES cast(NULL AS HyperLogLog))"); assertFails(TYPE_MISMATCH, "SELECT cast(NULL AS HyperLogLog) = ANY (VALUES cast(NULL AS HyperLogLog))"); } @BeforeMethod(alwaysRun = true) public void setup() throws Exception { TypeManager typeManager = new TypeRegistry(); CatalogManager catalogManager = new CatalogManager(); transactionManager = createTestTransactionManager(catalogManager); accessControl = new AccessControlManager(transactionManager); metadata = new MetadataManager( new FeaturesConfig(), typeManager, new BlockEncodingManager(typeManager), new SessionPropertyManager(), new SchemaPropertyManager(), new TablePropertyManager(), transactionManager); metadata.getFunctionRegistry().addFunctions(ImmutableList.of(APPLY_FUNCTION)); catalogManager.registerCatalog(createTestingCatalog(TPCH_CATALOG, TPCH_CONNECTOR_ID)); catalogManager.registerCatalog(createTestingCatalog(SECOND_CATALOG, SECOND_CONNECTOR_ID)); catalogManager.registerCatalog(createTestingCatalog(THIRD_CATALOG, THIRD_CONNECTOR_ID)); SchemaTableName table1 = new SchemaTableName("s1", "t1"); inSetupTransaction(session -> metadata.createTable(session, TPCH_CATALOG, new 
ConnectorTableMetadata(table1, ImmutableList.of( new ColumnMetadata("a", BIGINT), new ColumnMetadata("b", BIGINT), new ColumnMetadata("c", BIGINT), new ColumnMetadata("d", BIGINT))))); SchemaTableName table2 = new SchemaTableName("s1", "t2"); inSetupTransaction(session -> metadata.createTable(session, TPCH_CATALOG, new ConnectorTableMetadata(table2, ImmutableList.of( new ColumnMetadata("a", BIGINT), new ColumnMetadata("b", BIGINT))))); SchemaTableName table3 = new SchemaTableName("s1", "t3"); inSetupTransaction(session -> metadata.createTable(session, TPCH_CATALOG, new ConnectorTableMetadata(table3, ImmutableList.of( new ColumnMetadata("a", BIGINT), new ColumnMetadata("b", BIGINT), new ColumnMetadata("x", BIGINT, null, true))))); // table in different catalog SchemaTableName table4 = new SchemaTableName("s2", "t4"); inSetupTransaction(session -> metadata.createTable(session, SECOND_CATALOG, new ConnectorTableMetadata(table4, ImmutableList.of( new ColumnMetadata("a", BIGINT))))); // table with a hidden column SchemaTableName table5 = new SchemaTableName("s1", "t5"); inSetupTransaction(session -> metadata.createTable(session, TPCH_CATALOG, new ConnectorTableMetadata(table5, ImmutableList.of( new ColumnMetadata("a", BIGINT), new ColumnMetadata("b", BIGINT, null, true))))); // table with a varchar column SchemaTableName table6 = new SchemaTableName("s1", "t6"); inSetupTransaction(session -> metadata.createTable(session, TPCH_CATALOG, new ConnectorTableMetadata(table6, ImmutableList.of( new ColumnMetadata("a", BIGINT), new ColumnMetadata("b", VARCHAR), new ColumnMetadata("c", BIGINT), new ColumnMetadata("d", BIGINT))))); // table with bigint, double, array of bigints and array of doubles column SchemaTableName table7 = new SchemaTableName("s1", "t7"); inSetupTransaction(session -> metadata.createTable(session, TPCH_CATALOG, new ConnectorTableMetadata(table7, ImmutableList.of( new ColumnMetadata("a", BIGINT), new ColumnMetadata("b", DOUBLE), new ColumnMetadata("c", new 
ArrayType(BIGINT)), new ColumnMetadata("d", new ArrayType(DOUBLE)))))); // valid view referencing table in same schema String viewData1 = JsonCodec.jsonCodec(ViewDefinition.class).toJson( new ViewDefinition( "select a from t1", Optional.of(TPCH_CATALOG), Optional.of("s1"), ImmutableList.of(new ViewColumn("a", BIGINT)), Optional.of("user"))); inSetupTransaction(session -> metadata.createView(session, new QualifiedObjectName(TPCH_CATALOG, "s1", "v1"), viewData1, false)); // stale view (different column type) String viewData2 = JsonCodec.jsonCodec(ViewDefinition.class).toJson( new ViewDefinition( "select a from t1", Optional.of(TPCH_CATALOG), Optional.of("s1"), ImmutableList.of(new ViewColumn("a", VARCHAR)), Optional.of("user"))); inSetupTransaction(session -> metadata.createView(session, new QualifiedObjectName(TPCH_CATALOG, "s1", "v2"), viewData2, false)); // view referencing table in different schema from itself and session String viewData3 = JsonCodec.jsonCodec(ViewDefinition.class).toJson( new ViewDefinition( "select a from t4", Optional.of(SECOND_CATALOG), Optional.of("s2"), ImmutableList.of(new ViewColumn("a", BIGINT)), Optional.of("owner"))); inSetupTransaction(session -> metadata.createView(session, new QualifiedObjectName(THIRD_CATALOG, "s3", "v3"), viewData3, false)); // valid view with uppercase column name String viewData4 = JsonCodec.jsonCodec(ViewDefinition.class).toJson( new ViewDefinition( "select A from t1", Optional.of("tpch"), Optional.of("s1"), ImmutableList.of(new ViewColumn("a", BIGINT)), Optional.of("user"))); inSetupTransaction(session -> metadata.createView(session, new QualifiedObjectName("tpch", "s1", "v4"), viewData4, false)); this.metadata = metadata; } private void inSetupTransaction(Consumer<Session> consumer) { transaction(transactionManager, accessControl) .singleStatement() .readUncommitted() .execute(SETUP_SESSION, consumer); } private static Analyzer createAnalyzer(Session session, Metadata metadata) { return new Analyzer( session, 
metadata, SQL_PARSER, new AllowAllAccessControl(), Optional.empty(), emptyList()); } private void analyze(@Language("SQL") String query) { analyze(CLIENT_SESSION, query); } private void analyze(Session clientSession, @Language("SQL") String query) { transaction(transactionManager, accessControl) .singleStatement() .readUncommitted() .readOnly() .execute(clientSession, session -> { Analyzer analyzer = createAnalyzer(session, metadata); Statement statement = SQL_PARSER.createStatement(query); analyzer.analyze(statement); }); } private void assertFails(SemanticErrorCode error, @Language("SQL") String query) { assertFails(CLIENT_SESSION, error, query); } private void assertFails(SemanticErrorCode error, String message, @Language("SQL") String query) { assertFails(CLIENT_SESSION, error, message, query); } private void assertFails(Session session, SemanticErrorCode error, @Language("SQL") String query) { try { analyze(session, query); fail(format("Expected error %s, but analysis succeeded", error)); } catch (SemanticException e) { if (e.getCode() != error) { fail(format("Expected error %s, but found %s: %s", error, e.getCode(), e.getMessage()), e); } } } private void assertFails(Session session, SemanticErrorCode error, String message, @Language("SQL") String query) { try { analyze(session, query); fail(format("Expected error %s, but analysis succeeded", error)); } catch (SemanticException e) { if (e.getCode() != error) { fail(format("Expected error %s, but found %s: %s", error, e.getCode(), e.getMessage()), e); } if (!e.getMessage().equals(message)) { fail(format("Expected error '%s', but got '%s'", message, e.getMessage()), e); } } } private Catalog createTestingCatalog(String catalogName, ConnectorId connectorId) { ConnectorId systemId = createSystemTablesConnectorId(connectorId); Connector connector = createTestingConnector(); InternalNodeManager nodeManager = new InMemoryNodeManager(); return new Catalog( catalogName, connectorId, connector, 
createInformationSchemaConnectorId(connectorId), new InformationSchemaConnector(catalogName, nodeManager, metadata), systemId, new SystemConnector( systemId, nodeManager, connector.getSystemTables(), transactionId -> transactionManager.getConnectorTransaction(transactionId, connectorId))); } private static Connector createTestingConnector() { return new Connector() { private final ConnectorMetadata metadata = new TestingMetadata(); @Override public ConnectorTransactionHandle beginTransaction(IsolationLevel isolationLevel, boolean readOnly) { return new ConnectorTransactionHandle() {}; } @Override public ConnectorMetadata getMetadata(ConnectorTransactionHandle transaction) { return metadata; } @Override public ConnectorSplitManager getSplitManager() { throw new UnsupportedOperationException(); } }; } }
albertocsm/presto
presto-main/src/test/java/com/facebook/presto/sql/analyzer/TestAnalyzer.java
Java
apache-2.0
51,064
package com.jpattern.core.util;

/**
 * Extension of {@link com.jpattern.shared.util.ValueUtil} with additional
 * type-safe casting helpers.
 *
 * @author Francesco Cina' 09/nov/2010 - 08.51.03
 */
public abstract class ValueUtilExt extends com.jpattern.shared.util.ValueUtil {

    /**
     * Casts {@code objectToCast} to {@code aClass}, falling back to
     * {@code defaultValue} when the object is {@code null} or is not an
     * instance of the requested type.
     *
     * @param aClass       target type of the cast
     * @param objectToCast object to cast; may be {@code null}
     * @param defaultValue value returned when the cast is not possible
     * @return the object as an {@code E}, or {@code defaultValue}
     */
    public static <E> E castObjectNotNull(Class<E> aClass, Object objectToCast, E defaultValue) {
        // isInstance(null) is false, so this single check also covers the null case.
        if (!aClass.isInstance(objectToCast)) {
            return defaultValue;
        }
        // Class.cast performs a checked cast, removing the need for the
        // unchecked (E) cast and its @SuppressWarnings annotation.
        return aClass.cast(objectToCast);
    }
}
ufoscout/jpattern
core/src/main/java/com/jpattern/core/util/ValueUtilExt.java
Java
apache-2.0
435
using NUnit.Framework;
using System;
using Foundation;
using OptimizelyiOS;

namespace Optimizely.iOS.Xamarin.TutorialApp.Test
{
    /// <summary>
    /// Smoke tests for the <see cref="OptimizelyCodeBlocksKey"/> binding.
    /// </summary>
    [TestFixture]
    public class OptimizelyCodeBlocksKeyTests
    {
        [Test]
        public void GetOptimizelyCodeBlocksKey()
        {
            // The factory must accept a key name with an empty block-name array
            // without throwing. Assert.DoesNotThrow replaces the original
            // try/catch + Assert.Fail(e.Message) + Assert.Pass() pattern, which
            // discarded the exception's stack trace on failure.
            Assert.DoesNotThrow(
                () => OptimizelyCodeBlocksKey.GetOptimizelyCodeBlocksKey("string", new NSObject[] { }));
        }
    }
}
ScoreBig/XamarinBindings
Optimizely.iOS/Optimizely.iOS.TutorialApp.Tests/OptimizelyCodeBlocksKeyTests.cs
C#
apache-2.0
480
// jQuery DOM-traversal module (AMD). Defines the static helpers
// jQuery.dir/jQuery.sibling, the instance methods has/closest/index/add/addBack,
// and generates the family of traversal methods (parent, parents, next, prev,
// siblings, children, contents, *Until, *All) from a single table.
define([
    './core',
    './var/indexOf',
    './traversing/var/rneedsContext',
    './core/init',
    './traversing/findFilter',
    './selector'
], function(jQuery, indexOf, rneedsContext) {

var rparentsprev = /^(?:parents|prev(?:Until|All))/,
    // Methods guaranteed to produce a unique set when starting from a unique set
    guaranteedUnique = {
        children: true,
        contents: true,
        next: true,
        prev: true
    };

jQuery.extend({
    // Walk from elem in direction `dir` ('parentNode', 'nextSibling', ...),
    // collecting element nodes; stop (exclusively) at a node matching `until`.
    dir: function(elem, dir, until) {
        var matched = [],
            truncate = until !== undefined;

        while ((elem = elem[dir]) && elem.nodeType !== 9) {
            if (elem.nodeType === 1) {
                if (truncate && jQuery(elem).is(until)) {
                    break;
                }
                matched.push(elem);
            }
        }
        return matched;
    },

    // Collect all element-node siblings starting at n, excluding elem itself.
    sibling: function(n, elem) {
        var matched = [];

        for (; n; n = n.nextSibling) {
            if (n.nodeType === 1 && n !== elem) {
                matched.push(n);
            }
        }

        return matched;
    }
});

jQuery.fn.extend({
    // Reduce the set to elements that contain at least one node matching target.
    has: function(target) {
        var targets = jQuery(target, this),
            l = targets.length;

        return this.filter(function() {
            var i = 0;
            for (; i < l; i++) {
                if (jQuery.contains(this, targets[i])) {
                    return true;
                }
            }
        });
    },

    // For each element, walk up the ancestor chain (including the element
    // itself) and keep the first node matching `selectors`.
    closest: function(selectors, context) {
        var cur,
            i = 0,
            l = this.length,
            matched = [],
            pos = rneedsContext.test(selectors) || typeof selectors !== 'string' ?
                jQuery(selectors, context || this.context) :
                0;

        for (; i < l; i++) {
            for (cur = this[i]; cur && cur !== context; cur = cur.parentNode) {
                // Always skip document fragments
                if (cur.nodeType < 11 && (pos ?
                    pos.index(cur) > -1 :

                    // Don't pass non-elements to Sizzle
                    cur.nodeType === 1 &&
                        jQuery.find.matchesSelector(cur, selectors))) {

                    matched.push(cur);
                    break;
                }
            }
        }

        return this.pushStack(matched.length > 1 ? jQuery.unique(matched) : matched);
    },

    // Determine the position of an element within the set
    index: function(elem) {

        // No argument, return index in parent
        if (!elem) {
            return (this[0] && this[0].parentNode) ? this.first().prevAll().length : -1;
        }

        // Index in selector
        if (typeof elem === 'string') {
            return indexOf.call(jQuery(elem), this[0]);
        }

        // Locate the position of the desired element
        return indexOf.call(this,

            // If it receives a jQuery object, the first element is used
            elem.jquery ? elem[0] : elem
        );
    },

    // Merge the elements matched by selector/context into the current set.
    add: function(selector, context) {
        return this.pushStack(
            jQuery.unique(
                jQuery.merge(this.get(), jQuery(selector, context))
            )
        );
    },

    // Add the previous set on the stack (optionally filtered) back in.
    addBack: function(selector) {
        return this.add(selector == null ?
            this.prevObject : this.prevObject.filter(selector)
        );
    }
});

// Return the nearest element-node sibling of cur in the given direction,
// skipping text/comment nodes; undefined/null when none exists.
function sibling(cur, dir) {
    while ((cur = cur[dir]) && cur.nodeType !== 1) { }
    return cur;
}

// Table of primitive traversal implementations; the callback below wraps each
// one into a jQuery.fn method with shared filtering/uniquing/ordering logic.
jQuery.each({
    parent: function(elem) {
        var parent = elem.parentNode;
        return parent && parent.nodeType !== 11 ? parent : null;
    },
    parents: function(elem) {
        return jQuery.dir(elem, 'parentNode');
    },
    parentsUntil: function(elem, i, until) {
        return jQuery.dir(elem, 'parentNode', until);
    },
    next: function(elem) {
        return sibling(elem, 'nextSibling');
    },
    prev: function(elem) {
        return sibling(elem, 'previousSibling');
    },
    nextAll: function(elem) {
        return jQuery.dir(elem, 'nextSibling');
    },
    prevAll: function(elem) {
        return jQuery.dir(elem, 'previousSibling');
    },
    nextUntil: function(elem, i, until) {
        return jQuery.dir(elem, 'nextSibling', until);
    },
    prevUntil: function(elem, i, until) {
        return jQuery.dir(elem, 'previousSibling', until);
    },
    siblings: function(elem) {
        return jQuery.sibling((elem.parentNode || {}).firstChild, elem);
    },
    children: function(elem) {
        return jQuery.sibling(elem.firstChild);
    },
    contents: function(elem) {
        return elem.contentDocument || jQuery.merge([], elem.childNodes);
    }
}, function(name, fn) {
    jQuery.fn[name] = function(until, selector) {
        var matched = jQuery.map(this, fn, until);

        // Only the *Until methods take an `until` argument; for all others the
        // first argument is actually the optional selector filter.
        if (name.slice(-5) !== 'Until') {
            selector = until;
        }

        if (selector && typeof selector === 'string') {
            matched = jQuery.filter(selector, matched);
        }

        if (this.length > 1) {
            // Remove duplicates
            if (!guaranteedUnique[name]) {
                jQuery.unique(matched);
            }

            // Reverse order for parents* and prev-derivatives
            if (rparentsprev.test(name)) {
                matched.reverse();
            }
        }

        return this.pushStack(matched);
    };
});

return jQuery;
});
amido/Amido.VersionDashboard
src/Amido.VersionDashboard.Web/bower_components/jquery/src/traversing.js
JavaScript
apache-2.0
6,056
package com.waiso.social.framework.io;

import java.io.Serializable;

/**
 * Tracks the progress of a multi-file generation job: the file currently
 * being produced, how many files are done, and the overall percentage.
 */
public class ProgressFileGeneration implements Serializable {

    private static final long serialVersionUID = 577962636625103658L;

    // Base name (no directory part) of the file currently being generated.
    private String currentFileGeneration = "";
    // Overall completion percentage, 0-100.
    private int currentPercent = 0;
    // Total number of files the job is expected to produce.
    private int totalFiles = 0;
    // Number of files already produced.
    private int processed = 0;
    // Number of files queued but not yet started.
    private int waitProcess = 0;

    public String getCurrentFileGeneration() {
        return currentFileGeneration;
    }

    /**
     * Stores only the base name of the given path (text after the last '/');
     * a {@code null} argument leaves the current value unchanged.
     */
    public void setCurrentFileGeneration(String currentFileGeneration) {
        if (currentFileGeneration != null) {
            int index = currentFileGeneration.lastIndexOf("/");
            this.currentFileGeneration = (index != -1)
                    ? currentFileGeneration.substring(index + 1)
                    : currentFileGeneration;
        }
    }

    public int getCurrentPercent() {
        return currentPercent;
    }

    public void setCurrentPercent(int currentPercent) {
        this.currentPercent = currentPercent;
    }

    public int getTotalFiles() {
        return totalFiles;
    }

    public void setTotalFiles(int totalFiles) {
        this.totalFiles = totalFiles;
    }

    public void setProcessed(int processed) {
        this.processed = processed;
    }

    public int getProcessed() {
        return processed;
    }

    public int getWaitProcess() {
        return waitProcess;
    }

    public void setWaitProcess(int waitProcess) {
        this.waitProcess = waitProcess;
    }

    /**
     * Recomputes {@link #getCurrentPercent()} from {@code processed} and
     * {@code totalFiles}. Guards against the division-by-zero the original
     * code allowed: when no files are expected the percentage is 0.
     */
    public void calculaPercentual() {
        // Integer division already floors for non-negative operands, so the
        // original (int) Math.floor(...) round-trip is unnecessary.
        this.currentPercent = (totalFiles > 0) ? (100 * processed / totalFiles) : 0;
    }

    /** @return {@code true} once every expected file has been processed. */
    public boolean finalized() {
        return totalFiles <= processed;
    }
}
g6tech/waiso-social
waiso-social-framework/src/main/java/com/waiso/social/framework/io/ProgressFileGeneration.java
Java
apache-2.0
1,519
/*
 * Copyright 2006 The National Library of New Zealand
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.webcurator.ui.profiles.renderers;

import java.io.IOException;

import javax.servlet.jsp.JspWriter;
import javax.servlet.jsp.PageContext;

import org.webcurator.core.profiles.ProfileElement;

/**
 * Renders a profile element's value as an HTML textarea input.
 * @author bbeaumont
 */
public class TextFieldRenderer extends Renderer {

	/**
	 * Writes a {@code <textarea>} for the given profile element to the page's
	 * JSP writer. The element name and value are HTML-escaped so that markup
	 * characters in the value (e.g. a literal {@code </textarea>}) cannot
	 * break out of the element or inject script (XSS).
	 *
	 * @param element the profile element to render
	 * @param context the JSP page context whose writer receives the markup
	 * @param filter  renderer filter (unused here, part of the Renderer contract)
	 * @throws IOException if writing to the JSP writer fails
	 */
	public void render(ProfileElement element, PageContext context, RendererFilter filter) throws IOException {
		JspWriter out = context.getOut();
		out.print("<textarea name=\"");
		out.print(escapeHtml(element.getAbsoluteName()));
		out.print("\">");
		out.print(escapeHtml(element.getValue()));
		out.print("</textarea><br/>");
	}

	/**
	 * Minimal HTML entity escaping for text placed inside element content or
	 * a double-quoted attribute. A {@code null} value renders as an empty
	 * string (the original code would have printed the literal text "null").
	 */
	private static String escapeHtml(Object value) {
		if (value == null) {
			return "";
		}
		String text = value.toString();
		StringBuilder sb = new StringBuilder(text.length());
		for (int i = 0; i < text.length(); i++) {
			char c = text.charAt(i);
			switch (c) {
			case '&':
				sb.append("&amp;");
				break;
			case '<':
				sb.append("&lt;");
				break;
			case '>':
				sb.append("&gt;");
				break;
			case '"':
				sb.append("&quot;");
				break;
			default:
				sb.append(c);
			}
		}
		return sb.toString();
	}
}
DIA-NZ/webcurator
wct-core/src/main/java/org/webcurator/ui/profiles/renderers/TextFieldRenderer.java
Java
apache-2.0
1,481
package com.roche.heatseq.process;

import java.io.File;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NoSuchElementException;
import java.util.Set;

import com.roche.sequencing.bioinformatics.common.utils.DateUtil;
import com.roche.sequencing.bioinformatics.common.utils.probeinfo.Probe;

/**
 * File-backed store that maps read names to probe ids using an embedded H2 database.
 */
public class H2ReadToProbeFileStore {

    // NOTE: This was part of an attempt to push a memory stored map into file storage. In the end the
    // query times were a big issue so the effort was stopped. This is being placed here just in case this solution
    // is needed in the short term. If you are reading this and have no idea what it is feel free to delete it.
    // Also see ReadToProbeAssignmentResultsWithFileStore
    // Kurt Heilman

    private static final String DB_DRIVER = "org.h2.Driver";
    private static final String DB_FILE_CONNECTION_PREFIX = "jdbc:h2:file:";
    // connection options that trade durability for bulk-load speed (no transaction log, no undo log, no locking)
    private static final String DB_FILE_FAST_DATABASE_IMPORT_SUFFIX = ";LOG=0;CACHE_SIZE=65536;LOCK_MODE=0;UNDO_LOG=0";
    private static final String DB_USER = "";
    private static final String DB_PASSWORD = "";

    // number of queued inserts that triggers a batch flush
    private static final int INSERT_SIZE = 100000;

    private static final String READ_NAME_STRING_SIZE_PATTERN = "&READ_NAME_STRING_SIZE&";
    private static final String PROBE_ID_STRING_SIZE_PATTERN = "&PROBE_ID_STRING_SIZE&";

    private static final String CREATE_QUERY = "CREATE TABLE READ2PROBE(readName varchar(" + READ_NAME_STRING_SIZE_PATTERN + "), probeId varchar(" + PROBE_ID_STRING_SIZE_PATTERN + "));";
    private static final String INSERT_QUERY = "INSERT INTO READ2PROBE" + "(readName, probeId) values" + "(?,?);";
    private static final String SELECT_PROBE_ID_BY_READ_NAME_QUERY = "SELECT probeId FROM READ2PROBE WHERE readName = ?;";
    private static final String SELECT_ALL_READ_NAMES_QUERY = "SELECT DISTINCT(readName) FROM READ2PROBE;";
    private static final String ADD_READ_NAME_INDEX_QUERY = "CREATE INDEX IF NOT EXISTS readNameIndex ON READ2PROBE(readName);";

    // NOTE(review): a single static connection is shared by all instances, so only one store (one db file)
    // can be active per JVM; left unchanged to preserve the existing interface and behavior.
    private static Connection connection;

    private final PreparedStatement insertStatement;
    private final PreparedStatement selectProbeIdByReadNameStatement;
    private final PreparedStatement selectAllReadNamesStatement;
    private final PreparedStatement createReadNameIndexStatement;

    /**
     * Creates the READ2PROBE table in the given db file and prepares the statements used by this store.
     * A shutdown hook closes the connection and deletes the db file(s) on JVM exit.
     *
     * @param dbFile base file for the H2 database (H2 creates companion files with this name as prefix)
     * @param readNameStringSize varchar width for the readName column
     * @param probeIdStringSize varchar width for the probeId column
     */
    public H2ReadToProbeFileStore(File dbFile, int readNameStringSize, int probeIdStringSize) {
        Connection connection = getDBConnection(dbFile);
        try {
            // create the table with column widths sized to the caller's data
            Statement statement = connection.createStatement();
            String createQuery = CREATE_QUERY.replaceAll(READ_NAME_STRING_SIZE_PATTERN, "" + readNameStringSize);
            createQuery = createQuery.replaceAll(PROBE_ID_STRING_SIZE_PATTERN, "" + probeIdStringSize);
            statement.execute(createQuery);
            statement.close();
            this.insertStatement = connection.prepareStatement(INSERT_QUERY);
            this.selectProbeIdByReadNameStatement = connection.prepareStatement(SELECT_PROBE_ID_BY_READ_NAME_QUERY);
            this.selectAllReadNamesStatement = connection.prepareStatement(SELECT_ALL_READ_NAMES_QUERY);
            this.createReadNameIndexStatement = connection.prepareStatement(ADD_READ_NAME_INDEX_QUERY);
        } catch (SQLException e) {
            throw new IllegalStateException(e.getMessage(), e);
        }
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                try {
                    connection.close();
                } catch (SQLException e) {
                    throw new IllegalStateException(e.getMessage(), e);
                }
                // remove the db file plus any companion files H2 created alongside it
                File[] siblingFiles = dbFile.getParentFile().listFiles();
                // fix: listFiles() returns null if the parent directory no longer exists; guard against NPE
                if (siblingFiles != null) {
                    for (File file : siblingFiles) {
                        if (file.getName().startsWith(dbFile.getName())) {
                            file.delete();
                        }
                    }
                }
            }
        });
    }

    /**
     * Inserts every (readName, probeId) pair into the store, flushing JDBC batches of INSERT_SIZE rows.
     *
     * @param readNameToProbe map of read name to the probes assigned to that read
     */
    public void putAll(Map<String, Set<Probe>> readNameToProbe) {
        synchronized (insertStatement) {
            int count = 0;
            for (Entry<String, Set<Probe>> entry : readNameToProbe.entrySet()) {
                String readName = entry.getKey();
                for (Probe probe : entry.getValue()) {
                    try {
                        insertStatement.setString(1, readName);
                        insertStatement.setString(2, probe.getProbeId());
                        insertStatement.addBatch();
                        count++;
                    } catch (SQLException e) {
                        throw new IllegalStateException(e.getMessage(), e);
                    }
                    // fix: was (count > INSERT_SIZE) which only flushed at INSERT_SIZE + 1 rows
                    if (count >= INSERT_SIZE) {
                        try {
                            insertStatement.executeBatch();
                            insertStatement.clearBatch();
                        } catch (SQLException e) {
                            throw new IllegalStateException(e.getMessage(), e);
                        }
                        count = 0;
                    }
                }
            }
            // flush the final partial batch
            try {
                insertStatement.executeBatch();
                insertStatement.clearBatch();
            } catch (SQLException e) {
                throw new IllegalStateException(e.getMessage(), e);
            }
        }
    }

    /**
     * Creates the readName index (if it does not already exist) to speed up getProbeIds lookups.
     * Call after bulk loading, since maintaining the index during inserts slows the load down.
     */
    public void indexReadNames() {
        System.out.println("starting to index");
        long start = System.currentTimeMillis();
        // index on read name
        try {
            createReadNameIndexStatement.executeUpdate();
        } catch (SQLException e) {
            throw new IllegalStateException(e.getMessage(), e);
        }
        long stop = System.currentTimeMillis();
        System.out.println("done indexing in:" + DateUtil.convertMillisecondsToHHMMSSMMM(stop - start));
    }

    /**
     * @return an iterator over the distinct read names currently stored
     */
    public Iterator<String> getReadNames() {
        Iterator<String> readNameIter = null;
        synchronized (selectAllReadNamesStatement) {
            try {
                ResultSet resultSet = selectAllReadNamesStatement.executeQuery();
                readNameIter = new ResultSetIterator(resultSet);
            } catch (SQLException e) {
                throw new IllegalStateException(e.getMessage(), e);
            }
        }
        return readNameIter;
    }

    /**
     * Iterator over the readName column of a ResultSet.
     *
     * fix: the original advanced the cursor inside hasNext(), so calling hasNext() twice in a row
     * skipped a row, and next() without a preceding hasNext() read an invalid row. This version
     * prefetches one value so hasNext() is idempotent and next() honors the Iterator contract.
     */
    private static class ResultSetIterator implements Iterator<String> {

        private final ResultSet resultSet;
        private String next; // prefetched value, null when not yet fetched or exhausted
        private boolean done;

        public ResultSetIterator(ResultSet resultSet) {
            this.resultSet = resultSet;
        }

        @Override
        public boolean hasNext() {
            if (next == null && !done) {
                advance();
            }
            return next != null;
        }

        // pull the next readName from the result set, or mark the iterator exhausted
        private void advance() {
            try {
                if (resultSet != null && resultSet.next()) {
                    next = resultSet.getString("readName");
                } else {
                    done = true;
                }
            } catch (SQLException e) {
                // best-effort: treat a read failure as end-of-results (matches the original behavior)
                done = true;
            }
        }

        @Override
        public String next() {
            if (!hasNext()) {
                throw new NoSuchElementException();
            }
            String result = next;
            next = null;
            return result;
        }
    }

    /**
     * Looks up all probe ids assigned to the given read name. Retries once on failure; a second
     * failure is reported instead of being silently swallowed as in the original implementation.
     *
     * @param readName the read name to look up
     * @return the probe ids assigned to the read (empty set if none)
     */
    public Set<String> getProbeIds(String readName) {
        synchronized (selectProbeIdByReadNameStatement) {
            try {
                return queryProbeIds(readName);
            } catch (SQLException e) {
                // one retry for transient failures
                try {
                    return queryProbeIds(readName);
                } catch (SQLException e2) {
                    throw new IllegalStateException("Unable to retrieve probeIds for readName[" + readName + "]. " + e2.getMessage(), e2);
                }
            }
        }
    }

    // single execution of the probeId-by-readName query; caller holds the statement lock
    private Set<String> queryProbeIds(String readName) throws SQLException {
        Set<String> probeIds = new HashSet<String>();
        selectProbeIdByReadNameStatement.setString(1, readName);
        ResultSet resultSet = selectProbeIdByReadNameStatement.executeQuery();
        try {
            while (resultSet.next()) {
                probeIds.add(resultSet.getString("probeId"));
            }
        } finally {
            // fix: the original leaked this ResultSet
            resultSet.close();
        }
        return probeIds;
    }

    /**
     * Closes the shared database connection (no-op if it was never opened).
     */
    public void close() {
        if (connection != null) {
            try {
                connection.close();
            } catch (SQLException e) {
                throw new IllegalStateException(e.getMessage(), e);
            }
        }
    }

    // lazily opens the shared connection to the given db file using the fast-import settings
    private static Connection getDBConnection(File dbFile) {
        if (connection == null) {
            try {
                Class.forName(DB_DRIVER);
            } catch (ClassNotFoundException e) {
                // kept best-effort: DriverManager may still locate the driver via the service loader
                System.out.println(e.getMessage());
            }
            try {
                connection = DriverManager.getConnection(DB_FILE_CONNECTION_PREFIX + dbFile.getAbsolutePath() + DB_FILE_FAST_DATABASE_IMPORT_SUFFIX, DB_USER, DB_PASSWORD);
            } catch (SQLException e) {
                throw new IllegalStateException(e.getMessage(), e);
            }
        }
        return connection;
    }
}
NimbleGen/bioinformatics
nimblegen_heatseq/src/test/java/com/roche/heatseq/process/H2ReadToProbeFileStore.java
Java
apache-2.0
8,044
/*******************************************************************************
 * Copyright 2017 osswangxining@163.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy
 * of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 ******************************************************************************/
/**
 * Copyright © 2016-2017 The Thingsboard Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.iotp.infomgt.dao.component;

import java.util.List;
import java.util.Optional;

import org.apache.commons.lang3.StringUtils;
import org.iotp.infomgt.dao.exception.DataValidationException;
import org.iotp.infomgt.dao.exception.IncorrectParameterException;
import org.iotp.infomgt.dao.util.DataValidator;
import org.iotp.infomgt.dao.util.Validator;
import org.iotp.infomgt.data.id.ComponentDescriptorId;
import org.iotp.infomgt.data.page.TextPageData;
import org.iotp.infomgt.data.page.TextPageLink;
import org.iotp.infomgt.data.plugin.ComponentDescriptor;
import org.iotp.infomgt.data.plugin.ComponentScope;
import org.iotp.infomgt.data.plugin.ComponentType;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import com.fasterxml.jackson.databind.JsonNode;
import com.github.fge.jsonschema.core.exceptions.ProcessingException;
import com.github.fge.jsonschema.core.report.ProcessingReport;
import com.github.fge.jsonschema.main.JsonSchemaFactory;
import com.github.fge.jsonschema.main.JsonValidator;

import lombok.extern.slf4j.Slf4j;

/**
 * CRUD and validation service for plugin {@link ComponentDescriptor}s.
 *
 * @author Andrew Shvayka
 */
@Service
@Slf4j
public class BaseComponentDescriptorService implements ComponentDescriptorService {

    @Autowired
    private ComponentDescriptorDao componentDescriptorDao;

    /** Sanity checks applied to every descriptor before it is persisted. */
    private final DataValidator<ComponentDescriptor> componentValidator = new DataValidator<ComponentDescriptor>() {
        @Override
        protected void validateDataImpl(ComponentDescriptor descriptor) {
            if (descriptor.getType() == null) {
                throw new DataValidationException("Component type should be specified!.");
            }
            if (descriptor.getScope() == null) {
                throw new DataValidationException("Component scope should be specified!.");
            }
            if (StringUtils.isEmpty(descriptor.getName())) {
                throw new DataValidationException("Component name should be specified!.");
            }
            if (StringUtils.isEmpty(descriptor.getClazz())) {
                throw new DataValidationException("Component clazz should be specified!.");
            }
        }
    };

    /**
     * Validates and saves the component; if a descriptor with the same clazz already exists,
     * the existing one is returned instead.
     */
    @Override
    public ComponentDescriptor saveComponent(ComponentDescriptor component) {
        componentValidator.validate(component);
        Optional<ComponentDescriptor> saved = componentDescriptorDao.saveIfNotExist(component);
        // fall back to the previously stored descriptor when the insert was skipped
        return saved.orElseGet(() -> componentDescriptorDao.findByClazz(component.getClazz()));
    }

    /** Looks a descriptor up by its id. */
    @Override
    public ComponentDescriptor findById(ComponentDescriptorId componentId) {
        Validator.validateId(componentId, "Incorrect component id for search request.");
        return componentDescriptorDao.findById(componentId);
    }

    /** Looks a descriptor up by its fully-qualified class name. */
    @Override
    public ComponentDescriptor findByClazz(String clazz) {
        Validator.validateString(clazz, "Incorrect clazz for search request.");
        return componentDescriptorDao.findByClazz(clazz);
    }

    /** Returns one page of descriptors of the given type. */
    @Override
    public TextPageData<ComponentDescriptor> findByTypeAndPageLink(ComponentType type, TextPageLink pageLink) {
        Validator.validatePageLink(pageLink, "Incorrect PageLink object for search plugin components request.");
        List<ComponentDescriptor> descriptors = componentDescriptorDao.findByTypeAndPageLink(type, pageLink);
        return new TextPageData<>(descriptors, pageLink);
    }

    /** Returns one page of descriptors matching both scope and type. */
    @Override
    public TextPageData<ComponentDescriptor> findByScopeAndTypeAndPageLink(ComponentScope scope, ComponentType type, TextPageLink pageLink) {
        Validator.validatePageLink(pageLink, "Incorrect PageLink object for search plugin components request.");
        List<ComponentDescriptor> descriptors = componentDescriptorDao.findByScopeAndTypeAndPageLink(scope, type, pageLink);
        return new TextPageData<>(descriptors, pageLink);
    }

    /** Removes the descriptor with the given class name. */
    @Override
    public void deleteByClazz(String clazz) {
        Validator.validateString(clazz, "Incorrect clazz for delete request.");
        componentDescriptorDao.deleteByClazz(clazz);
    }

    /**
     * Checks the supplied configuration against the JSON schema embedded in the
     * component's configuration descriptor.
     *
     * @return true when the configuration satisfies the schema
     */
    @Override
    public boolean validate(ComponentDescriptor component, JsonNode configuration) {
        JsonNode configurationDescriptor = component.getConfigurationDescriptor();
        if (!configurationDescriptor.has("schema")) {
            throw new DataValidationException("Configuration descriptor doesn't contain schema property!");
        }
        JsonNode configurationSchema = configurationDescriptor.get("schema");
        JsonValidator schemaValidator = JsonSchemaFactory.byDefault().getValidator();
        try {
            ProcessingReport report = schemaValidator.validate(configurationSchema, configuration);
            return report.isSuccess();
        } catch (ProcessingException e) {
            throw new IncorrectParameterException(e.getMessage(), e);
        }
    }
}
osswangxining/iotplatform
iot-infomgt/iot-infomgt-dao/src/main/java/org/iotp/infomgt/dao/component/BaseComponentDescriptorService.java
Java
apache-2.0
6,382
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.mediaconvert.model.transform;

import javax.annotation.Generated;

import com.amazonaws.SdkClientException;
import com.amazonaws.services.mediaconvert.model.*;

import com.amazonaws.protocol.*;
import com.amazonaws.annotation.SdkInternalApi;

/**
 * H264QvbrSettingsMarshaller
 *
 * Code-generated marshaller (see the {@code @Generated} annotation) that writes the fields of an
 * {@link H264QvbrSettings} object into a request payload. Do not edit by hand; changes will be
 * lost on regeneration.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class H264QvbrSettingsMarshaller {

    // One MarshallingInfo per model field, binding the field to its payload location name.
    private static final MarshallingInfo<Integer> MAXAVERAGEBITRATE_BINDING = MarshallingInfo.builder(MarshallingType.INTEGER)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("maxAverageBitrate").build();
    private static final MarshallingInfo<Integer> QVBRQUALITYLEVEL_BINDING = MarshallingInfo.builder(MarshallingType.INTEGER)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("qvbrQualityLevel").build();
    private static final MarshallingInfo<Double> QVBRQUALITYLEVELFINETUNE_BINDING = MarshallingInfo.builder(MarshallingType.DOUBLE)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("qvbrQualityLevelFineTune").build();

    // Stateless, so a single shared instance is used.
    private static final H264QvbrSettingsMarshaller instance = new H264QvbrSettingsMarshaller();

    public static H264QvbrSettingsMarshaller getInstance() {
        return instance;
    }

    /**
     * Marshall the given parameter object.
     *
     * @param h264QvbrSettings the model object to marshall; must not be null
     * @param protocolMarshaller destination marshaller that receives each bound field
     * @throws SdkClientException if the model object is null or any field fails to marshall
     */
    public void marshall(H264QvbrSettings h264QvbrSettings, ProtocolMarshaller protocolMarshaller) {

        if (h264QvbrSettings == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }

        try {
            protocolMarshaller.marshall(h264QvbrSettings.getMaxAverageBitrate(), MAXAVERAGEBITRATE_BINDING);
            protocolMarshaller.marshall(h264QvbrSettings.getQvbrQualityLevel(), QVBRQUALITYLEVEL_BINDING);
            protocolMarshaller.marshall(h264QvbrSettings.getQvbrQualityLevelFineTune(), QVBRQUALITYLEVELFINETUNE_BINDING);
        } catch (Exception e) {
            // any failure is rewrapped so callers only need to handle SdkClientException
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }

}
aws/aws-sdk-java
aws-java-sdk-mediaconvert/src/main/java/com/amazonaws/services/mediaconvert/model/transform/H264QvbrSettingsMarshaller.java
Java
apache-2.0
2,717
package tool.weather;

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;

import net.sf.json.JSONObject;

import sign.Encode;
import util.WeixinUtil;

/**
 * Weather forecast lookup against the Baidu telematics v3 weather API,
 * formatted as a plain-text reply (city, pm25 when present, per-day forecast lines).
 */
public class WeatherForcast {

    private static String BASE_URL = "http://api.map.baidu.com/telematics/v3/weather";
    // SECURITY(review): API credentials are hard-coded in source; they should be loaded from
    // configuration and these values rotated.
    private static String AK = "XZWqDxzFwawX5Dxt8GrZFFrc";
    private static String SK = "LOADFomNe0PIhcBe7gkhB6PtxvkQCje6";

    /**
     * Forecast by coordinates.
     *
     * @param lx latitude-like coordinate (second in the API's "location" pair)
     * @param ly longitude-like coordinate (first in the API's "location" pair)
     * @return formatted forecast text, or an empty string on failure
     */
    public static String forcastByLocation(String lx, String ly) {
        StringBuffer sb = new StringBuffer();
        String url = BASE_URL + "?location=" + ly + "," + lx + "&output=json&ak=" + AK;
        JSONObject jsonObject = WeixinUtil.doGetStr(url);
        if (isSuccess(jsonObject)) {
            appendForecast(sb, jsonObject);
        }
        // preserved behavior: failure leaves the buffer empty
        return sb.toString();
    }

    /**
     * Forecast by city name.
     *
     * @param city city name (will be URL-encoded as UTF-8)
     * @return formatted forecast text, or the literal string "false" on failure
     * @throws UnsupportedEncodingException never in practice (UTF-8 is always supported)
     */
    public static String forcastByName(String city) throws UnsupportedEncodingException {
        StringBuffer sb = new StringBuffer();
        String url = BASE_URL + "?location=" + URLEncoder.encode(city, "utf-8") + "&output=json&ak=" + AK;
        JSONObject jsonObject = WeixinUtil.doGetStr(url);
        if (isSuccess(jsonObject)) {
            appendForecast(sb, jsonObject);
        } else {
            // preserved behavior: callers may rely on this sentinel value
            sb.append("false");
        }
        return sb.toString();
    }

    // true when the API reports error code "0" and status "success"
    private static boolean isSuccess(JSONObject jsonObject) {
        String errorCode = jsonObject.getString("error");
        return "0".equals(errorCode) && "success".equalsIgnoreCase(jsonObject.getString("status"));
    }

    // formats the success response; shared by both public lookups (was duplicated in each)
    private static void appendForecast(StringBuffer sb, JSONObject jsonObject) {
        WeatherResult weatherResult = (WeatherResult) JSONObject.toBean(jsonObject, WeatherResult.class);
        Results[] results = weatherResult.getResults();
        sb.append("当前城市是:" + results[0].getCurrentCity() + "\n");
        String pm25 = results[0].getPm25();
        // fix: check for null BEFORE dereferencing; the original called getPm25().trim() first and
        // then compared the trimmed result against null, which is both too late and always true
        if (pm25 != null && !pm25.trim().equals("")) {
            sb.append("pm25为:" + pm25 + "\n");
        }
        sb.append("天气详情\n");
        for (Weather_data wd : results[0].getWeather_data()) {
            sb.append(wd.getDate() + " " + wd.getWeather() + " " + wd.getTemperature() + "\n");
        }
        sb.append("\n");
    }

    /**
     * Builds the request signature (MD5 of the URL-encoded base URL + secret key, lower-cased).
     * NOTE(review): this signs only BASE_URL+SK without the request parameters — confirm against
     * the Baidu sn-signing spec before relying on it; it is currently unused by the lookups above.
     */
    public static String getSN() {
        StringBuffer sn = new StringBuffer("");
        try {
            sn.append(Encode.MD5(URLEncoder.encode(BASE_URL + SK, "utf-8")).toLowerCase());
        } catch (UnsupportedEncodingException e) {
            e.printStackTrace();
        }
        return sn.toString();
    }

    /**
     * Manual smoke test.
     */
    public static void main(String[] args) throws UnsupportedEncodingException {
        // System.out.println(forcastByLocation("30.265915", "119.950241"));
        System.out.println(forcastByName("缙云"));
    }

}
Chaos777/WeixinT
src/tool/weather/WeatherForcast.java
Java
apache-2.0
3,330
import {ExecutionOutputGetResponse, ExecutionStatusGetResponse, JobWorkflowGetResponse, ExecutionOutput, ExecutionOutputEntry} from '@rundeck/client/dist/lib/models' import {Rundeck} from '@rundeck/client' import {RenderedStepList, JobWorkflow} from './JobWorkflow' type Omit<T, K extends keyof T> = Pick<T, Exclude<keyof T, K>> export interface IRenderedEntry extends ExecutionOutputEntry { renderedStep?: RenderedStepList renderedContext?: string lineNumber: number stepType?: string } export type EnrichedExecutionOutput = Omit<ExecutionOutput, 'entries'> & {entries: IRenderedEntry[]} const BACKOFF_MIN = 100 const BACKOFF_MAX = 5000 export class ExecutionLog { client: Rundeck offset = 0 size = 0 completed = false execCompleted = false backoff = 0 lineNumber = 0 private jobWorkflowProm!: Promise<JobWorkflow> private executionStatusProm!: Promise<ExecutionStatusGetResponse> constructor(readonly id: string, client?: Rundeck) { this.client = client || window._rundeck.rundeckClient } /** Optional method to populate information about execution output */ async init() { const resp = await this.client.executionOutputGet(this.id, {offset: '0', maxlines: 1}) this.execCompleted = resp.execCompleted this.size = resp.totalSize } async getJobWorkflow() { if(!this.jobWorkflowProm) { this.jobWorkflowProm = (async () => { const status = await this.getExecutionStatus() if(!status.job){ return new JobWorkflow([{exec:status.description,type:'exec',nodeStep:'true'}]) } let resp = await this.client.jobWorkflowGet(status.job!.id!) 
return new JobWorkflow(resp.workflow) })() } return this.jobWorkflowProm } async getExecutionStatus() { if (!this.executionStatusProm) this.executionStatusProm = this.client.executionStatusGet(this.id) return this.executionStatusProm } async getOutput(maxLines: number): Promise<ExecutionOutputGetResponse> { await this.waitBackOff() const res = await this.client.executionOutputGet(this.id, {offset: this.offset.toString(), maxlines: maxLines}) this.offset = parseInt(res.offset) this.size = res.totalSize this.completed = res.completed && res.execCompleted if (!this.completed && res.entries.length == 0) { this.increaseBackOff() } else { this.decreaseBackOff() } // console.log(`Backoff: ${this.backoff}`) // console.log(`Results: ${res.entries.length}`) return res } async waitBackOff() { if (this.backoff == 0) { return void(0) } else { return new Promise<void>((res, rej) => { setTimeout(res, this.backoff) }) } } private increaseBackOff() { // TODO: Jitter https://aws.amazon.com/blogs/architecture/exponential-backoff-and-jitter/ this.backoff = Math.min(Math.max(this.backoff, BACKOFF_MIN) * 2, BACKOFF_MAX) } private decreaseBackOff() { if (this.backoff == 0) return const backoff = this.backoff / 2 this.backoff = backoff < BACKOFF_MIN ? 0 : backoff } async getEnrichedOutput(maxLines: number): Promise<EnrichedExecutionOutput> { const [workflow, res] = await Promise.all([ this.getJobWorkflow(), this.getOutput(maxLines) ]) const enrichedEntries = res.entries.map(e => { this.lineNumber++ return { lineNumber: this.lineNumber, renderedStep: e.stepctx ? workflow.renderStepsFromContextPath(e.stepctx!) : undefined, renderedContext: e.stepctx ? workflow.renderContextString(e.stepctx!) : undefined, stepType: e.stepctx ? workflow.contextType(e.stepctx!) : undefined, ...e } }) return { ...res, entries: enrichedEntries } } }
rundeck/rundeck
rundeckapp/grails-spa/packages/ui-trellis/src/utilities/ExecutionLogConsumer.ts
TypeScript
apache-2.0
4,199
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.Serialization;
using System.Text;
using System.Threading.Tasks;

namespace SendInBlue
{
    /// <summary>
    /// Outcome categories for SendinBlue API responses. The <see cref="EnumMemberAttribute"/>
    /// values give the serialized wire form of each member; presumably these match the status
    /// strings returned by the API — verify against the response-parsing code.
    /// </summary>
    public enum ResponseTypes
    {
        /// <summary>Serialized as "success".</summary>
        [EnumMember(Value = "success")]
        Success,
        /// <summary>Serialized as "failure".</summary>
        [EnumMember(Value = "failure")]
        Failure,
        /// <summary>Serialized as "error".</summary>
        [EnumMember(Value = "error")]
        Error
    }
}
lademone/sendinblue.net
src/SendInBlue/Core/ResponseTypes.cs
C#
apache-2.0
391
package org.kymjs.kjframe.demo;

import java.io.File;
import java.io.FileNotFoundException;

import org.kymjs.kjframe.KJActivity;
import org.kymjs.kjframe.KJHttp;
import org.kymjs.kjframe.http.HttpCallBack;
import org.kymjs.kjframe.http.HttpConfig;
import org.kymjs.kjframe.http.HttpParams;
import org.kymjs.kjframe.ui.BindView;
import org.kymjs.kjframe.ui.ViewInject;
import org.kymjs.kjframe.utils.FileUtils;
import org.kymjs.kjframe.utils.KJLoger;

import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ProgressBar;

/**
 * Demo activity exercising the KJHttp API: GET, POST, file upload,
 * custom configuration, and resumable download with progress reporting.
 */
public class HttpActivity extends KJActivity {

    @BindView(id = R.id.button1, click = true)
    private Button mBtn1;
    @BindView(id = R.id.button2, click = true)
    private Button mBtn2;
    @BindView(id = R.id.button3, click = true)
    private Button mBtn3;
    @BindView(id = R.id.button4, click = true)
    private Button mBtn4;
    @BindView(id = R.id.button5, click = true)
    private Button mBtn5;
    @BindView(id = R.id.button6, click = true)
    private Button mBtn6;
    @BindView(id = R.id.edittext)
    private EditText mEtDownloadPath;
    @BindView(id = R.id.progress)
    private ProgressBar mProgress;

    // shared client used by POST, upload, and download demos (GET creates its own)
    private final KJHttp kjh = new KJHttp();

    @Override
    public void setRootView() {
        setContentView(R.layout.http);
    }

    @Override
    public void initWidget() {
        super.initWidget();
        // button captions (Chinese UI strings kept as-is): GET / POST / upload / custom / download / pause
        mBtn1.setText("GET请求");
        mBtn2.setText("POST请求");
        mBtn3.setText("文件上传");
        mBtn4.setText("自定义(高级设置)");
        mBtn5.setText("下载");
        mBtn6.setText("暂停下载");
        mEtDownloadPath.setText("http://192.168.1.145/kymjs/upload/1.pdf");
    }

    @Override
    public void widgetClick(View v) {
        super.widgetClick(v);
        switch (v.getId()) {
        case R.id.button1:
            get();
            break;
        case R.id.button2:
            post();
            break;
        case R.id.button3:
            upload();
            break;
        case R.id.button4:
            highRequest();
            break;
        case R.id.button5:
            mBtn6.setVisibility(View.VISIBLE);
            download();
            break;
        case R.id.button6:
            // resumable download: to continue after a pause, simply start the download again
            if (kjh.isStopDownload()) {
                mBtn6.setText("暂停下载");
                download();
            } else {
                mBtn6.setText("继续");
                kjh.stopDownload();
            }
            break;
        }
    }

    /**
     * GET demo with a fully customized HttpConfig and all callback hooks.
     */
    private void get() {
        HttpConfig config = new HttpConfig();// each KJHttp object has its own config
        config.cachePath = "hello/world"; // cache data under hello/world at the SD-card root
        config.cacheTime = 0;// force cache bypass
        // (you can set your own cache time; it is recommended to use different cache times
        // for WiFi vs 3G and switch between them dynamically)
        config.httpHeader.put("cache", "kjlibrary");// set HTTP request headers
        config.maxRetries = 10;// number of retries on error
        KJHttp kjhttp = new KJHttp(config);
        kjhttp.get("http://www.oschina.net/", new HttpCallBack() {
            @Override
            public void onPreStart() {
                super.onPreStart();
                KJLoger.debug("即将开始http请求");
            }

            @Override
            public void onSuccess(String t) {
                super.onSuccess(t);
                ViewInject.longToast("GET请求成功");
                KJLoger.debug("请求成功:" + t.toString());
            }

            @Override
            public void onFailure(Throwable t, int errorNo, String strMsg) {
                super.onFailure(t, errorNo, strMsg);
                KJLoger.debug("出现异常:" + strMsg);
            }

            @Override
            public void onFinish() {
                super.onFinish();
                KJLoger.debug("请求完成,不管成功还是失败");
            }
        });
    }

    /**
     * POST demo with form parameters against a LAN test server.
     */
    private void post() {
        HttpParams params = new HttpParams();
        params.put("id", "1");
        params.put("name", "张涛");
        kjh.post("http://192.168.1.149/post.php", params, new HttpCallBack() {
            @Override
            public void onSuccess(String t) {
                super.onSuccess(t);
                ViewInject.toast("POST成功:" + t.toString());
            }

            @Override
            public void onFailure(Throwable t, int errorNo, String strMsg) {
                super.onFailure(t, errorNo, strMsg);
                ViewInject.toast("失败:" + strMsg);
            }
        });
    }

    // Sample PHP backend implementation for the file upload:
    // <?php
    // if ($_FILES["file"]["error"] > 0)
    // {
    // echo "Return Code: " . $_FILES["file"]["error"] . "<br />";
    // }
    // else
    // {
    // echo "Upload: " . $_FILES["file"]["name"] . "<br />";
    // echo "Type: " . $_FILES["file"]["type"] . "<br />";
    // echo "Size: " . ($_FILES["file"]["size"] / 1024) . " Kb<br />";
    // echo "Temp file: " . $_FILES["file"]["tmp_name"] . "<br />";
    //
    // if (file_exists("upload/" . $_FILES["file"]["name"]))
    // {
    // echo $_FILES["file"]["name"] . " already exists. ";
    // }
    // else
    // {
    // move_uploaded_file($_FILES["file"]["tmp_name"],
    // "upload/" . $_FILES["file"]["name"]);
    // echo "Stored in: " . "upload/" . $_FILES["file"]["name"];
    // }
    // }
    // ?>
    /**
     * File upload demo: posts a local file as the "file" form field.
     */
    private void upload() {
        HttpParams params = new HttpParams();
        try {
            params.put("file", FileUtils.getSaveFile("KJLibrary", "logo.jpg"));
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        }
        kjh.post("http://192.168.1.149/kymjs/hello.php", params, new HttpCallBack() {
            @Override
            public void onSuccess(String t) {
                super.onSuccess(t);
                ViewInject.toast("文件上传完成");
            }

            @Override
            public void onFailure(Throwable t, int errorNo, String strMsg) {
                super.onFailure(t, errorNo, strMsg);
                ViewInject.toast("文件上传失败" + strMsg);
            }
        });
    }

    /**
     * Advanced-configuration demo: shows how to build a customized KJHttp client.
     * Intentionally makes no request — see the toast and the comments below.
     */
    private void highRequest() {
        HttpConfig config = new HttpConfig();// each KJHttp object has its own config
        config.cachePath = "hello/world"; // cache data under hello/world at the SD-card root
        config.cacheTime = 0;// force cache bypass
        // (you can set your own cache time; it is recommended to use different cache times
        // for WiFi vs 3G and switch between them dynamically)
        config.httpHeader.put("cache", "kjlibrary");// set HTTP request headers
        config.maxRetries = 10;// number of retries on error
        KJHttp kjhttp = new KJHttp(config);
        // the rest is the same as the other request demos
        ViewInject.toast("请查看代码中注释");
    }

    /**
     * Download demo: saves the URL from the text field to KJLibrary/l.pdf,
     * driving the progress bar from onLoading callbacks.
     */
    private void download() {
        kjh.download(mEtDownloadPath.getText().toString(), FileUtils.getSaveFile("KJLibrary", "l.pdf"), new HttpCallBack() {
            @Override
            public void onSuccess(File f) {
                super.onSuccess(f);
                KJLoger.debug("success");
                ViewInject.toast("下载成功");
                mProgress.setProgress(mProgress.getMax());
            }

            @Override
            public void onFailure(Throwable t, int errorNo, String strMsg) {
                super.onFailure(t, errorNo, strMsg);
                KJLoger.debug("onFailure");
            }

            @Override
            public void onLoading(long count, long current) {
                super.onLoading(count, current);
                mProgress.setMax((int) count);
                mProgress.setProgress((int) current);
                KJLoger.debug(count + "------" + current);
            }
        });
    }
}
10045125/KJFrameForAndroid
KJLibraryExample/src/org/kymjs/kjframe/demo/HttpActivity.java
Java
apache-2.0
8,580
package org.stagemonitor.web; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.stagemonitor.core.CorePlugin; import org.stagemonitor.core.Stagemonitor; import org.stagemonitor.core.StagemonitorPlugin; import org.stagemonitor.core.configuration.Configuration; import org.stagemonitor.core.configuration.ConfigurationOption; import org.stagemonitor.core.configuration.converter.SetValueConverter; import org.stagemonitor.core.elasticsearch.ElasticsearchClient; import org.stagemonitor.core.grafana.GrafanaClient; import org.stagemonitor.core.util.ClassUtils; import org.stagemonitor.core.util.StringUtils; import org.stagemonitor.web.configuration.ConfigurationServlet; import org.stagemonitor.web.metrics.StagemonitorMetricsServlet; import org.stagemonitor.web.monitor.MonitoredHttpRequest; import org.stagemonitor.web.monitor.filter.HttpRequestMonitorFilter; import org.stagemonitor.web.monitor.filter.StagemonitorSecurityFilter; import org.stagemonitor.web.monitor.rum.RumServlet; import org.stagemonitor.web.monitor.servlet.StagemonitorFileServlet; import org.stagemonitor.web.monitor.widget.SpanServlet; import org.stagemonitor.web.monitor.widget.WidgetServlet; import org.stagemonitor.web.session.SessionCounter; import java.util.Arrays; import java.util.Collection; import java.util.EnumSet; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.regex.Pattern; import javax.servlet.DispatcherType; import javax.servlet.FilterRegistration; import javax.servlet.ServletContainerInitializer; import javax.servlet.ServletContext; import javax.servlet.ServletRegistration; import javax.servlet.http.HttpServletRequest; import static org.stagemonitor.core.pool.MBeanPooledResource.tomcatThreadPools; import static org.stagemonitor.core.pool.PooledResourceMetricsRegisterer.registerPooledResources; public class WebPlugin extends StagemonitorPlugin implements ServletContainerInitializer 
{ public static final String STAGEMONITOR_SHOW_WIDGET = "X-Stagemonitor-Show-Widget"; private static final String WEB_PLUGIN = "Web Plugin"; private static final Logger logger = LoggerFactory.getLogger(WebPlugin.class); static { Stagemonitor.init(); } private final ConfigurationOption<Collection<Pattern>> requestParamsConfidential = ConfigurationOption.regexListOption() .key("stagemonitor.requestmonitor.http.requestparams.confidential.regex") .dynamic(true) .label("Deprecated: Confidential request parameters (regex)") .description("Deprecated, use stagemonitor.requestmonitor.requestparams.confidential.regex instead." + "A list of request parameter name patterns that should not be collected.\n" + "A request parameter is either a query string or a application/x-www-form-urlencoded request " + "body (POST form content)") .defaultValue(Arrays.asList( Pattern.compile("(?i).*pass.*"), Pattern.compile("(?i).*credit.*"), Pattern.compile("(?i).*pwd.*"))) .tags("security-relevant", "deprecated") .configurationCategory(WEB_PLUGIN) .build(); private ConfigurationOption<Boolean> collectHttpHeaders = ConfigurationOption.booleanOption() .key("stagemonitor.requestmonitor.http.collectHeaders") .dynamic(true) .label("Collect HTTP headers") .description("Whether or not HTTP headers should be collected with a call stack.") .defaultValue(true) .configurationCategory(WEB_PLUGIN) .tags("security-relevant") .build(); private ConfigurationOption<Boolean> parseUserAgent = ConfigurationOption.booleanOption() .key("stagemonitor.requestmonitor.http.parseUserAgent") .dynamic(true) .label("Analyze user agent") .description("Whether or not the user-agent header should be parsed and analyzed to get information " + "about the browser, device type and operating system. If you want to enable this option, you have " + "to add a dependency on net.sf.uadetector:uadetector-resources:2014.10. 
As this library is no longer " + "maintained, it is however recommended to use the Elasticsearch ingest user agent plugin. See " + "https://www.elastic.co/guide/en/elasticsearch/plugins/master/ingest-user-agent.html") .defaultValue(false) .tags("deprecated") .configurationCategory(WEB_PLUGIN) .build(); private ConfigurationOption<Collection<String>> excludeHeaders = ConfigurationOption.lowerStringsOption() .key("stagemonitor.requestmonitor.http.headers.excluded") .dynamic(true) .label("Do not collect headers") .description("A list of (case insensitive) header names that should not be collected.") .defaultValue(new LinkedHashSet<String>(Arrays.asList("cookie", "authorization", STAGEMONITOR_SHOW_WIDGET))) .configurationCategory(WEB_PLUGIN) .tags("security-relevant") .build(); private final ConfigurationOption<Boolean> widgetEnabled = ConfigurationOption.booleanOption() .key("stagemonitor.web.widget.enabled") .dynamic(true) .label("In browser widget enabled") .description("If active, stagemonitor will inject a widget in the web site containing the call tree. " + "If disabled, you can still enable it for authorized users by sending the HTTP header " + "`X-Stagemonitor-Show-Widget: <stagemonitor.password>`. You can use browser plugins like Modify " + "Headers for this. Note: if `stagemonitor.password` is set to an empty string, you can't disable the widget.\n" + "Requires Servlet-Api >= 3.0") .defaultValue(true) .configurationCategory(WEB_PLUGIN) .build(); private final ConfigurationOption<Map<Pattern, String>> groupUrls = ConfigurationOption.regexMapOption() .key("stagemonitor.groupUrls") .dynamic(true) .label("Group URLs regex") .description("Combine url paths by regex to a single url group.\n" + "E.g. `(.*).js: *.js` combines all URLs that end with `.js` to a group named `*.js`. " + "The metrics for all URLs matching the pattern are consolidated and shown in one row in the request table. 
" + "The syntax is `<regex>: <group name>[, <regex>: <group name>]*`") .defaultValue( new LinkedHashMap<Pattern, String>() {{ put(Pattern.compile("(.*).js$"), "*.js"); put(Pattern.compile("(.*).css$"), "*.css"); put(Pattern.compile("(.*).jpg$"), "*.jpg"); put(Pattern.compile("(.*).jpeg$"), "*.jpeg"); put(Pattern.compile("(.*).png$"), "*.png"); }}) .configurationCategory(WEB_PLUGIN) .build(); private final ConfigurationOption<Boolean> rumEnabled = ConfigurationOption.booleanOption() .key("stagemonitor.web.rum.enabled") .dynamic(true) .label("Enable Real User Monitoring") .description("The Real User Monitoring feature collects the browser, network and overall percieved " + "execution time from the user's perspective. When activated, a piece of javascript will be " + "injected to each html page that collects the data from real users and sends it back " + "to the server. Servlet API 3.0 or higher is required for this.") .defaultValue(true) .configurationCategory(WEB_PLUGIN) .build(); private final ConfigurationOption<Boolean> collectPageLoadTimesPerRequest = ConfigurationOption.booleanOption() .key("stagemonitor.web.collectPageLoadTimesPerRequest") .dynamic(true) .label("Collect Page Load Time data per request group") .description("Whether or not browser, network and overall execution time should be collected per request group.\n" + "If set to true, four additional timers will be created for each request group to record the page " + "rendering time, dom processing time, network time and overall time per request. " + "If set to false, the times of all requests will be aggregated.") .defaultValue(false) .configurationCategory(WEB_PLUGIN) .build(); private final ConfigurationOption<Collection<String>> excludedRequestPaths = ConfigurationOption.stringsOption() .key("stagemonitor.web.paths.excluded") .dynamic(false) .label("Excluded paths") .description("Request paths that should not be monitored. 
" + "A value of `/aaa` means, that all paths starting with `/aaa` should not be monitored." + " It's recommended to not monitor static resources, as they are typically not interesting to " + "monitor but consume resources when you do.") .defaultValue(SetValueConverter.immutableSet( // exclude paths of static vaadin resources "/VAADIN/", // don't monitor vaadin heatbeat "/HEARTBEAT/")) .configurationCategory(WEB_PLUGIN) .build(); private final ConfigurationOption<String> metricsServletAllowedOrigin = ConfigurationOption.stringOption() .key("stagemonitor.web.metricsServlet.allowedOrigin") .dynamic(true) .label("Allowed origin") .description("The Access-Control-Allow-Origin header value for the metrics servlet.") .defaultValue(null) .configurationCategory(WEB_PLUGIN) .build(); private final ConfigurationOption<String> metricsServletJsonpParameter = ConfigurationOption.stringOption() .key("stagemonitor.web.metricsServlet.jsonpParameter") .dynamic(true) .label("The Jsonp callback parameter name") .description("The name of the parameter used to specify the jsonp callback.") .defaultValue(null) .configurationCategory(WEB_PLUGIN) .build(); private ConfigurationOption<Boolean> monitorOnlySpringMvcOption = ConfigurationOption.booleanOption() .key("stagemonitor.requestmonitor.spring.monitorOnlySpringMvcRequests") .dynamic(true) .label("Monitor only SpringMVC requests") .description("Whether or not requests should be ignored, if they will not be handled by a Spring MVC controller method.\n" + "This is handy, if you are not interested in the performance of serving static files. 
" + "Setting this to true can also significantly reduce the amount of files (and thus storing space) " + "Graphite will allocate.") .defaultValue(false) .configurationCategory("Spring MVC Plugin") .build(); private ConfigurationOption<Boolean> monitorOnlyResteasyOption = ConfigurationOption.booleanOption() .key("stagemonitor.requestmonitor.resteasy.monitorOnlyResteasyRequests") .dynamic(true) .label("Monitor only Resteasy reqeusts") .description("Whether or not requests should be ignored, if they will not be handled by a Resteasy resource method.\n" + "This is handy, if you are not interested in the performance of serving static files. " + "Setting this to true can also significantly reduce the amount of files (and thus storing space) " + "Graphite will allocate.") .defaultValue(false) .configurationCategory("Resteasy Plugin") .build(); private ConfigurationOption<Collection<String>> requestExceptionAttributes = ConfigurationOption.stringsOption() .key("stagemonitor.requestmonitor.requestExceptionAttributes") .dynamic(true) .label("Request Exception Attributes") .description("Defines the list of attribute names to check on the HttpServletRequest when searching for an exception. \n\n" + "Stagemonitor searches this list in order to see if any of these attributes are set on the request with " + "an Exception object and then records that information on the span. 
If your web framework " + "sets a different attribute outside of the defaults, you can add that attribute to this list to properly " + "record the exception on the trace.") .defaultValue(new LinkedHashSet<String>() {{ add("javax.servlet.error.exception"); add("exception"); add("org.springframework.web.servlet.DispatcherServlet.EXCEPTION"); }}) .configurationCategory(WEB_PLUGIN) .build(); private ConfigurationOption<Boolean> honorDoNotTrackHeader = ConfigurationOption.booleanOption() .key("stagemonitor.web.honorDoNotTrackHeader") .dynamic(true) .label("Honor do not track header") .description("When set to true, requests that include the dnt header won't be reported. " + "Depending on your use case you might not be required to stop reporting spans even " + "if dnt is set. See https://tools.ietf.org/html/draft-mayer-do-not-track-00#section-9.3") .defaultValue(false) .tags("privacy") .configurationCategory(WEB_PLUGIN) .build(); private SpanServlet spanServlet; @Override public void initializePlugin(StagemonitorPlugin.InitArguments initArguments) { registerPooledResources(initArguments.getMetricRegistry(), tomcatThreadPools()); final CorePlugin corePlugin = initArguments.getPlugin(CorePlugin.class); ElasticsearchClient elasticsearchClient = corePlugin.getElasticsearchClient(); if (corePlugin.isReportToGraphite()) { elasticsearchClient.sendGrafana1DashboardAsync("grafana/Grafana1GraphiteServer.json"); elasticsearchClient.sendGrafana1DashboardAsync("grafana/Grafana1GraphiteKPIsOverTime.json"); } if (corePlugin.isReportToElasticsearch()) { final GrafanaClient grafanaClient = corePlugin.getGrafanaClient(); elasticsearchClient.sendClassPathRessourceBulkAsync("kibana/Application-Server.bulk"); grafanaClient.sendGrafanaDashboardAsync("grafana/ElasticsearchApplicationServer.json"); } spanServlet.onStagemonitorStarted(); } @Override public List<ConfigurationOption<?>> getConfigurationOptions() { final List<ConfigurationOption<?>> configurationOptions = 
super.getConfigurationOptions(); if (!ClassUtils.isPresent("org.springframework.web.servlet.HandlerMapping")) { configurationOptions.remove(monitorOnlySpringMvcOption); } if (!ClassUtils.isPresent("org.jboss.resteasy.core.ResourceMethodRegistry")) { configurationOptions.remove(monitorOnlyResteasyOption); } return configurationOptions; } public boolean isCollectHttpHeaders() { return collectHttpHeaders.getValue(); } public boolean isParseUserAgent() { return parseUserAgent.getValue(); } public Collection<String> getExcludeHeaders() { return excludeHeaders.getValue(); } public boolean isWidgetEnabled() { return widgetEnabled.getValue(); } public Map<Pattern, String> getGroupUrls() { return groupUrls.getValue(); } public Collection<Pattern> getRequestParamsConfidential() { return requestParamsConfidential.getValue(); } public boolean isRealUserMonitoringEnabled() { return rumEnabled.getValue(); } public boolean isCollectPageLoadTimesPerRequest() { return collectPageLoadTimesPerRequest.getValue(); } public Collection<String> getExcludedRequestPaths() { return excludedRequestPaths.getValue(); } public String getMetricsServletAllowedOrigin() { return metricsServletAllowedOrigin.getValue(); } public String getMetricsServletJsonpParamName() { return metricsServletJsonpParameter.getValue(); } public boolean isWidgetAndStagemonitorEndpointsAllowed(HttpServletRequest request, Configuration configuration) { final Boolean showWidgetAttr = (Boolean) request.getAttribute(STAGEMONITOR_SHOW_WIDGET); if (showWidgetAttr != null) { logger.debug("isWidgetAndStagemonitorEndpointsAllowed: showWidgetAttr={}", showWidgetAttr); return showWidgetAttr; } final boolean widgetEnabled = isWidgetEnabled(); final boolean passwordInShowWidgetHeaderCorrect = isPasswordInShowWidgetHeaderCorrect(request, configuration); final boolean result = widgetEnabled || passwordInShowWidgetHeaderCorrect; logger.debug("isWidgetAndStagemonitorEndpointsAllowed: isWidgetEnabled={}, 
isPasswordInShowWidgetHeaderCorrect={}, result={}", widgetEnabled, passwordInShowWidgetHeaderCorrect, result); return result; } private boolean isPasswordInShowWidgetHeaderCorrect(HttpServletRequest request, Configuration configuration) { String password = request.getHeader(STAGEMONITOR_SHOW_WIDGET); if (configuration.isPasswordCorrect(password)) { return true; } else { if (StringUtils.isNotEmpty(password)) { logger.error("The password transmitted via the header {} is not correct. " + "This might be a malicious attempt to guess the value of {}. " + "The request was initiated from the ip {}.", STAGEMONITOR_SHOW_WIDGET, Stagemonitor.STAGEMONITOR_PASSWORD, MonitoredHttpRequest.getClientIp(request)); } return false; } } public boolean isMonitorOnlySpringMvcRequests() { return monitorOnlySpringMvcOption.getValue(); } public boolean isMonitorOnlyResteasyRequests() { return monitorOnlyResteasyOption.getValue(); } public Collection<String> getRequestExceptionAttributes() { return requestExceptionAttributes.getValue(); } public boolean isHonorDoNotTrackHeader() { return honorDoNotTrackHeader.getValue(); } @Override public void onStartup(Set<Class<?>> c, ServletContext ctx) { ctx.addServlet(ConfigurationServlet.class.getSimpleName(), new ConfigurationServlet()) .addMapping(ConfigurationServlet.CONFIGURATION_ENDPOINT); ctx.addServlet(StagemonitorMetricsServlet.class.getSimpleName(), new StagemonitorMetricsServlet()) .addMapping("/stagemonitor/metrics"); ctx.addServlet(RumServlet.class.getSimpleName(), new RumServlet()) .addMapping("/stagemonitor/public/rum"); ctx.addServlet(StagemonitorFileServlet.class.getSimpleName(), new StagemonitorFileServlet()) .addMapping("/stagemonitor/static/*", "/stagemonitor/public/static/*"); ctx.addServlet(WidgetServlet.class.getSimpleName(), new WidgetServlet()) .addMapping("/stagemonitor"); this.spanServlet = new SpanServlet(); final ServletRegistration.Dynamic spanServlet = ctx.addServlet(SpanServlet.class.getSimpleName(), this.spanServlet); 
spanServlet.addMapping("/stagemonitor/spans"); spanServlet.setAsyncSupported(true); final FilterRegistration.Dynamic securityFilter = ctx.addFilter(StagemonitorSecurityFilter.class.getSimpleName(), new StagemonitorSecurityFilter()); // Add as last filter so that other filters have the chance to set the // WebPlugin.STAGEMONITOR_SHOW_WIDGET request attribute that overrides the widget visibility. // That way the application can decide whether a particular user is allowed to see the widget.P securityFilter.addMappingForUrlPatterns(EnumSet.of(DispatcherType.REQUEST), true, "/stagemonitor/*"); securityFilter.setAsyncSupported(true); final FilterRegistration.Dynamic monitorFilter = ctx.addFilter(HttpRequestMonitorFilter.class.getSimpleName(), new HttpRequestMonitorFilter()); monitorFilter.addMappingForUrlPatterns(EnumSet.of(DispatcherType.REQUEST), false, "/*"); monitorFilter.setAsyncSupported(true); try { ctx.addListener(SessionCounter.class); } catch (IllegalArgumentException e) { // embedded servlet containers like jetty don't necessarily support sessions } } }
glamarre360/stagemonitor
stagemonitor-web/src/main/java/org/stagemonitor/web/WebPlugin.java
Java
apache-2.0
18,647
package com.hss01248.dialog.bottomsheet; import android.content.Context; import android.view.View; import android.widget.ImageView; import android.widget.TextView; import com.hss01248.dialog.R; import com.hss01248.dialog.adapter.SuperLvHolder; /** * Created by Administrator on 2016/10/19. */ public class BsGvHolder extends SuperLvHolder<BottomSheetBean> { public ImageView ivIcon; public TextView mTextView; public BsGvHolder(Context context){ super(context); ivIcon = (ImageView) rootView.findViewById(R.id.iv_icon); mTextView = (TextView) rootView.findViewById(R.id.tv_msg); } @Override protected int setLayoutRes() { return R.layout.item_bottomsheet_gv; } @Override public void assingDatasAndEvents(Context context, BottomSheetBean bean) { if (bean.icon<=0){ ivIcon.setVisibility(View.GONE); }else { ivIcon.setImageResource(bean.icon); ivIcon.setVisibility(View.VISIBLE); } mTextView.setText(bean.text); /* rootView.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { } });*/ } }
weiwenqiang/GitHub
Dialog/DialogUtil-master/dialog/src/main/java/com/hss01248/dialog/bottomsheet/BsGvHolder.java
Java
apache-2.0
1,224
'use strict'; angular.module('sproutStudyApp') .controller('settingsController', function ($scope, $location, $routeParams, settingsService) { $scope.settings = null; $scope.user = null; settingsService.getUser(null, function(data) { $scope.user = data; }); });
stephenlorenz/sproutstudy
sproutStudy_web/src/main/webapp/scripts/controllers/settingsController.js
JavaScript
apache-2.0
294
/* * Copyright 2017 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.thoughtworks.go.utils; import com.thoughtworks.go.util.FileUtil; import org.junit.rules.TemporaryFolder; import java.io.File; import java.io.IOException; import java.util.HashMap; import java.util.UUID; import java.util.regex.Matcher; import java.util.regex.Pattern; import static com.thoughtworks.go.util.ExceptionUtils.bomb; import static com.thoughtworks.go.utils.CommandUtils.exec; public class SvnRepoFixture extends TestRepoFixture { private HashMap<String, File> workspaces = new HashMap<>(); public SvnRepoFixture(String svnRepoPath, TemporaryFolder temporaryFolder) { super(svnRepoPath, temporaryFolder); } public void onSetUp() { } public void onTearDown() { } public String getRepoUrl() throws IOException { return getEnd2EndRepoUrl(); } public static String getRepoUrl(File repositoryRoot, String project) { String url = FileUtil.toFileURI(new File(repositoryRoot, project)); return url.replaceAll(" ", "%20"); } public String getEnd2EndRepoUrl() throws IOException { return getRepoUrl(currentRepository(), "end2end"); } public String getExternalRepoUrl() throws IOException { return getRepoUrl(currentRepository(), "project1"); } public String getConnect4DotNetRepoUrl() throws IOException { return getRepoUrl(currentRepository(), "connect4.net"); } public void addFileAndCheckIn() throws IOException { addFileAndCheckIn(getEnd2EndRepoUrl(), "test", "readme" + UUID.randomUUID() + ".txt"); 
} public void checkinNewFilesWithMessage(String message, String... files) throws Exception { File workspace = workspaceOf(getEnd2EndRepoUrl()); for (String path : files) { svnadd(path, workspace); } checkin(message, workspace); } public String addFileAndCheckIn(String svnRepoUrl, String comment, String fileName) throws IOException { File workspace = workspaceOf(svnRepoUrl); svnadd(fileName, workspace); checkin(comment, workspace); return fileName; } private void svnadd(String fileName, File workspace) throws IOException { File newFile = new File(workspace, fileName); newFile.createNewFile(); exec(workspace, "svn", "add", newFile.getName()); } private void checkin(String comment, File workspace) { exec(workspace, "svn", "ci", "-m", comment, "--username", "twist-test"); exec(workspace, "svn", "up"); } public File checkout(String svnRepoURL) throws IOException { File workspace = temporaryFolder.newFolder(); workspaces.put(svnRepoURL, workspace); exec(workspace, "svn", "co", svnRepoURL, "."); return workspace; } public String log() throws IOException { return exec("svn", "log", "--non-interactive", "--xml", "-v", getEnd2EndRepoUrl()); } public void createExternals() throws IOException { String end2EndRepoUrl = getEnd2EndRepoUrl(); createExternals(end2EndRepoUrl); } public void createExternals(String svnRepoUrl) throws IOException { File workspace = workspaceOf(svnRepoUrl); exec(workspace, "svn", "propset", "svn:externals", "external " + getExternalRepoUrl(), "."); exec(workspace, "svn", "ci", "-m", "created svn externals"); } public String getRevision(File folder) { String info = exec(folder, "svn", "info", "--xml"); return parseRevisionFromSvnInfo(info); } public static String parseRevisionFromSvnInfo(String svnInfo) { String s = svnInfo.replaceAll("\\s", " "); Pattern pattern = Pattern.compile(".*revision=\"(\\d+)\".*"); Matcher matcher = pattern.matcher(s); if (matcher.matches()) { return matcher.group(1); } throw bomb("Can not parse revision from svninfo: \n" + 
svnInfo); } private File workspaceOf(String svnRepoUrl) throws IOException { File workspace = workspaces.get(svnRepoUrl); if (workspace == null) { workspace = checkout(svnRepoUrl); } return workspace; } public String getHeadRevision(String svnRepoUrl) throws IOException { File workspace = workspaceOf(svnRepoUrl); return getRevision(workspace); } }
stevem999/gocd
test-utils/src/main/java/com/thoughtworks/go/utils/SvnRepoFixture.java
Java
apache-2.0
4,961
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.cluster.coordination; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateTaskConfig; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.LocalClusterUpdateTask; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.coordination.ClusterFormationFailureHelper.ClusterFormationState; import org.elasticsearch.cluster.coordination.CoordinationMetadata.VotingConfigExclusion; import org.elasticsearch.cluster.coordination.CoordinationMetadata.VotingConfiguration; import org.elasticsearch.cluster.coordination.CoordinationState.VoteCollection; import org.elasticsearch.cluster.coordination.FollowersChecker.FollowerCheckRequest; import org.elasticsearch.cluster.coordination.JoinHelper.InitialJoinAccumulator; import 
org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RerouteService; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.ClusterApplier; import org.elasticsearch.cluster.service.ClusterApplier.ClusterApplyListener; import org.elasticsearch.cluster.service.MasterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.ListenableFuture; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.discovery.DiscoveryStats; import org.elasticsearch.discovery.HandshakingTransportAddressConnector; import org.elasticsearch.discovery.PeerFinder; import org.elasticsearch.discovery.SeedHostsProvider; import org.elasticsearch.discovery.SeedHostsResolver; import org.elasticsearch.threadpool.Scheduler; import org.elasticsearch.threadpool.ThreadPool.Names; import org.elasticsearch.transport.TransportResponse.Empty; import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; 
import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.Random; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.BiConsumer; import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.Stream; import java.util.stream.StreamSupport; import static org.elasticsearch.cluster.coordination.NoMasterBlockService.NO_MASTER_BLOCK_ID; import static org.elasticsearch.gateway.ClusterStateUpdaters.hideStateIfNotRecovered; import static org.elasticsearch.gateway.GatewayService.STATE_NOT_RECOVERED_BLOCK; public class Coordinator extends AbstractLifecycleComponent implements Discovery { private static final Logger logger = LogManager.getLogger(Coordinator.class); // the timeout before emitting an info log about a slow-running publication public static final Setting<TimeValue> PUBLISH_INFO_TIMEOUT_SETTING = Setting.timeSetting("cluster.publish.info_timeout", TimeValue.timeValueMillis(10000), TimeValue.timeValueMillis(1), Setting.Property.NodeScope); // the timeout for the publication of each value public static final Setting<TimeValue> PUBLISH_TIMEOUT_SETTING = Setting.timeSetting("cluster.publish.timeout", TimeValue.timeValueMillis(30000), TimeValue.timeValueMillis(1), Setting.Property.NodeScope); private final Settings settings; private final boolean singleNodeDiscovery; private final ElectionStrategy electionStrategy; private final TransportService transportService; private final MasterService masterService; private final AllocationService allocationService; private final JoinHelper joinHelper; private final NodeRemovalClusterStateTaskExecutor nodeRemovalExecutor; private final Supplier<CoordinationState.PersistedState> persistedStateSupplier; private final NoMasterBlockService noMasterBlockService; final Object mutex = new Object(); // package-private to allow tests to call methods that assert that the mutex is held private final SetOnce<CoordinationState> 
coordinationState = new SetOnce<>(); // initialized on start-up (see doStart) private volatile ClusterState applierState; // the state that should be exposed to the cluster state applier private final PeerFinder peerFinder; private final PreVoteCollector preVoteCollector; private final Random random; private final ElectionSchedulerFactory electionSchedulerFactory; private final SeedHostsResolver configuredHostsResolver; private final TimeValue publishTimeout; private final TimeValue publishInfoTimeout; private final PublicationTransportHandler publicationHandler; private final LeaderChecker leaderChecker; private final FollowersChecker followersChecker; private final ClusterApplier clusterApplier; private final Collection<BiConsumer<DiscoveryNode, ClusterState>> onJoinValidators; @Nullable private Releasable electionScheduler; @Nullable private Releasable prevotingRound; private long maxTermSeen; private final Reconfigurator reconfigurator; private final ClusterBootstrapService clusterBootstrapService; private final LagDetector lagDetector; private final ClusterFormationFailureHelper clusterFormationFailureHelper; private Mode mode; private Optional<DiscoveryNode> lastKnownLeader; private Optional<Join> lastJoin; private JoinHelper.JoinAccumulator joinAccumulator; private Optional<CoordinatorPublication> currentPublication = Optional.empty(); /** * @param nodeName The name of the node, used to name the {@link java.util.concurrent.ExecutorService} of the {@link SeedHostsResolver}. * @param onJoinValidators A collection of join validators to restrict which nodes may join the cluster. 
*/ public Coordinator(String nodeName, Settings settings, ClusterSettings clusterSettings, TransportService transportService, NamedWriteableRegistry namedWriteableRegistry, AllocationService allocationService, MasterService masterService, Supplier<CoordinationState.PersistedState> persistedStateSupplier, SeedHostsProvider seedHostsProvider, ClusterApplier clusterApplier, Collection<BiConsumer<DiscoveryNode, ClusterState>> onJoinValidators, Random random, RerouteService rerouteService, ElectionStrategy electionStrategy) { this.settings = settings; this.transportService = transportService; this.masterService = masterService; this.allocationService = allocationService; this.onJoinValidators = JoinTaskExecutor.addBuiltInJoinValidators(onJoinValidators); this.singleNodeDiscovery = DiscoveryModule.isSingleNodeDiscovery(settings); this.electionStrategy = electionStrategy; this.joinHelper = new JoinHelper(settings, allocationService, masterService, transportService, this::getCurrentTerm, this::getStateForMasterService, this::handleJoinRequest, this::joinLeaderInTerm, this.onJoinValidators, rerouteService); this.persistedStateSupplier = persistedStateSupplier; this.noMasterBlockService = new NoMasterBlockService(settings, clusterSettings); this.lastKnownLeader = Optional.empty(); this.lastJoin = Optional.empty(); this.joinAccumulator = new InitialJoinAccumulator(); this.publishTimeout = PUBLISH_TIMEOUT_SETTING.get(settings); this.publishInfoTimeout = PUBLISH_INFO_TIMEOUT_SETTING.get(settings); this.random = random; this.electionSchedulerFactory = new ElectionSchedulerFactory(settings, random, transportService.getThreadPool()); this.preVoteCollector = new PreVoteCollector(transportService, this::startElection, this::updateMaxTermSeen, electionStrategy); configuredHostsResolver = new SeedHostsResolver(nodeName, settings, transportService, seedHostsProvider); this.peerFinder = new CoordinatorPeerFinder(settings, transportService, new 
HandshakingTransportAddressConnector(settings, transportService), configuredHostsResolver); this.publicationHandler = new PublicationTransportHandler(transportService, namedWriteableRegistry, this::handlePublishRequest, this::handleApplyCommit); this.leaderChecker = new LeaderChecker(settings, transportService, this::onLeaderFailure); this.followersChecker = new FollowersChecker(settings, transportService, this::onFollowerCheckRequest, this::removeNode); this.nodeRemovalExecutor = new NodeRemovalClusterStateTaskExecutor(allocationService, logger); this.clusterApplier = clusterApplier; masterService.setClusterStateSupplier(this::getStateForMasterService); this.reconfigurator = new Reconfigurator(settings, clusterSettings); this.clusterBootstrapService = new ClusterBootstrapService(settings, transportService, this::getFoundPeers, this::isInitialConfigurationSet, this::setInitialConfiguration); this.lagDetector = new LagDetector(settings, transportService.getThreadPool(), n -> removeNode(n, "lagging"), transportService::getLocalNode); this.clusterFormationFailureHelper = new ClusterFormationFailureHelper(settings, this::getClusterFormationState, transportService.getThreadPool(), joinHelper::logLastFailedJoinAttempt); } private ClusterFormationState getClusterFormationState() { return new ClusterFormationState(settings, getStateForMasterService(), peerFinder.getLastResolvedAddresses(), Stream.concat(Stream.of(getLocalNode()), StreamSupport.stream(peerFinder.getFoundPeers().spliterator(), false)) .collect(Collectors.toList()), getCurrentTerm(), electionStrategy); } private void onLeaderFailure(Exception e) { synchronized (mutex) { if (mode != Mode.CANDIDATE) { assert lastKnownLeader.isPresent(); logger.info(new ParameterizedMessage("master node [{}] failed, restarting discovery", lastKnownLeader.get()), e); } becomeCandidate("onLeaderFailure"); } } private void removeNode(DiscoveryNode discoveryNode, String reason) { synchronized (mutex) { if (mode == Mode.LEADER) { 
masterService.submitStateUpdateTask("node-left", new NodeRemovalClusterStateTaskExecutor.Task(discoveryNode, reason), ClusterStateTaskConfig.build(Priority.IMMEDIATE), nodeRemovalExecutor, nodeRemovalExecutor); } } } void onFollowerCheckRequest(FollowerCheckRequest followerCheckRequest) { synchronized (mutex) { ensureTermAtLeast(followerCheckRequest.getSender(), followerCheckRequest.getTerm()); if (getCurrentTerm() != followerCheckRequest.getTerm()) { logger.trace("onFollowerCheckRequest: current term is [{}], rejecting {}", getCurrentTerm(), followerCheckRequest); throw new CoordinationStateRejectedException("onFollowerCheckRequest: current term is [" + getCurrentTerm() + "], rejecting " + followerCheckRequest); } // check if node has accepted a state in this term already. If not, this node has never committed a cluster state in this // term and therefore never removed the NO_MASTER_BLOCK for this term. This logic ensures that we quickly turn a node // into follower, even before receiving the first cluster state update, but also don't have to deal with the situation // where we would possibly have to remove the NO_MASTER_BLOCK from the applierState when turning a candidate back to follower. 
if (getLastAcceptedState().term() < getCurrentTerm()) { becomeFollower("onFollowerCheckRequest", followerCheckRequest.getSender()); } else if (mode == Mode.FOLLOWER) { logger.trace("onFollowerCheckRequest: responding successfully to {}", followerCheckRequest); } else if (joinHelper.isJoinPending()) { logger.trace("onFollowerCheckRequest: rejoining master, responding successfully to {}", followerCheckRequest); } else { logger.trace("onFollowerCheckRequest: received check from faulty master, rejecting {}", followerCheckRequest); throw new CoordinationStateRejectedException( "onFollowerCheckRequest: received check from faulty master, rejecting " + followerCheckRequest); } } } private void handleApplyCommit(ApplyCommitRequest applyCommitRequest, ActionListener<Void> applyListener) { synchronized (mutex) { logger.trace("handleApplyCommit: applying commit {}", applyCommitRequest); coordinationState.get().handleCommit(applyCommitRequest); final ClusterState committedState = hideStateIfNotRecovered(coordinationState.get().getLastAcceptedState()); applierState = mode == Mode.CANDIDATE ? clusterStateWithNoMasterBlock(committedState) : committedState; if (applyCommitRequest.getSourceNode().equals(getLocalNode())) { // master node applies the committed state at the end of the publication process, not here. 
applyListener.onResponse(null); } else { clusterApplier.onNewClusterState(applyCommitRequest.toString(), () -> applierState, new ClusterApplyListener() { @Override public void onFailure(String source, Exception e) { applyListener.onFailure(e); } @Override public void onSuccess(String source) { applyListener.onResponse(null); } }); } } } PublishWithJoinResponse handlePublishRequest(PublishRequest publishRequest) { assert publishRequest.getAcceptedState().nodes().getLocalNode().equals(getLocalNode()) : publishRequest.getAcceptedState().nodes().getLocalNode() + " != " + getLocalNode(); synchronized (mutex) { final DiscoveryNode sourceNode = publishRequest.getAcceptedState().nodes().getMasterNode(); logger.trace("handlePublishRequest: handling [{}] from [{}]", publishRequest, sourceNode); if (sourceNode.equals(getLocalNode()) && mode != Mode.LEADER) { // Rare case in which we stood down as leader between starting this publication and receiving it ourselves. The publication // is already failed so there is no point in proceeding. 
throw new CoordinationStateRejectedException("no longer leading this publication's term: " + publishRequest); } final ClusterState localState = coordinationState.get().getLastAcceptedState(); if (localState.metadata().clusterUUIDCommitted() && localState.metadata().clusterUUID().equals(publishRequest.getAcceptedState().metadata().clusterUUID()) == false) { logger.warn("received cluster state from {} with a different cluster uuid {} than local cluster uuid {}, rejecting", sourceNode, publishRequest.getAcceptedState().metadata().clusterUUID(), localState.metadata().clusterUUID()); throw new CoordinationStateRejectedException("received cluster state from " + sourceNode + " with a different cluster uuid " + publishRequest.getAcceptedState().metadata().clusterUUID() + " than local cluster uuid " + localState.metadata().clusterUUID() + ", rejecting"); } if (publishRequest.getAcceptedState().term() > localState.term()) { // only do join validation if we have not accepted state from this master yet onJoinValidators.forEach(a -> a.accept(getLocalNode(), publishRequest.getAcceptedState())); } ensureTermAtLeast(sourceNode, publishRequest.getAcceptedState().term()); final PublishResponse publishResponse = coordinationState.get().handlePublishRequest(publishRequest); if (sourceNode.equals(getLocalNode())) { preVoteCollector.update(getPreVoteResponse(), getLocalNode()); } else { becomeFollower("handlePublishRequest", sourceNode); // also updates preVoteCollector } return new PublishWithJoinResponse(publishResponse, joinWithDestination(lastJoin, sourceNode, publishRequest.getAcceptedState().term())); } } private static Optional<Join> joinWithDestination(Optional<Join> lastJoin, DiscoveryNode leader, long term) { if (lastJoin.isPresent() && lastJoin.get().targetMatches(leader) && lastJoin.get().getTerm() == term) { return lastJoin; } return Optional.empty(); } private void closePrevotingAndElectionScheduler() { if (prevotingRound != null) { prevotingRound.close(); prevotingRound = 
null; } if (electionScheduler != null) { electionScheduler.close(); electionScheduler = null; } } private void updateMaxTermSeen(final long term) { synchronized (mutex) { maxTermSeen = Math.max(maxTermSeen, term); final long currentTerm = getCurrentTerm(); if (mode == Mode.LEADER && maxTermSeen > currentTerm) { // Bump our term. However if there is a publication in flight then doing so would cancel the publication, so don't do that // since we check whether a term bump is needed at the end of the publication too. if (publicationInProgress()) { logger.debug("updateMaxTermSeen: maxTermSeen = {} > currentTerm = {}, enqueueing term bump", maxTermSeen, currentTerm); } else { try { logger.debug("updateMaxTermSeen: maxTermSeen = {} > currentTerm = {}, bumping term", maxTermSeen, currentTerm); ensureTermAtLeast(getLocalNode(), maxTermSeen); startElection(); } catch (Exception e) { logger.warn(new ParameterizedMessage("failed to bump term to {}", maxTermSeen), e); becomeCandidate("updateMaxTermSeen"); } } } } } private void startElection() { synchronized (mutex) { // The preVoteCollector is only active while we are candidate, but it does not call this method with synchronisation, so we have // to check our mode again here. 
if (mode == Mode.CANDIDATE) { if (localNodeMayWinElection(getLastAcceptedState()) == false) { logger.trace("skip election as local node may not win it: {}", getLastAcceptedState().coordinationMetadata()); return; } final StartJoinRequest startJoinRequest = new StartJoinRequest(getLocalNode(), Math.max(getCurrentTerm(), maxTermSeen) + 1); logger.debug("starting election with {}", startJoinRequest); getDiscoveredNodes().forEach(node -> joinHelper.sendStartJoinRequest(startJoinRequest, node)); } } } private void abdicateTo(DiscoveryNode newMaster) { assert Thread.holdsLock(mutex); assert mode == Mode.LEADER : "expected to be leader on abdication but was " + mode; assert newMaster.isMasterNode() : "should only abdicate to master-eligible node but was " + newMaster; final StartJoinRequest startJoinRequest = new StartJoinRequest(newMaster, Math.max(getCurrentTerm(), maxTermSeen) + 1); logger.info("abdicating to {} with term {}", newMaster, startJoinRequest.getTerm()); getLastAcceptedState().nodes().mastersFirstStream().forEach(node -> joinHelper.sendStartJoinRequest(startJoinRequest, node)); // handling of start join messages on the local node will be dispatched to the generic thread-pool assert mode == Mode.LEADER : "should still be leader after sending abdication messages " + mode; // explicitly move node to candidate state so that the next cluster state update task yields an onNoLongerMaster event becomeCandidate("after abdicating to " + newMaster); } private static boolean localNodeMayWinElection(ClusterState lastAcceptedState) { final DiscoveryNode localNode = lastAcceptedState.nodes().getLocalNode(); assert localNode != null; return nodeMayWinElection(lastAcceptedState, localNode); } private static boolean nodeMayWinElection(ClusterState lastAcceptedState, DiscoveryNode node) { final String nodeId = node.getId(); return lastAcceptedState.getLastCommittedConfiguration().getNodeIds().contains(nodeId) || 
lastAcceptedState.getLastAcceptedConfiguration().getNodeIds().contains(nodeId) || lastAcceptedState.getVotingConfigExclusions().stream().noneMatch(vce -> vce.getNodeId().equals(nodeId)); } private Optional<Join> ensureTermAtLeast(DiscoveryNode sourceNode, long targetTerm) { assert Thread.holdsLock(mutex) : "Coordinator mutex not held"; if (getCurrentTerm() < targetTerm) { return Optional.of(joinLeaderInTerm(new StartJoinRequest(sourceNode, targetTerm))); } return Optional.empty(); } private Join joinLeaderInTerm(StartJoinRequest startJoinRequest) { synchronized (mutex) { logger.debug("joinLeaderInTerm: for [{}] with term {}", startJoinRequest.getSourceNode(), startJoinRequest.getTerm()); final Join join = coordinationState.get().handleStartJoin(startJoinRequest); lastJoin = Optional.of(join); peerFinder.setCurrentTerm(getCurrentTerm()); if (mode != Mode.CANDIDATE) { becomeCandidate("joinLeaderInTerm"); // updates followersChecker and preVoteCollector } else { followersChecker.updateFastResponseState(getCurrentTerm(), mode); preVoteCollector.update(getPreVoteResponse(), null); } return join; } } private void handleJoinRequest(JoinRequest joinRequest, JoinHelper.JoinCallback joinCallback) { assert Thread.holdsLock(mutex) == false; assert getLocalNode().isMasterNode() : getLocalNode() + " received a join but is not master-eligible"; logger.trace("handleJoinRequest: as {}, handling {}", mode, joinRequest); if (singleNodeDiscovery && joinRequest.getSourceNode().equals(getLocalNode()) == false) { joinCallback.onFailure(new IllegalStateException("cannot join node with [" + DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey() + "] set to [" + DiscoveryModule.SINGLE_NODE_DISCOVERY_TYPE + "] discovery")); return; } transportService.connectToNode(joinRequest.getSourceNode(), ActionListener.wrap(ignore -> { final ClusterState stateForJoinValidation = getStateForMasterService(); if (stateForJoinValidation.nodes().isLocalNodeElectedMaster()) { onJoinValidators.forEach(a -> 
a.accept(joinRequest.getSourceNode(), stateForJoinValidation)); if (stateForJoinValidation.getBlocks().hasGlobalBlock(STATE_NOT_RECOVERED_BLOCK) == false) { // we do this in a couple of places including the cluster update thread. This one here is really just best effort // to ensure we fail as fast as possible. JoinTaskExecutor.ensureMajorVersionBarrier(joinRequest.getSourceNode().getVersion(), stateForJoinValidation.getNodes().getMinNodeVersion()); } sendValidateJoinRequest(stateForJoinValidation, joinRequest, joinCallback); } else { processJoinRequest(joinRequest, joinCallback); } }, joinCallback::onFailure)); } // package private for tests void sendValidateJoinRequest(ClusterState stateForJoinValidation, JoinRequest joinRequest, JoinHelper.JoinCallback joinCallback) { // validate the join on the joining node, will throw a failure if it fails the validation joinHelper.sendValidateJoinRequest(joinRequest.getSourceNode(), stateForJoinValidation, new ActionListener<Empty>() { @Override public void onResponse(Empty empty) { try { processJoinRequest(joinRequest, joinCallback); } catch (Exception e) { joinCallback.onFailure(e); } } @Override public void onFailure(Exception e) { logger.warn(() -> new ParameterizedMessage("failed to validate incoming join request from node [{}]", joinRequest.getSourceNode()), e); joinCallback.onFailure(new IllegalStateException("failure when sending a validation request to node", e)); } }); } private void processJoinRequest(JoinRequest joinRequest, JoinHelper.JoinCallback joinCallback) { final Optional<Join> optionalJoin = joinRequest.getOptionalJoin(); synchronized (mutex) { updateMaxTermSeen(joinRequest.getTerm()); final CoordinationState coordState = coordinationState.get(); final boolean prevElectionWon = coordState.electionWon(); optionalJoin.ifPresent(this::handleJoin); joinAccumulator.handleJoinRequest(joinRequest.getSourceNode(), joinCallback); if (prevElectionWon == false && coordState.electionWon()) { 
becomeLeader("handleJoinRequest"); } } } void becomeCandidate(String method) { assert Thread.holdsLock(mutex) : "Coordinator mutex not held"; logger.debug("{}: coordinator becoming CANDIDATE in term {} (was {}, lastKnownLeader was [{}])", method, getCurrentTerm(), mode, lastKnownLeader); if (mode != Mode.CANDIDATE) { final Mode prevMode = mode; mode = Mode.CANDIDATE; cancelActivePublication("become candidate: " + method); joinAccumulator.close(mode); joinAccumulator = joinHelper.new CandidateJoinAccumulator(); peerFinder.activate(coordinationState.get().getLastAcceptedState().nodes()); clusterFormationFailureHelper.start(); leaderChecker.setCurrentNodes(DiscoveryNodes.EMPTY_NODES); leaderChecker.updateLeader(null); followersChecker.clearCurrentNodes(); followersChecker.updateFastResponseState(getCurrentTerm(), mode); lagDetector.clearTrackedNodes(); if (prevMode == Mode.LEADER) { cleanMasterService(); } if (applierState.nodes().getMasterNodeId() != null) { applierState = clusterStateWithNoMasterBlock(applierState); clusterApplier.onNewClusterState("becoming candidate: " + method, () -> applierState, (source, e) -> { }); } } preVoteCollector.update(getPreVoteResponse(), null); } void becomeLeader(String method) { assert Thread.holdsLock(mutex) : "Coordinator mutex not held"; assert mode == Mode.CANDIDATE : "expected candidate but was " + mode; assert getLocalNode().isMasterNode() : getLocalNode() + " became a leader but is not master-eligible"; logger.debug("{}: coordinator becoming LEADER in term {} (was {}, lastKnownLeader was [{}])", method, getCurrentTerm(), mode, lastKnownLeader); mode = Mode.LEADER; joinAccumulator.close(mode); joinAccumulator = joinHelper.new LeaderJoinAccumulator(); lastKnownLeader = Optional.of(getLocalNode()); peerFinder.deactivate(getLocalNode()); clusterFormationFailureHelper.stop(); closePrevotingAndElectionScheduler(); preVoteCollector.update(getPreVoteResponse(), getLocalNode()); assert leaderChecker.leader() == null : 
leaderChecker.leader(); followersChecker.updateFastResponseState(getCurrentTerm(), mode); } void becomeFollower(String method, DiscoveryNode leaderNode) { assert Thread.holdsLock(mutex) : "Coordinator mutex not held"; assert leaderNode.isMasterNode() : leaderNode + " became a leader but is not master-eligible"; assert mode != Mode.LEADER : "do not switch to follower from leader (should be candidate first)"; if (mode == Mode.FOLLOWER && Optional.of(leaderNode).equals(lastKnownLeader)) { logger.trace("{}: coordinator remaining FOLLOWER of [{}] in term {}", method, leaderNode, getCurrentTerm()); } else { logger.debug("{}: coordinator becoming FOLLOWER of [{}] in term {} (was {}, lastKnownLeader was [{}])", method, leaderNode, getCurrentTerm(), mode, lastKnownLeader); } final boolean restartLeaderChecker = (mode == Mode.FOLLOWER && Optional.of(leaderNode).equals(lastKnownLeader)) == false; if (mode != Mode.FOLLOWER) { mode = Mode.FOLLOWER; joinAccumulator.close(mode); joinAccumulator = new JoinHelper.FollowerJoinAccumulator(); leaderChecker.setCurrentNodes(DiscoveryNodes.EMPTY_NODES); } lastKnownLeader = Optional.of(leaderNode); peerFinder.deactivate(leaderNode); clusterFormationFailureHelper.stop(); closePrevotingAndElectionScheduler(); cancelActivePublication("become follower: " + method); preVoteCollector.update(getPreVoteResponse(), leaderNode); if (restartLeaderChecker) { leaderChecker.updateLeader(leaderNode); } followersChecker.clearCurrentNodes(); followersChecker.updateFastResponseState(getCurrentTerm(), mode); lagDetector.clearTrackedNodes(); } private void cleanMasterService() { masterService.submitStateUpdateTask("clean-up after stepping down as master", new LocalClusterUpdateTask() { @Override public void onFailure(String source, Exception e) { // ignore logger.trace("failed to clean-up after stepping down as master", e); } @Override public ClusterTasksResult<LocalClusterUpdateTask> execute(ClusterState currentState) { if 
(currentState.nodes().isLocalNodeElectedMaster() == false) { allocationService.cleanCaches(); } return unchanged(); } }); } private PreVoteResponse getPreVoteResponse() { return new PreVoteResponse(getCurrentTerm(), coordinationState.get().getLastAcceptedTerm(), coordinationState.get().getLastAcceptedState().version()); } // package-visible for testing long getCurrentTerm() { synchronized (mutex) { return coordinationState.get().getCurrentTerm(); } } // package-visible for testing Mode getMode() { synchronized (mutex) { return mode; } } // visible for testing DiscoveryNode getLocalNode() { return transportService.getLocalNode(); } // package-visible for testing boolean publicationInProgress() { synchronized (mutex) { return currentPublication.isPresent(); } } @Override protected void doStart() { synchronized (mutex) { CoordinationState.PersistedState persistedState = persistedStateSupplier.get(); coordinationState.set(new CoordinationState(getLocalNode(), persistedState, electionStrategy)); peerFinder.setCurrentTerm(getCurrentTerm()); configuredHostsResolver.start(); final ClusterState lastAcceptedState = coordinationState.get().getLastAcceptedState(); if (lastAcceptedState.metadata().clusterUUIDCommitted()) { logger.info("cluster UUID [{}]", lastAcceptedState.metadata().clusterUUID()); } final VotingConfiguration votingConfiguration = lastAcceptedState.getLastCommittedConfiguration(); if (singleNodeDiscovery && votingConfiguration.isEmpty() == false && votingConfiguration.hasQuorum(Collections.singleton(getLocalNode().getId())) == false) { throw new IllegalStateException("cannot start with [" + DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey() + "] set to [" + DiscoveryModule.SINGLE_NODE_DISCOVERY_TYPE + "] when local node " + getLocalNode() + " does not have quorum in voting configuration " + votingConfiguration); } ClusterState initialState = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.get(settings)) .blocks(ClusterBlocks.builder() 
.addGlobalBlock(STATE_NOT_RECOVERED_BLOCK) .addGlobalBlock(noMasterBlockService.getNoMasterBlock())) .nodes(DiscoveryNodes.builder().add(getLocalNode()).localNodeId(getLocalNode().getId())) .build(); applierState = initialState; clusterApplier.setInitialState(initialState); } } @Override public DiscoveryStats stats() { return new DiscoveryStats(new PendingClusterStateStats(0, 0, 0), publicationHandler.stats()); } @Override public void startInitialJoin() { synchronized (mutex) { becomeCandidate("startInitialJoin"); } clusterBootstrapService.scheduleUnconfiguredBootstrap(); } @Override protected void doStop() { configuredHostsResolver.stop(); } @Override protected void doClose() throws IOException { final CoordinationState coordinationState = this.coordinationState.get(); if (coordinationState != null) { // This looks like a race that might leak an unclosed CoordinationState if it's created while execution is here, but this method // is synchronized on AbstractLifecycleComponent#lifestyle, as is the doStart() method that creates the CoordinationState, so // it's all ok. 
synchronized (mutex) { coordinationState.close(); } } } public void invariant() { synchronized (mutex) { final Optional<DiscoveryNode> peerFinderLeader = peerFinder.getLeader(); assert peerFinder.getCurrentTerm() == getCurrentTerm(); assert followersChecker.getFastResponseState().term == getCurrentTerm() : followersChecker.getFastResponseState(); assert followersChecker.getFastResponseState().mode == getMode() : followersChecker.getFastResponseState(); assert (applierState.nodes().getMasterNodeId() == null) == applierState.blocks().hasGlobalBlockWithId(NO_MASTER_BLOCK_ID); assert preVoteCollector.getPreVoteResponse().equals(getPreVoteResponse()) : preVoteCollector + " vs " + getPreVoteResponse(); assert lagDetector.getTrackedNodes().contains(getLocalNode()) == false : lagDetector.getTrackedNodes(); assert followersChecker.getKnownFollowers().equals(lagDetector.getTrackedNodes()) : followersChecker.getKnownFollowers() + " vs " + lagDetector.getTrackedNodes(); if (mode == Mode.LEADER) { final boolean becomingMaster = getStateForMasterService().term() != getCurrentTerm(); assert coordinationState.get().electionWon(); assert lastKnownLeader.isPresent() && lastKnownLeader.get().equals(getLocalNode()); assert joinAccumulator instanceof JoinHelper.LeaderJoinAccumulator; assert peerFinderLeader.equals(lastKnownLeader) : peerFinderLeader; assert electionScheduler == null : electionScheduler; assert prevotingRound == null : prevotingRound; assert becomingMaster || getStateForMasterService().nodes().getMasterNodeId() != null : getStateForMasterService(); assert leaderChecker.leader() == null : leaderChecker.leader(); assert getLocalNode().equals(applierState.nodes().getMasterNode()) || (applierState.nodes().getMasterNodeId() == null && applierState.term() < getCurrentTerm()); assert preVoteCollector.getLeader() == getLocalNode() : preVoteCollector; assert clusterFormationFailureHelper.isRunning() == false; final boolean activePublication = 
currentPublication.map(CoordinatorPublication::isActiveForCurrentLeader).orElse(false); if (becomingMaster && activePublication == false) { // cluster state update task to become master is submitted to MasterService, but publication has not started yet assert followersChecker.getKnownFollowers().isEmpty() : followersChecker.getKnownFollowers(); } else { final ClusterState lastPublishedState; if (activePublication) { // active publication in progress: followersChecker is up-to-date with nodes that we're actively publishing to lastPublishedState = currentPublication.get().publishedState(); } else { // no active publication: followersChecker is up-to-date with the nodes of the latest publication lastPublishedState = coordinationState.get().getLastAcceptedState(); } final Set<DiscoveryNode> lastPublishedNodes = new HashSet<>(); lastPublishedState.nodes().forEach(lastPublishedNodes::add); assert lastPublishedNodes.remove(getLocalNode()); // followersChecker excludes local node assert lastPublishedNodes.equals(followersChecker.getKnownFollowers()) : lastPublishedNodes + " != " + followersChecker.getKnownFollowers(); } assert becomingMaster || activePublication || coordinationState.get().getLastAcceptedConfiguration().equals(coordinationState.get().getLastCommittedConfiguration()) : coordinationState.get().getLastAcceptedConfiguration() + " != " + coordinationState.get().getLastCommittedConfiguration(); } else if (mode == Mode.FOLLOWER) { assert coordinationState.get().electionWon() == false : getLocalNode() + " is FOLLOWER so electionWon() should be false"; assert lastKnownLeader.isPresent() && (lastKnownLeader.get().equals(getLocalNode()) == false); assert joinAccumulator instanceof JoinHelper.FollowerJoinAccumulator; assert peerFinderLeader.equals(lastKnownLeader) : peerFinderLeader; assert electionScheduler == null : electionScheduler; assert prevotingRound == null : prevotingRound; assert getStateForMasterService().nodes().getMasterNodeId() == null : 
getStateForMasterService(); assert leaderChecker.currentNodeIsMaster() == false; assert lastKnownLeader.equals(Optional.of(leaderChecker.leader())); assert followersChecker.getKnownFollowers().isEmpty(); assert lastKnownLeader.get().equals(applierState.nodes().getMasterNode()) || (applierState.nodes().getMasterNodeId() == null && (applierState.term() < getCurrentTerm() || applierState.version() < getLastAcceptedState().version())); assert currentPublication.map(Publication::isCommitted).orElse(true); assert preVoteCollector.getLeader().equals(lastKnownLeader.get()) : preVoteCollector; assert clusterFormationFailureHelper.isRunning() == false; } else { assert mode == Mode.CANDIDATE; assert joinAccumulator instanceof JoinHelper.CandidateJoinAccumulator; assert peerFinderLeader.isPresent() == false : peerFinderLeader; assert prevotingRound == null || electionScheduler != null; assert getStateForMasterService().nodes().getMasterNodeId() == null : getStateForMasterService(); assert leaderChecker.currentNodeIsMaster() == false; assert leaderChecker.leader() == null : leaderChecker.leader(); assert followersChecker.getKnownFollowers().isEmpty(); assert applierState.nodes().getMasterNodeId() == null; assert currentPublication.map(Publication::isCommitted).orElse(true); assert preVoteCollector.getLeader() == null : preVoteCollector; assert clusterFormationFailureHelper.isRunning(); } } } public boolean isInitialConfigurationSet() { return getStateForMasterService().getLastAcceptedConfiguration().isEmpty() == false; } /** * Sets the initial configuration to the given {@link VotingConfiguration}. This method is safe to call * more than once, as long as the argument to each call is the same. * * @param votingConfiguration The nodes that should form the initial configuration. * @return whether this call successfully set the initial configuration - if false, the cluster has already been bootstrapped. 
 */
public boolean setInitialConfiguration(final VotingConfiguration votingConfiguration) {
    synchronized (mutex) {
        final ClusterState currentState = getStateForMasterService();

        // idempotent: a second bootstrap attempt is ignored rather than rejected
        if (isInitialConfigurationSet()) {
            logger.debug("initial configuration already set, ignoring {}", votingConfiguration);
            return false;
        }

        if (getLocalNode().isMasterNode() == false) {
            logger.debug("skip setting initial configuration as local node is not a master-eligible node");
            throw new CoordinationStateRejectedException(
                "this node is not master-eligible, but cluster bootstrapping can only happen on a master-eligible node");
        }

        if (votingConfiguration.getNodeIds().contains(getLocalNode().getId()) == false) {
            logger.debug("skip setting initial configuration as local node is not part of initial configuration");
            throw new CoordinationStateRejectedException("local node is not part of initial configuration");
        }

        // known nodes = the local node plus everything the peer finder has discovered so far
        final List<DiscoveryNode> knownNodes = new ArrayList<>();
        knownNodes.add(getLocalNode());
        peerFinder.getFoundPeers().forEach(knownNodes::add);

        // refuse to bootstrap until enough of the initial configuration has actually been discovered,
        // otherwise the newly-bootstrapped cluster could not elect a master anyway
        if (votingConfiguration.hasQuorum(knownNodes.stream().map(DiscoveryNode::getId).collect(Collectors.toList())) == false) {
            logger.debug("skip setting initial configuration as not enough nodes discovered to form a quorum in the " +
                "initial configuration [knownNodes={}, {}]", knownNodes, votingConfiguration);
            throw new CoordinationStateRejectedException("not enough nodes discovered to form a quorum in the initial configuration " +
                "[knownNodes=" + knownNodes + ", " + votingConfiguration + "]");
        }

        logger.info("setting initial configuration to {}", votingConfiguration);
        // the initial configuration is both the last-accepted and the last-committed configuration
        final CoordinationMetadata coordinationMetadata = CoordinationMetadata.builder(currentState.coordinationMetadata())
            .lastAcceptedConfiguration(votingConfiguration)
            .lastCommittedConfiguration(votingConfiguration)
            .build();

        Metadata.Builder metadataBuilder = Metadata.builder(currentState.metadata());
        // automatically generate a UID for the metadata if we need to
        metadataBuilder.generateClusterUuidIfNeeded();
        metadataBuilder.coordinationMetadata(coordinationMetadata);

        coordinationState.get().setInitialState(ClusterState.builder(currentState).metadata(metadataBuilder).build());
        assert localNodeMayWinElection(getLastAcceptedState()) :
            "initial state does not allow local node to win election: " + getLastAcceptedState().coordinationMetadata();
        preVoteCollector.update(getPreVoteResponse(), null); // pick up the change to last-accepted version
        startElectionScheduler();
        return true;
    }
}

// Package-private for testing
// Computes a possibly-improved voting configuration for the given state, taking exclusions and
// master-ineligibility into account; returns the input state unchanged if no improvement is found.
ClusterState improveConfiguration(ClusterState clusterState) {
    assert Thread.holdsLock(mutex) : "Coordinator mutex not held";
    assert validVotingConfigExclusionState(clusterState) : clusterState;

    // exclude any nodes whose ID is in the voting config exclusions list ...
    final Stream<String> excludedNodeIds = clusterState.getVotingConfigExclusions().stream().map(VotingConfigExclusion::getNodeId);
    // ... and also automatically exclude the node IDs of master-ineligible nodes that were previously master-eligible and are still in
    // the voting config. We could exclude all the master-ineligible nodes here, but there could be quite a few of them and that makes
    // the logging much harder to follow.
    final Stream<String> masterIneligibleNodeIdsInVotingConfig = StreamSupport.stream(clusterState.nodes().spliterator(), false)
        .filter(n -> n.isMasterNode() == false
            && (clusterState.getLastAcceptedConfiguration().getNodeIds().contains(n.getId())
                || clusterState.getLastCommittedConfiguration().getNodeIds().contains(n.getId())))
        .map(DiscoveryNode::getId);

    // live nodes = master-eligible nodes from which we hold a join vote in the current term
    final Set<DiscoveryNode> liveNodes = StreamSupport.stream(clusterState.nodes().spliterator(), false)
        .filter(DiscoveryNode::isMasterNode).filter(coordinationState.get()::containsJoinVoteFor).collect(Collectors.toSet());
    final VotingConfiguration newConfig = reconfigurator.reconfigure(liveNodes,
        Stream.concat(masterIneligibleNodeIdsInVotingConfig, excludedNodeIds).collect(Collectors.toSet()),
        getLocalNode(), clusterState.getLastAcceptedConfiguration());

    if (newConfig.equals(clusterState.getLastAcceptedConfiguration()) == false) {
        // only switch to the new configuration if we already hold a quorum of join votes for it
        assert coordinationState.get().joinVotesHaveQuorumFor(newConfig);
        return ClusterState.builder(clusterState).metadata(Metadata.builder(clusterState.metadata())
            .coordinationMetadata(CoordinationMetadata.builder(clusterState.coordinationMetadata())
                .lastAcceptedConfiguration(newConfig).build())).build();
    }
    return clusterState;
}

/*
 * Valid Voting Configuration Exclusion state criteria:
 * 1. Every voting config exclusion with an ID of _absent_ should not match any nodes currently in the cluster by name
 * 2. Every voting config exclusion with a name of _absent_ should not match any nodes currently in the cluster by ID
 */
static boolean validVotingConfigExclusionState(ClusterState clusterState) {
    Set<VotingConfigExclusion> votingConfigExclusions = clusterState.getVotingConfigExclusions();
    // names of exclusions whose node ID is the "absent" placeholder
    Set<String> nodeNamesWithAbsentId = votingConfigExclusions.stream()
        .filter(e -> e.getNodeId().equals(VotingConfigExclusion.MISSING_VALUE_MARKER))
        .map(VotingConfigExclusion::getNodeName)
        .collect(Collectors.toSet());
    // IDs of exclusions whose node name is the "absent" placeholder
    Set<String> nodeIdsWithAbsentName = votingConfigExclusions.stream()
        .filter(e -> e.getNodeName().equals(VotingConfigExclusion.MISSING_VALUE_MARKER))
        .map(VotingConfigExclusion::getNodeId)
        .collect(Collectors.toSet());
    for (DiscoveryNode node : clusterState.getNodes()) {
        // an exclusion with a placeholder component must not match a master-eligible node currently in the cluster
        if (node.isMasterNode() &&
            (nodeIdsWithAbsentName.contains(node.getId()) || nodeNamesWithAbsentId.contains(node.getName()))) {
            return false;
        }
    }

    return true;
}

// guards against submitting more than one pending "reconfigure" cluster state update task at a time
private AtomicBoolean reconfigurationTaskScheduled = new AtomicBoolean();

// If the current configuration can be improved, submits a cluster state update task that applies the
// improvement; at most one such task is in flight at a time. Caller must hold the Coordinator mutex
// and be an established leader with no publication in progress.
private void scheduleReconfigurationIfNeeded() {
    assert Thread.holdsLock(mutex) : "Coordinator mutex not held";
    assert mode == Mode.LEADER : mode;
    assert currentPublication.isPresent() == false : "Expected no publication in progress";

    final ClusterState state = getLastAcceptedState();
    if (improveConfiguration(state) != state && reconfigurationTaskScheduled.compareAndSet(false, true)) {
        logger.trace("scheduling reconfiguration");
        masterService.submitStateUpdateTask("reconfigure", new ClusterStateUpdateTask(Priority.URGENT) {
            @Override
            public ClusterState execute(ClusterState currentState) {
                // clear the flag before recomputing so that a later change can schedule another task
                reconfigurationTaskScheduled.set(false);
                synchronized (mutex) {
                    return improveConfiguration(currentState);
                }
            }

            @Override
            public void onFailure(String source, Exception e) {
                reconfigurationTaskScheduled.set(false);
                logger.debug("reconfiguration failed", e);
            }
        });
    }
}

// exposed for tests
// true for a master-eligible node from which we do not (yet) hold a join vote in the current term
boolean missingJoinVoteFrom(DiscoveryNode node) {
    return node.isMasterNode() &&
        coordinationState.get().containsJoinVoteFor(node) == false;
}

// Processes a join vote, first bumping our term to the join's term if necessary (which recurses back
// into this method with the resulting fresh join, if any). Caller need not hold the mutex.
private void handleJoin(Join join) {
    synchronized (mutex) {
        ensureTermAtLeast(getLocalNode(), join.getTerm()).ifPresent(this::handleJoin);

        if (coordinationState.get().electionWon()) {
            // If we have already won the election then the actual join does not matter for election purposes, so swallow any exception
            final boolean isNewJoinFromMasterEligibleNode = handleJoinIgnoringExceptions(join);

            // If we haven't completely finished becoming master then there's already a publication scheduled which will, in turn,
            // schedule a reconfiguration if needed. It's benign to schedule a reconfiguration anyway, but it might fail if it wins the
            // race against the election-winning publication and log a big error message, which we can prevent by checking this here:
            final boolean establishedAsMaster = mode == Mode.LEADER && getLastAcceptedState().term() == getCurrentTerm();
            if (isNewJoinFromMasterEligibleNode && establishedAsMaster && publicationInProgress() == false) {
                scheduleReconfigurationIfNeeded();
            }
        } else {
            coordinationState.get().handleJoin(join); // this might fail and bubble up the exception
        }
    }
}

/**
 * @return true iff the join was from a new node and was successfully added
 */
private boolean handleJoinIgnoringExceptions(Join join) {
    try {
        return coordinationState.get().handleJoin(join);
    } catch (CoordinationStateRejectedException e) {
        // a rejected join is expected in some races and is deliberately swallowed here (see handleJoin)
        logger.debug(new ParameterizedMessage("failed to add {} - ignoring", join), e);
        return false;
    }
}

/** @return the last cluster state that this node accepted (not necessarily committed or applied yet). */
public ClusterState getLastAcceptedState() {
    synchronized (mutex) {
        return coordinationState.get().getLastAcceptedState();
    }
}

/** @return the cluster state most recently handed to the applier, or null if none yet. */
@Nullable
public ClusterState getApplierState() {
    return applierState;
}

// the local node plus every peer the peer finder currently knows about
private List<DiscoveryNode> getDiscoveredNodes() {
    final List<DiscoveryNode> nodes = new ArrayList<>();
    nodes.add(getLocalNode());
    peerFinder.getFoundPeers().forEach(nodes::add);
    return nodes;
}

// The state handed to the MasterService as the base for speculative state updates; carries a
// no-master block unless this node is an established leader for the current term.
ClusterState getStateForMasterService() {
    synchronized (mutex) {
        // expose last accepted cluster state as base state upon which the master service
        // speculatively calculates the next cluster state update
        final ClusterState clusterState = coordinationState.get().getLastAcceptedState();
        if (mode != Mode.LEADER || clusterState.term() != getCurrentTerm()) {
            // the master service checks if the local node is the master node in order to fail execution of the state update early
            return clusterStateWithNoMasterBlock(clusterState);
        }
        return clusterState;
    }
}

// Returns a copy of the given state with the master node ID cleared and the configured no-master
// block applied; returns the state unchanged if it already has no master.
private ClusterState clusterStateWithNoMasterBlock(ClusterState clusterState) {
    if (clusterState.nodes().getMasterNodeId() != null) {
        // remove block if it already exists before adding new one
        assert clusterState.blocks().hasGlobalBlockWithId(NO_MASTER_BLOCK_ID) == false :
            "NO_MASTER_BLOCK should only be added by Coordinator";
        final ClusterBlocks clusterBlocks = ClusterBlocks.builder().blocks(clusterState.blocks()).addGlobalBlock(
            noMasterBlockService.getNoMasterBlock()).build();
        final DiscoveryNodes discoveryNodes = new DiscoveryNodes.Builder(clusterState.nodes()).masterNodeId(null).build();
        return ClusterState.builder(clusterState).blocks(clusterBlocks).nodes(discoveryNodes).build();
    } else {
        return clusterState;
    }
}

// Starts publishing the given cluster state change to the other nodes; fails the publishListener
// (with FailedToCommitClusterStateException) rather than throwing if publication cannot start.
@Override
public void publish(ClusterChangedEvent clusterChangedEvent, ActionListener<Void> publishListener, AckListener ackListener) {
    try {
        synchronized (mutex) {
            // only an established leader for the state's term may publish it
            if (mode != Mode.LEADER || getCurrentTerm() != clusterChangedEvent.state().term()) {
                logger.debug(() -> new ParameterizedMessage("[{}] failed publication as node is no longer master for term {}",
                    clusterChangedEvent.source(), clusterChangedEvent.state().term()));
                publishListener.onFailure(new FailedToCommitClusterStateException("node is no longer master for term " +
                    clusterChangedEvent.state().term() + " while handling publication"));
                return;
            }

            // at most one publication may be in flight at a time
            if (currentPublication.isPresent()) {
                assert false : "[" + currentPublication.get() + "] in progress, cannot start new publication";
                logger.warn(() -> new ParameterizedMessage("[{}] failed publication as already publication in progress",
                    clusterChangedEvent.source()));
                publishListener.onFailure(new FailedToCommitClusterStateException("publication " + currentPublication.get() +
                    " already in progress"));
                return;
            }

            assert assertPreviousStateConsistency(clusterChangedEvent);

            final ClusterState clusterState = clusterChangedEvent.state();

            assert getLocalNode().equals(clusterState.getNodes().get(getLocalNode().getId())) :
                getLocalNode() + " should be in published " + clusterState;

            final PublicationTransportHandler.PublicationContext publicationContext =
                publicationHandler.newPublicationContext(clusterChangedEvent);

            final PublishRequest publishRequest = coordinationState.get().handleClientValue(clusterState);
            final CoordinatorPublication publication = new CoordinatorPublication(publishRequest, publicationContext,
                new ListenableFuture<>(), ackListener, publishListener);
            currentPublication = Optional.of(publication);

            // point the leader/follower checkers and the lag detector at the nodes of the state being published
            final DiscoveryNodes publishNodes = publishRequest.getAcceptedState().nodes();
            leaderChecker.setCurrentNodes(publishNodes);
            followersChecker.setCurrentNodes(publishNodes);
            lagDetector.setTrackedNodes(publishNodes);
            publication.start(followersChecker.getFaultyNodes());
        }
    } catch (Exception e) {
        logger.debug(() -> new ParameterizedMessage("[{}] publishing failed", clusterChangedEvent.source()), e);
        publishListener.onFailure(new FailedToCommitClusterStateException("publishing failed", e));
    }
}

// there is no equals on cluster state, so we just serialize it to XContent and compare Maps
// deserialized from the resulting JSON
private boolean assertPreviousStateConsistency(ClusterChangedEvent event) {
    assert event.previousState() == coordinationState.get().getLastAcceptedState() ||
        XContentHelper.convertToMap(
            JsonXContent.jsonXContent, Strings.toString(event.previousState()), false
        ).equals(
            XContentHelper.convertToMap(
                JsonXContent.jsonXContent,
                Strings.toString(clusterStateWithNoMasterBlock(coordinationState.get().getLastAcceptedState())),
                false)) :
        Strings.toString(event.previousState()) + " vs " +
        Strings.toString(clusterStateWithNoMasterBlock(coordinationState.get().getLastAcceptedState()));
    return true;
}

// Wraps a listener so that both callbacks run while holding the Coordinator mutex.
private <T> ActionListener<T> wrapWithMutex(ActionListener<T> listener) {
    return new ActionListener<T>() {
        @Override
        public void onResponse(T t) {
            synchronized (mutex) {
                listener.onResponse(t);
            }
        }

        @Override
        public void onFailure(Exception e) {
            synchronized (mutex) {
                listener.onFailure(e);
            }
        }
    };
}

// Cancels the in-flight publication, if any. Caller must hold the Coordinator mutex.
private void cancelActivePublication(String reason) {
    assert Thread.holdsLock(mutex) : "Coordinator mutex not held";
    if (currentPublication.isPresent()) {
        currentPublication.get().cancel(reason);
    }
}

public Collection<BiConsumer<DiscoveryNode, ClusterState>> getOnJoinValidators() {
    return onJoinValidators;
}

/** The role this node is currently playing in the coordination protocol. */
public enum Mode {
    CANDIDATE, LEADER, FOLLOWER
}

// PeerFinder specialization that reacts to discovery events by joining a found master or
// (as a candidate) deciding whether to start or stop the election scheduler.
private class CoordinatorPeerFinder extends PeerFinder {

    CoordinatorPeerFinder(Settings settings, TransportService transportService, TransportAddressConnector transportAddressConnector,
                          ConfiguredHostsResolver configuredHostsResolver) {
        // in single-node discovery mode no configured hosts are ever resolved
        super(settings, transportService, transportAddressConnector,
            singleNodeDiscovery ? hostsResolver -> Collections.emptyList() : configuredHostsResolver);
    }

    @Override
    protected void onActiveMasterFound(DiscoveryNode masterNode, long term) {
        synchronized (mutex) {
            // catch up to the discovered master's term, then ask to join it
            ensureTermAtLeast(masterNode, term);
            joinHelper.sendJoinRequest(masterNode, getCurrentTerm(), joinWithDestination(lastJoin, masterNode, term));
        }
    }

    @Override
    protected void startProbe(TransportAddress transportAddress) {
        // single-node discovery never probes other nodes
        if (singleNodeDiscovery == false) {
            super.startProbe(transportAddress);
        }
    }

    @Override
    protected void onFoundPeersUpdated() {
        synchronized (mutex) {
            final Iterable<DiscoveryNode> foundPeers = getFoundPeers();
            if (mode == Mode.CANDIDATE) {
                // check whether the discovered peers (plus ourselves) could form an election quorum
                final VoteCollection expectedVotes = new VoteCollection();
                foundPeers.forEach(expectedVotes::addVote);
                expectedVotes.addVote(Coordinator.this.getLocalNode());
                final boolean foundQuorum = coordinationState.get().isElectionQuorum(expectedVotes);

                if (foundQuorum) {
                    if (electionScheduler == null) {
                        startElectionScheduler();
                    }
                } else {
                    // not enough peers for a quorum: stand down any election attempt
                    closePrevotingAndElectionScheduler();
                }
            }
        }

        clusterBootstrapService.onFoundPeersUpdated();
    }
}

// Kicks off the recurring election scheduler, which repeatedly starts a prevoting round for as
// long as this node remains a candidate. No-op on master-ineligible nodes.
private void startElectionScheduler() {
    assert electionScheduler == null : electionScheduler;

    if (getLocalNode().isMasterNode() == false) {
        return;
    }

    final TimeValue gracePeriod = TimeValue.ZERO;
    electionScheduler = electionSchedulerFactory.startElectionScheduler(gracePeriod, new Runnable() {
        @Override
        public void run() {
            synchronized (mutex) {
                if (mode == Mode.CANDIDATE) {
                    final ClusterState lastAcceptedState = coordinationState.get().getLastAcceptedState();

                    if (localNodeMayWinElection(lastAcceptedState) == false) {
                        logger.trace("skip prevoting as local node may not win election: {}",
                            lastAcceptedState.coordinationMetadata());
                        return;
                    }

                    // replace any previous prevoting round with a fresh one
                    if (prevotingRound != null) {
                        prevotingRound.close();
                    }
                    prevotingRound = preVoteCollector.start(lastAcceptedState, getDiscoveredNodes());
                }
            }
        }

        @Override
        public String toString() {
            return "scheduling of new prevoting round";
        }
    });
}

public Iterable<DiscoveryNode> getFoundPeers() {
    return
    peerFinder.getFoundPeers();
}

/**
 * If there is any current committed publication, this method cancels it.
 * This method is used exclusively by tests.
 * @return true if publication was cancelled, false if there is no current committed publication.
 */
boolean cancelCommittedPublication() {
    synchronized (mutex) {
        if (currentPublication.isPresent()) {
            final CoordinatorPublication publication = currentPublication.get();
            if (publication.isCommitted()) {
                publication.cancel("cancelCommittedPublication");
                logger.debug("Cancelled publication of [{}].", publication);
                return true;
            }
        }
        return false;
    }
}

// A single attempt to publish a cluster state to the rest of the cluster, tying the abstract
// Publication machinery to this Coordinator's checkers, timeouts, acking and join handling.
class CoordinatorPublication extends Publication {

    private final PublishRequest publishRequest;
    // completes when the local node has acked (or failed to ack) its own publication
    private final ListenableFuture<Void> localNodeAckEvent;
    private final AckListener ackListener;
    private final ActionListener<Void> publishListener;
    private final PublicationTransportHandler.PublicationContext publicationContext;

    @Nullable // if using single-node discovery
    private final Scheduler.ScheduledCancellable timeoutHandler;
    // fires later than timeoutHandler, to log nodes that still have not responded
    private final Scheduler.Cancellable infoTimeoutHandler;

    // We may not have accepted our own state before receiving a join from another node, causing its join to be rejected (we cannot
    // safely accept a join whose last-accepted term/version is ahead of ours), so store them up and process them at the end.
    private final List<Join> receivedJoins = new ArrayList<>();
    private boolean receivedJoinsProcessed;

    CoordinatorPublication(PublishRequest publishRequest, PublicationTransportHandler.PublicationContext publicationContext,
                           ListenableFuture<Void> localNodeAckEvent, AckListener ackListener, ActionListener<Void> publishListener) {
        super(publishRequest,
            new AckListener() {
                @Override
                public void onCommit(TimeValue commitTime) {
                    ackListener.onCommit(commitTime);
                }

                @Override
                public void onNodeAck(DiscoveryNode node, Exception e) {
                    // acking and cluster state application for local node is handled specially
                    if (node.equals(getLocalNode())) {
                        synchronized (mutex) {
                            if (e == null) {
                                localNodeAckEvent.onResponse(null);
                            } else {
                                localNodeAckEvent.onFailure(e);
                            }
                        }
                    } else {
                        ackListener.onNodeAck(node, e);
                        if (e == null) {
                            // a successful remote ack means that node has applied this version
                            lagDetector.setAppliedVersion(node, publishRequest.getAcceptedState().version());
                        }
                    }
                }
            },
            transportService.getThreadPool()::relativeTimeInMillis);
        this.publishRequest = publishRequest;
        this.publicationContext = publicationContext;
        this.localNodeAckEvent = localNodeAckEvent;
        this.ackListener = ackListener;
        this.publishListener = publishListener;
        // no publish timeout in single-node discovery (timeoutHandler stays null, see field annotation)
        this.timeoutHandler = singleNodeDiscovery ?
            null :
            transportService.getThreadPool().schedule(new Runnable() {
                @Override
                public void run() {
                    synchronized (mutex) {
                        cancel("timed out after " + publishTimeout);
                    }
                }

                @Override
                public String toString() {
                    return "scheduled timeout for " + CoordinatorPublication.this;
                }
            }, publishTimeout, Names.GENERIC);

        this.infoTimeoutHandler = transportService.getThreadPool().schedule(new Runnable() {
            @Override
            public void run() {
                synchronized (mutex) {
                    logIncompleteNodes(Level.INFO);
                }
            }

            @Override
            public String toString() {
                return "scheduled timeout for reporting on " + CoordinatorPublication.this;
            }
        }, publishInfoTimeout, Names.GENERIC);
    }

    // Clears this publication as the current one and reverts to candidate mode if this publication's
    // leadership is still current. Caller must hold the Coordinator mutex.
    private void removePublicationAndPossiblyBecomeCandidate(String reason) {
        assert Thread.holdsLock(mutex) : "Coordinator mutex not held";

        assert currentPublication.get() == this;
        currentPublication = Optional.empty();
        logger.debug("publication ended unsuccessfully: {}", this);

        // check if node has not already switched modes (by bumping term)
        if (isActiveForCurrentLeader()) {
            becomeCandidate(reason);
        }
    }

    boolean isActiveForCurrentLeader() {
        // checks if this publication can still influence the mode of the current publication
        return mode == Mode.LEADER && publishRequest.getAcceptedState().term() == getCurrentTerm();
    }

    // Runs once the publication finishes (committed or not): waits for the local node's own ack and
    // then either applies the state locally and completes the listeners, or fails everything.
    @Override
    protected void onCompletion(boolean committed) {
        assert Thread.holdsLock(mutex) : "Coordinator mutex not held";

        localNodeAckEvent.addListener(new ActionListener<Void>() {
            @Override
            public void onResponse(Void ignore) {
                assert Thread.holdsLock(mutex) : "Coordinator mutex not held";
                assert committed;

                // now that our own state is accepted, the joins deferred during publication can be processed
                receivedJoins.forEach(CoordinatorPublication.this::handleAssociatedJoin);
                assert receivedJoinsProcessed == false;
                receivedJoinsProcessed = true;

                clusterApplier.onNewClusterState(CoordinatorPublication.this.toString(), () -> applierState,
                    new ClusterApplyListener() {
                        @Override
                        public void onFailure(String source, Exception e) {
                            synchronized (mutex) {
                                removePublicationAndPossiblyBecomeCandidate("clusterApplier#onNewClusterState");
                            }
                            cancelTimeoutHandlers();
                            ackListener.onNodeAck(getLocalNode(), e);
                            publishListener.onFailure(e);
                        }

                        @Override
                        public void onSuccess(String source) {
                            synchronized (mutex) {
                                assert currentPublication.get() == CoordinatorPublication.this;
                                currentPublication = Optional.empty();
                                logger.debug("publication ended successfully: {}", CoordinatorPublication.this);
                                // trigger term bump if new term was found during publication
                                updateMaxTermSeen(getCurrentTerm());

                                if (mode == Mode.LEADER) {
                                    // if necessary, abdicate to another node or improve the voting configuration
                                    boolean attemptReconfiguration = true;
                                    final ClusterState state = getLastAcceptedState(); // committed state
                                    if (localNodeMayWinElection(state) == false) {
                                        final List<DiscoveryNode> masterCandidates = completedNodes().stream()
                                            .filter(DiscoveryNode::isMasterNode)
                                            .filter(node -> nodeMayWinElection(state, node))
                                            .filter(node -> {
                                                // check if master candidate would be able to get an election quorum if we were to
                                                // abdicate to it. Assume that every node that completed the publication can provide
                                                // a vote in that next election and has the latest state.
                                                final long futureElectionTerm = state.term() + 1;
                                                final VoteCollection futureVoteCollection = new VoteCollection();
                                                completedNodes().forEach(completedNode -> futureVoteCollection.addJoinVote(
                                                    new Join(completedNode, node, futureElectionTerm, state.term(), state.version())));
                                                return electionStrategy.isElectionQuorum(node, futureElectionTerm,
                                                    state.term(), state.version(), state.getLastCommittedConfiguration(),
                                                    state.getLastAcceptedConfiguration(), futureVoteCollection);
                                            })
                                            .collect(Collectors.toList());
                                        if (masterCandidates.isEmpty() == false) {
                                            // hand over leadership to a randomly-chosen viable candidate
                                            abdicateTo(masterCandidates.get(random.nextInt(masterCandidates.size())));
                                            attemptReconfiguration = false;
                                        }
                                    }
                                    if (attemptReconfiguration) {
                                        scheduleReconfigurationIfNeeded();
                                    }
                                }
                                lagDetector.startLagDetector(publishRequest.getAcceptedState().version());
                                logIncompleteNodes(Level.WARN);
                            }
                            cancelTimeoutHandlers();
                            ackListener.onNodeAck(getLocalNode(), null);
                            publishListener.onResponse(null);
                        }
                    });
            }

            @Override
            public void onFailure(Exception e) {
                assert Thread.holdsLock(mutex) : "Coordinator mutex not held";
                removePublicationAndPossiblyBecomeCandidate("Publication.onCompletion(false)");
                cancelTimeoutHandlers();

                final FailedToCommitClusterStateException exception = new FailedToCommitClusterStateException("publication failed", e);
                ackListener.onNodeAck(getLocalNode(), exception); // other nodes have acked, but not the master.
                publishListener.onFailure(exception);
            }
        }, EsExecutors.newDirectExecutorService(), transportService.getThreadPool().getThreadContext());
    }

    private void cancelTimeoutHandlers() {
        // timeoutHandler is null in single-node discovery (see constructor)
        if (timeoutHandler != null) {
            timeoutHandler.cancel();
        }
        infoTimeoutHandler.cancel();
    }

    // Processes a join received during this publication, but only if it is still relevant
    // (same term, and we do not already hold a vote from that node).
    private void handleAssociatedJoin(Join join) {
        if (join.getTerm() == getCurrentTerm() && missingJoinVoteFrom(join.getSourceNode())) {
            logger.trace("handling {}", join);
            handleJoin(join);
        }
    }

    @Override
    protected boolean isPublishQuorum(VoteCollection votes) {
        assert Thread.holdsLock(mutex) : "Coordinator mutex not held";
        return coordinationState.get().isPublishQuorum(votes);
    }

    @Override
    protected Optional<ApplyCommitRequest> handlePublishResponse(DiscoveryNode sourceNode, PublishResponse publishResponse) {
        assert Thread.holdsLock(mutex) : "Coordinator mutex not held";
        assert getCurrentTerm() >= publishResponse.getTerm();
        return coordinationState.get().handlePublishResponse(sourceNode, publishResponse);
    }

    @Override
    protected void onJoin(Join join) {
        assert Thread.holdsLock(mutex) : "Coordinator mutex not held";
        if (receivedJoinsProcessed) {
            // a late response may arrive after the state has been locally applied, meaning that receivedJoins has already been
            // processed, so we have to handle this late response here.
            handleAssociatedJoin(join);
        } else {
            receivedJoins.add(join);
        }
    }

    @Override
    protected void onMissingJoin(DiscoveryNode discoveryNode) {
        assert Thread.holdsLock(mutex) : "Coordinator mutex not held";
        // The remote node did not include a join vote in its publish response. We do not persist joins, so it could be that the remote
        // node voted for us and then rebooted, or it could be that it voted for a different node in this term. If we don't have a copy
        // of a join from this node then we assume the latter and bump our term to obtain a vote from this node.
        if (missingJoinVoteFrom(discoveryNode)) {
            final long term = publishRequest.getAcceptedState().term();
            logger.debug("onMissingJoin: no join vote from {}, bumping term to exceed {}", discoveryNode, term);
            updateMaxTermSeen(term + 1);
        }
    }

    @Override
    protected void sendPublishRequest(DiscoveryNode destination, PublishRequest publishRequest,
                                      ActionListener<PublishWithJoinResponse> responseActionListener) {
        publicationContext.sendPublishRequest(destination, publishRequest, wrapWithMutex(responseActionListener));
    }

    @Override
    protected void sendApplyCommit(DiscoveryNode destination, ApplyCommitRequest applyCommit,
                                   ActionListener<Empty> responseActionListener) {
        publicationContext.sendApplyCommit(destination, applyCommit, wrapWithMutex(responseActionListener));
    }
}
}
uschindler/elasticsearch
server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java
Java
apache-2.0
79,197