answer
stringlengths
15
1.25M
<?php

class ITSEC_Away_Mode {

	/**
	 * Hook away-mode enforcement and module registration into WordPress.
	 *
	 * @return void
	 */
	function run() {

		// Execute away mode functions on admin init.
		add_filter( '<API key>', array( $this, 'register_logger' ) );
		add_action( 'itsec_admin_init', array( $this, 'execute_away_mode' ) );
		add_action( 'login_init', array( $this, 'execute_away_mode' ) );

		// Register Sync.
		add_filter( 'itsec_sync_modules', array( $this, 'register_sync' ) );

	}

	/**
	 * Check if away mode is active
	 *
	 * @since 4.4
	 *
	 * @param array $input     [NULL] Input of options to check if calling from form
	 * @param bool  $remaining will return the number of seconds remaining
	 * @param bool  $override  Whether or not we're calculating override values
	 *
	 * @return mixed true if locked out else false or times until next condition (negative until lockout, positive until release)
	 */
	public static function check_away( $input = NULL, $remaining = false, $override = false ) {

		global $itsec_globals;

		ITSEC_Lib::clear_caches(); // lets try to make sure nothing is storing a bad time

		$form = true;

		// NOTE(review): '.confg' matches the sentinel file name used elsewhere by
		// this plugin; it is not a typo to "fix" here.
		$has_away_file = @file_exists( $itsec_globals['ithemes_dir'] . '/itsec_away.confg' );

		$status = false; // assume they're not locked out to start

		// Normal usage check.
		if ( $input === NULL ) { // if we didn't provide input to check we need to get it

			$form  = false;
			$input = get_site_option( 'itsec_away_mode' );

		}

		if ( ( $form === false && ! isset( $input['enabled'] ) ) || ! isset( $input['type'] ) || ! isset( $input['start'] ) || ! isset( $input['end'] ) || ! $has_away_file ) {
			return false; // if we don't have complete settings don't lock them out
		}

		$current_time = $itsec_globals['current_time']; // use current time

		$enabled    = isset( $input['enabled'] ) ? $input['enabled'] : $form;
		$test_type  = $input['type'];
		$test_start = $input['start'];
		$test_end   = $input['end'];

		if ( $test_type === 1 ) { // daily

			// Reduce start/end to seconds-past-midnight and compare with today.
			$test_start -= strtotime( date( 'Y-m-d', $test_start ) );
			$test_end   -= strtotime( date( 'Y-m-d', $test_end ) );
			$day_seconds = $current_time - strtotime( date( 'Y-m-d', $current_time ) );

			if ( $test_start === $test_end ) {
				$status = false;
			}

			if ( $test_start < $test_end ) { // same day

				if ( $test_start <= $day_seconds && $test_end >= $day_seconds && $enabled === true ) {
					$status = $test_end - $day_seconds;
				}

			} else { // overnight

				if ( ( $test_start < $day_seconds || $test_end > $day_seconds ) && $enabled === true ) {

					if ( $day_seconds >= $test_start ) {
						$status = ( 86400 - $day_seconds ) + $test_end;
					} else {
						$status = $test_end - $day_seconds;
					}

				}

			}

		} else if ( $test_start !== $test_end && $test_start <= $current_time && $test_end >= $current_time && $enabled === true ) { // one time

			$status = $test_end - $current_time;

		}

		// They are allowed to log in: compute (negative) seconds until next lockout.
		if ( $status === false ) {

			if ( $test_type === 1 ) {

				if ( $day_seconds > $test_start ) { // actually starts tomorrow
					$status = - ( ( 86400 + $test_start ) - $day_seconds );
				} else { // starts today
					$status = - ( $test_start - $day_seconds );
				}

			} else {

				$status = - ( $test_start - $current_time );

				if ( $status > 0 ) {

					if ( $form === false && isset( $input['enabled'] ) && $input['enabled'] === true ) { // disable away mode if one-time is in the past

						$input['enabled'] = false;
						update_site_option( 'itsec_away_mode', $input );

					}

					$status = 0;

				}

			}

		}

		if ( $override === false ) { // work in an override from sync

			$override_option = get_site_option( '<API key>' );

			// FIX: get_site_option() returns false when the option does not exist;
			// the original indexed $override_option['intention'] unconditionally,
			// producing errors/warnings on every call without a stored override.
			if ( is_array( $override_option ) && isset( $override_option['intention'], $override_option['expires'] ) ) {

				$override = $override_option['intention'];
				$expires  = $override_option['expires'];

				if ( $expires < $itsec_globals['current_time'] ) {

					delete_site_option( '<API key>' );

				} else {

					if ( $override === 'activate' ) {

						if ( $status <= 0 ) { // not currently locked out

							$input['start'] = $current_time - 1;
							$status         = ITSEC_Away_Mode::check_away( $input, true, true );

						} else {

							delete_site_option( '<API key>' );

						}

					} elseif ( $override === 'deactivate' ) {

						if ( $status > 0 ) { // currently locked out

							$input['end'] = $current_time - 1;
							$status       = ITSEC_Away_Mode::check_away( $input, true, true );

						} else {

							delete_site_option( '<API key>' );

						}

					}

				}

			}

		}

		if ( $remaining === true ) {
			return $status;
		} elseif ( $status > 0 && $status !== false ) {
			return true;
		}

		return false; // always default to NOT locking folks out

	}

	/**
	 * Execute away mode functionality
	 *
	 * Logs the blocked access attempt and redirects the visitor to the site URL.
	 *
	 * @return void
	 */
	public function execute_away_mode() {

		global $itsec_logger;

		// Execute lockout if applicable.
		if ( $this->check_away() ) {

			$itsec_logger->log_event(
				'away_mode',
				5,
				array(
					__( 'A host was prevented from accessing the dashboard due to away-mode restrictions being in effect', '<API key>' ),
				),
				ITSEC_Lib::get_ip(),
				'',
				'',
				'',
				''
			);

			wp_redirect( get_option( 'siteurl' ) );
			<API key>();

		}

	}

	/**
	 * Register 404 and file change detection for logger
	 *
	 * @param array $logger_modules array of logger modules
	 *
	 * @return array array of logger modules
	 */
	public function register_logger( $logger_modules ) {

		$logger_modules['away_mode'] = array(
			'type'     => 'away_mode',
			'function' => __( 'Away Mode Triggered', '<API key>' ),
		);

		return $logger_modules;

	}

	/**
	 * Register Lockouts for Sync
	 *
	 * @param array $sync_modules array of logger modules
	 *
	 * @return array array of logger modules
	 */
	public function register_sync( $sync_modules ) {

		$sync_modules['away_mode'] = array(
			'verbs'      => array(
				'itsec-get-away-mode' => '<API key>',
				'<API key>'           => '<API key>',
			),
			'everything' => 'itsec-get-away-mode',
			'path'       => dirname( __FILE__ ),
		);

		return $sync_modules;

	}

}
/*
 * Altera SoCFPGA PinMux configuration
 *
 * One byte per System Manager pinmux register, written in register order at
 * boot. NOTE(review): each value (0-3) is the per-pin mux select; which
 * peripheral function each value maps to is pin- and board-specific — confirm
 * against the generated Quartus pinmux output for this board.
 *
 * <API key>: BSD-3-Clause
 */
#ifndef <API key>
#define <API key>

const u8 sys_mgr_init_table[] = {
	/* EMAC I/O pins */
	3, /* EMACIO0 */
	2, /* EMACIO1 */
	2, /* EMACIO2 */
	2, /* EMACIO3 */
	2, /* EMACIO4 */
	2, /* EMACIO5 */
	2, /* EMACIO6 */
	2, /* EMACIO7 */
	2, /* EMACIO8 */
	3, /* EMACIO9 */
	2, /* EMACIO10 */
	2, /* EMACIO11 */
	2, /* EMACIO12 */
	2, /* EMACIO13 */
	0, /* EMACIO14 */
	0, /* EMACIO15 */
	0, /* EMACIO16 */
	0, /* EMACIO17 */
	0, /* EMACIO18 */
	0, /* EMACIO19 */
	/* Flash controller I/O pins */
	3, /* FLASHIO0 */
	0, /* FLASHIO1 */
	3, /* FLASHIO2 */
	3, /* FLASHIO3 */
	3, /* FLASHIO4 */
	3, /* FLASHIO5 */
	3, /* FLASHIO6 */
	3, /* FLASHIO7 */
	0, /* FLASHIO8 */
	3, /* FLASHIO9 */
	3, /* FLASHIO10 */
	3, /* FLASHIO11 */
	/* General-purpose I/O pins */
	0, /* GENERALIO0 */
	1, /* GENERALIO1 */
	1, /* GENERALIO2 */
	0, /* GENERALIO3 */
	0, /* GENERALIO4 */
	1, /* GENERALIO5 */
	1, /* GENERALIO6 */
	1, /* GENERALIO7 */
	1, /* GENERALIO8 */
	0, /* GENERALIO9 */
	0, /* GENERALIO10 */
	0, /* GENERALIO11 */
	0, /* GENERALIO12 */
	2, /* GENERALIO13 */
	2, /* GENERALIO14 */
	3, /* GENERALIO15 */
	3, /* GENERALIO16 */
	2, /* GENERALIO17 */
	2, /* GENERALIO18 */
	0, /* GENERALIO19 */
	0, /* GENERALIO20 */
	0, /* GENERALIO21 */
	0, /* GENERALIO22 */
	0, /* GENERALIO23 */
	0, /* GENERALIO24 */
	0, /* GENERALIO25 */
	0, /* GENERALIO26 */
	0, /* GENERALIO27 */
	0, /* GENERALIO28 */
	0, /* GENERALIO29 */
	0, /* GENERALIO30 */
	0, /* GENERALIO31 */
	/* Mixed-use bank 1 pins */
	2, /* MIXED1IO0 */
	2, /* MIXED1IO1 */
	2, /* MIXED1IO2 */
	2, /* MIXED1IO3 */
	2, /* MIXED1IO4 */
	2, /* MIXED1IO5 */
	2, /* MIXED1IO6 */
	2, /* MIXED1IO7 */
	2, /* MIXED1IO8 */
	2, /* MIXED1IO9 */
	2, /* MIXED1IO10 */
	2, /* MIXED1IO11 */
	2, /* MIXED1IO12 */
	2, /* MIXED1IO13 */
	0, /* MIXED1IO14 */
	3, /* MIXED1IO15 */
	3, /* MIXED1IO16 */
	3, /* MIXED1IO17 */
	3, /* MIXED1IO18 */
	3, /* MIXED1IO19 */
	3, /* MIXED1IO20 */
	0, /* MIXED1IO21 */
	/* Mixed-use bank 2 pins */
	0, /* MIXED2IO0 */
	0, /* MIXED2IO1 */
	0, /* MIXED2IO2 */
	0, /* MIXED2IO3 */
	0, /* MIXED2IO4 */
	0, /* MIXED2IO5 */
	0, /* MIXED2IO6 */
	0, /* MIXED2IO7 */
	/* GPL input mux selects */
	0, /* GPLINMUX48 */
	0, /* GPLINMUX49 */
	0, /* GPLINMUX50 */
	0, /* GPLINMUX51 */
	0, /* GPLINMUX52 */
	0, /* GPLINMUX53 */
	0, /* GPLINMUX54 */
	0, /* GPLINMUX55 */
	0, /* GPLINMUX56 */
	0, /* GPLINMUX57 */
	0, /* GPLINMUX58 */
	0, /* GPLINMUX59 */
	0, /* GPLINMUX60 */
	0, /* GPLINMUX61 */
	0, /* GPLINMUX62 */
	0, /* GPLINMUX63 */
	0, /* GPLINMUX64 */
	0, /* GPLINMUX65 */
	0, /* GPLINMUX66 */
	0, /* GPLINMUX67 */
	0, /* GPLINMUX68 */
	0, /* GPLINMUX69 */
	0, /* GPLINMUX70 */
	/* GPL mux selects */
	1, /* GPLMUX0 */
	1, /* GPLMUX1 */
	1, /* GPLMUX2 */
	1, /* GPLMUX3 */
	1, /* GPLMUX4 */
	1, /* GPLMUX5 */
	1, /* GPLMUX6 */
	1, /* GPLMUX7 */
	1, /* GPLMUX8 */
	1, /* GPLMUX9 */
	1, /* GPLMUX10 */
	1, /* GPLMUX11 */
	1, /* GPLMUX12 */
	1, /* GPLMUX13 */
	1, /* GPLMUX14 */
	1, /* GPLMUX15 */
	1, /* GPLMUX16 */
	1, /* GPLMUX17 */
	1, /* GPLMUX18 */
	1, /* GPLMUX19 */
	1, /* GPLMUX20 */
	1, /* GPLMUX21 */
	1, /* GPLMUX22 */
	1, /* GPLMUX23 */
	1, /* GPLMUX24 */
	1, /* GPLMUX25 */
	1, /* GPLMUX26 */
	1, /* GPLMUX27 */
	1, /* GPLMUX28 */
	1, /* GPLMUX29 */
	1, /* GPLMUX30 */
	1, /* GPLMUX31 */
	1, /* GPLMUX32 */
	1, /* GPLMUX33 */
	1, /* GPLMUX34 */
	1, /* GPLMUX35 */
	1, /* GPLMUX36 */
	1, /* GPLMUX37 */
	1, /* GPLMUX38 */
	1, /* GPLMUX39 */
	1, /* GPLMUX40 */
	1, /* GPLMUX41 */
	1, /* GPLMUX42 */
	1, /* GPLMUX43 */
	1, /* GPLMUX44 */
	1, /* GPLMUX45 */
	1, /* GPLMUX46 */
	1, /* GPLMUX47 */
	1, /* GPLMUX48 */
	1, /* GPLMUX49 */
	1, /* GPLMUX50 */
	1, /* GPLMUX51 */
	1, /* GPLMUX52 */
	1, /* GPLMUX53 */
	1, /* GPLMUX54 */
	1, /* GPLMUX55 */
	1, /* GPLMUX56 */
	1, /* GPLMUX57 */
	1, /* GPLMUX58 */
	1, /* GPLMUX59 */
	1, /* GPLMUX60 */
	1, /* GPLMUX61 */
	1, /* GPLMUX62 */
	1, /* GPLMUX63 */
	1, /* GPLMUX64 */
	1, /* GPLMUX65 */
	1, /* GPLMUX66 */
	1, /* GPLMUX67 */
	1, /* GPLMUX68 */
	1, /* GPLMUX69 */
	1, /* GPLMUX70 */
	/* Peripheral-to-FPGA routing selects (0 = use HPS pins) */
	0, /* NANDUSEFPGA */
	0, /* UART0USEFPGA */
	0, /* RGMII1USEFPGA */
	0, /* SPIS0USEFPGA */
	0, /* CAN0USEFPGA */
	0, /* I2C0USEFPGA */
	0, /* SDMMCUSEFPGA */
	0, /* QSPIUSEFPGA */
	0, /* SPIS1USEFPGA */
	0, /* RGMII0USEFPGA */
	0, /* UART1USEFPGA */
	0, /* CAN1USEFPGA */
	0, /* USB1USEFPGA */
	0, /* I2C3USEFPGA */
	0, /* I2C2USEFPGA */
	0, /* I2C1USEFPGA */
	0, /* SPIM1USEFPGA */
	0, /* USB0USEFPGA */
	0 /* SPIM0USEFPGA */
};

#endif /* <API key> */
<?php
/** <API key> */

require_once 'Zend/Loader/Autoloader/Interface.php';

class <API key> implements <API key>
{
    /**
     * @var string Base path to resource classes
     */
    protected $_basePath;

    /**
     * @var array Components handled within this resource
     */
    protected $_components = array();

    /**
     * @var string Default resource/component to use when using object registry
     */
    protected $<API key>;

    /**
     * @var string Namespace of classes within this resource
     */
    protected $_namespace;

    /**
     * @var array Loaded resource object instances, keyed by class name
     *
     * FIX: declared explicitly. load() previously created this as a dynamic
     * property on first use, which is deprecated as of PHP 8.2 and made the
     * registry invisible to static analysis.
     */
    protected $_resources = array();

    /**
     * @var array Available resource types handled by this resource autoloader
     */
    protected $_resourceTypes = array();

    /**
     * Constructor
     *
     * @param  array|Zend_Config $options Configuration options for resource autoloader
     * @return void
     */
    public function __construct($options)
    {
        if ($options instanceof Zend_Config) {
            $options = $options->toArray();
        }
        if (!is_array($options)) {
            require_once 'Zend/Loader/Exception.php';
            throw new <API key>('Options must be passed to resource loader constructor');
        }

        $this->setOptions($options);

        $namespace = $this->getNamespace();
        if ((null === $namespace)
            || (null === $this->getBasePath())
        ) {
            require_once 'Zend/Loader/Exception.php';
            throw new <API key>('Resource loader requires both a namespace and a base path for initialization');
        }

        if (!empty($namespace)) {
            $namespace .= '_';
        }
        // Register this instance with the global autoloader stack.
        <API key>::getInstance()->unshiftAutoloader($this, $namespace);
    }

    /**
     * Overloading: methods
     *
     * Allow retrieving concrete resource object instances using 'get<Resourcename>()'
     * syntax. Example:
     * <code>
     * $loader = new <API key>(array(
     *     'namespace' => 'Stuff_',
     *     'basePath'  => '/path/to/some/stuff',
     * ))
     * $loader->addResourceType('Model', 'models', 'Model');
     *
     * $foo = $loader->getModel('Foo'); // get instance of Stuff_Model_Foo class
     * </code>
     *
     * @param  string $method
     * @param  array  $args
     * @return mixed
     * @throws <API key> if method not beginning with 'get' or not matching a valid resource type is called
     */
    public function __call($method, $args)
    {
        if ('get' == substr($method, 0, 3)) {
            $type = strtolower(substr($method, 3));
            if (!$this->hasResourceType($type)) {
                require_once 'Zend/Loader/Exception.php';
                throw new <API key>("Invalid resource type $type; cannot load resource");
            }
            if (empty($args)) {
                require_once 'Zend/Loader/Exception.php';
                throw new <API key>("Cannot load resources; no resource specified");
            }
            $resource = array_shift($args);
            return $this->load($resource, $type);
        }

        require_once 'Zend/Loader/Exception.php';
        throw new <API key>("Method '$method' is not supported");
    }

    /**
     * Helper method to calculate the correct class path
     *
     * @param  string $class
     * @return False if not matched other wise the correct path
     */
    public function getClassPath($class)
    {
        $segments          = explode('_', $class);
        $namespaceTopLevel = $this->getNamespace();
        $namespace         = '';

        if (!empty($namespaceTopLevel)) {
            $namespace = array_shift($segments);
            if ($namespace != $namespaceTopLevel) {
                // wrong prefix? we're done
                return false;
            }
        }

        if (count($segments) < 2) {
            // assumes all resources have a component and class name, minimum
            return false;
        }

        $final     = array_pop($segments);
        $component = $namespace;
        $lastMatch = false;
        // Find the longest registered component prefix that matches the class.
        do {
            $segment    = array_shift($segments);
            $component .= empty($component) ? $segment : '_' . $segment;
            if (isset($this->_components[$component])) {
                $lastMatch = $component;
            }
        } while (count($segments));

        if (!$lastMatch) {
            return false;
        }

        $final     = substr($class, strlen($lastMatch) + 1);
        $path      = $this->_components[$lastMatch];
        $classPath = $path . '/' . str_replace('_', '/', $final) . '.php';

        if (Zend_Loader::isReadable($classPath)) {
            return $classPath;
        }

        return false;
    }

    /**
     * Attempt to autoload a class
     *
     * @param  string $class
     * @return mixed False if not matched, otherwise result if include operation
     */
    public function autoload($class)
    {
        $classPath = $this->getClassPath($class);
        if (false !== $classPath) {
            return include $classPath;
        }
        return false;
    }

    /**
     * Set class state from options
     *
     * @param  array $options
     * @return <API key>
     */
    public function setOptions(array $options)
    {
        $methods = get_class_methods($this);
        foreach ($options as $key => $value) {
            $method = 'set' . ucfirst($key);
            if (in_array($method, $methods)) {
                $this->$method($value);
            }
        }
        return $this;
    }

    /**
     * Set namespace that this autoloader handles
     *
     * @param  string $namespace
     * @return <API key>
     */
    public function setNamespace($namespace)
    {
        $this->_namespace = rtrim((string) $namespace, '_');
        return $this;
    }

    /**
     * Get namespace this autoloader handles
     *
     * @return string
     */
    public function getNamespace()
    {
        return $this->_namespace;
    }

    /**
     * Set base path for this set of resources
     *
     * @param  string $path
     * @return <API key>
     */
    public function setBasePath($path)
    {
        $this->_basePath = (string) $path;
        return $this;
    }

    /**
     * Get base path to this set of resources
     *
     * @return string
     */
    public function getBasePath()
    {
        return $this->_basePath;
    }

    /**
     * Add resource type
     *
     * @param  string $type identifier for the resource type being loaded
     * @param  string $path path relative to resource base path containing the resource types
     * @param  null|string $namespace sub-component namespace to append to base namespace that qualifies this resource type
     * @return <API key>
     */
    public function addResourceType($type, $path, $namespace = null)
    {
        $type = strtolower($type);
        if (!isset($this->_resourceTypes[$type])) {
            if (null === $namespace) {
                require_once 'Zend/Loader/Exception.php';
                throw new <API key>('Initial definition of a resource type must include a namespace');
            }
            $namespaceTopLevel = $this->getNamespace();
            $namespace = ucfirst(trim($namespace, '_'));
            $this->_resourceTypes[$type] = array(
                'namespace' => empty($namespaceTopLevel) ? $namespace : $namespaceTopLevel . '_' . $namespace,
            );
        }
        if (!is_string($path)) {
            require_once 'Zend/Loader/Exception.php';
            throw new <API key>('Invalid path specification provided; must be string');
        }
        $this->_resourceTypes[$type]['path'] = $this->getBasePath() . '/' . rtrim($path, '\/');

        $component = $this->_resourceTypes[$type]['namespace'];
        $this->_components[$component] = $this->_resourceTypes[$type]['path'];
        return $this;
    }

    /**
     * Add multiple resources at once
     *
     * $types should be an associative array of resource type => specification
     * pairs. Each specification should be an associative array containing
     * minimally the 'path' key (specifying the path relative to the resource
     * base path) and optionally the 'namespace' key (indicating the subcomponent
     * namespace to append to the resource namespace).
     *
     * @param  array $types
     * @return <API key>
     */
    public function addResourceTypes(array $types)
    {
        foreach ($types as $type => $spec) {
            if (!is_array($spec)) {
                require_once 'Zend/Loader/Exception.php';
                throw new <API key>('addResourceTypes() expects an array of arrays');
            }
            if (!isset($spec['path'])) {
                require_once 'Zend/Loader/Exception.php';
                throw new <API key>('addResourceTypes() expects each array to include a paths element');
            }
            $paths = $spec['path'];
            $namespace = null;
            if (isset($spec['namespace'])) {
                $namespace = $spec['namespace'];
            }
            $this->addResourceType($type, $paths, $namespace);
        }
        return $this;
    }

    /**
     * Overwrite existing and set multiple resource types at once
     *
     * @see    <API key>::addResourceTypes()
     * @param  array $types
     * @return <API key>
     */
    public function setResourceTypes(array $types)
    {
        $this->clearResourceTypes();
        return $this->addResourceTypes($types);
    }

    /**
     * Retrieve resource type mappings
     *
     * @return array
     */
    public function getResourceTypes()
    {
        return $this->_resourceTypes;
    }

    /**
     * Is the requested resource type defined?
     *
     * @param  string $type
     * @return bool
     */
    public function hasResourceType($type)
    {
        return isset($this->_resourceTypes[$type]);
    }

    /**
     * Remove the requested resource type
     *
     * @param  string $type
     * @return <API key>
     */
    public function removeResourceType($type)
    {
        if ($this->hasResourceType($type)) {
            $namespace = $this->_resourceTypes[$type]['namespace'];
            unset($this->_components[$namespace]);
            unset($this->_resourceTypes[$type]);
        }
        return $this;
    }

    /**
     * Clear all resource types
     *
     * @return <API key>
     */
    public function clearResourceTypes()
    {
        $this->_resourceTypes = array();
        $this->_components   = array();
        return $this;
    }

    /**
     * Set default resource type to use when calling load()
     *
     * @param  string $type
     * @return <API key>
     */
    public function <API key>($type)
    {
        if ($this->hasResourceType($type)) {
            $this-><API key> = $type;
        }
        return $this;
    }

    /**
     * Get default resource type to use when calling load()
     *
     * @return string|null
     */
    public function <API key>()
    {
        return $this-><API key>;
    }

    /**
     * Object registry and factory
     *
     * Loads the requested resource of type $type (or uses the default resource
     * type if none provided). If the resource has been loaded previously,
     * returns the previous instance; otherwise, instantiates it.
     *
     * @param  string $resource
     * @param  string $type
     * @return object
     * @throws <API key> if resource type not specified or invalid
     */
    public function load($resource, $type = null)
    {
        if (null === $type) {
            $type = $this-><API key>();
            if (empty($type)) {
                require_once 'Zend/Loader/Exception.php';
                throw new <API key>('No resource type specified');
            }
        }
        if (!$this->hasResourceType($type)) {
            require_once 'Zend/Loader/Exception.php';
            throw new <API key>('Invalid resource type specified');
        }
        $namespace = $this->_resourceTypes[$type]['namespace'];
        $class     = $namespace . '_' . ucfirst($resource);
        if (!isset($this->_resources[$class])) {
            $this->_resources[$class] = new $class;
        }
        return $this->_resources[$class];
    }
}
#!/usr/bin/python
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# This library is distributed in the hope that it will be useful,
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# You should have received a copy of the GNU Lesser General Public
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# NOTE(review): Python 2 script (print statements, raw_input, 'async' used as
# an identifier). Line structure reconstructed from a whitespace-collapsed
# dump -- verify indentation against the upstream Xen xapi tool.

import sys
import time
import re
import os

sys.path.append('/usr/lib/python')
from xen.util.xmlrpclib2 import ServerProxy
from optparse import *
from pprint import pprint
from types import DictType
from getpass import getpass

# Get default values from the environment
SERVER_URI = os.environ.get('XAPI_SERVER_URI', 'http://localhost:9363/')
SERVER_USER = os.environ.get('XAPI_SERVER_USER', '')
SERVER_PASS = os.environ.get('XAPI_SERVER_PASS', '')

MB = 1024 * 1024

# printf-style row formats for the various "list" subcommands
HOST_INFO_FORMAT = '%-20s: %-50s'
VM_LIST_FORMAT = '%(name_label)-18s %(memory_actual)-5s %(VCPUs_number)-5s'\
                 ' %(power_state)-10s %(uuid)-36s'
SR_LIST_FORMAT = '%(name_label)-18s %(uuid)-36s %(physical_size)-10s' \
                 '%(type)-10s'
VDI_LIST_FORMAT = '%(name_label)-18s %(uuid)-36s %(virtual_size)-8s'
VBD_LIST_FORMAT = '%(device)-6s %(uuid)-36s %(VDI)-8s'
TASK_LIST_FORMAT = '%(name_label)-18s %(uuid)-36s %(status)-8s %(progress)-4s'
VIF_LIST_FORMAT = '%(name)-8s %(device)-7s %(uuid)-36s %(MAC)-10s'
CONSOLE_LIST_FORMAT = '%(uuid)-36s %(protocol)-8s %(location)-32s'

# Subcommand name -> (argument usage string, description)
COMMANDS = {
    'host-info': ('', 'Get Xen Host Info'),
    'host-set-name': ('', 'Set host name'),
    'pif-list': ('', 'List all PIFs'),
    'sr-list': ('', 'List all SRs'),
    'vbd-list': ('', 'List all VBDs'),
    'vbd-create': ('<domname> <pycfg> [opts]',
                   'Create VBD attached to domname'),
    'vdi-create': ('<pycfg> [opts]', 'Create a VDI'),
    'vdi-list' : ('', 'List all VDI'),
    'vdi-rename': ('<vdi_uuid> <new_name>', 'Rename VDI'),
    'vdi-destroy': ('<vdi_uuid>', 'Delete VDI'),
    'vif-create': ('<domname> <pycfg>', 'Create VIF attached to domname'),
    'vtpm-create' : ('<domname> <pycfg>', 'Create VTPM attached to domname'),
    'vm-create': ('<pycfg>', 'Create VM with python config'),
    'vm-destroy': ('<domname>', 'Delete VM'),
    'vm-list': ('[--long]', 'List all domains.'),
    'vm-name': ('<uuid>', 'Name of UUID.'),
    'vm-shutdown': ('<name> [opts]', 'Shutdown VM with name'),
    'vm-start': ('<name>', 'Start VM with name'),
    'vm-uuid': ('<name>', 'UUID of a domain by name.'),
    'async-vm-start': ('<name>', 'Start VM asynchronously'),
}

# Per-subcommand optparse option specifications: (args tuple, kwargs dict)
OPTIONS = {
    'sr-list': [(('-l', '--long'),
                 {'action':'store_true',
                  'help':'List all properties of SR'})],
    'vdi-list': [(('-l', '--long'),
                  {'action':'store_true',
                   'help':'List all properties of VDI'})],
    'vif-list': [(('-l', '--long'),
                  {'action':'store_true',
                   'help':'List all properties of VIF'})],
    'vm-list': [(('-l', '--long'),
                 {'action':'store_true',
                  'help':'List all properties of VMs'})],
    'vm-shutdown': [(('-f', '--force'),
                     {'help': 'Shutdown Forcefully',
                      'action': 'store_true'})],
    'vdi-create': [(('--name-label',), {'help': 'Name for VDI'}),
                   (('--name-description',),
                    {'help': 'Description for VDI'}),
                   (('--virtual-size',),
                    {'type': 'int', 'default': 0,
                     'help': 'Size of VDI in bytes'}),
                   (('--type',),
                    {'choices': ['system', 'user', 'ephemeral'],
                     'default': 'system', 'help': 'VDI type'}),
                   (('--sharable',),
                    {'action': 'store_true', 'help': 'VDI sharable'}),
                   (('--read-only',),
                    {'action': 'store_true', 'help': 'Read only'}),
                   (('--sr',), {})],
    'vbd-create': [(('--VDI',), {'help': 'UUID of VDI to attach to.'}),
                   (('--mode',),
                    {'choices': ['RO', 'RW'],
                     'help': 'device mount mode'}),
                   (('--driver',),
                    {'choices':['paravirtualised', 'ioemu'],
                     'help': 'Driver for VBD'}),
                   (('--device',),
                    {'help': 'Device name on guest domain'})]
}

class OptionError(Exception):
    # Raised for bad/missing command-line arguments.
    pass

class XenAPIError(Exception):
    # Raised when the XenAPI server returns an ErrorDescription.
    pass

# Extra utility functions

class IterableValues(Values):
    """Better interface to the list of values from optparse."""

    def __iter__(self):
        # Skip private attributes and bound methods of the Values object.
        for opt, val in self.__dict__.items():
            if opt[0] == '_' or callable(val):
                continue
            yield opt, val

def parse_args(cmd_name, args, set_defaults = False):
    # Build an OptionParser for the given subcommand and parse its args.
    argstring, desc = COMMANDS[cmd_name]
    parser = OptionParser(usage = 'xapi %s %s' % (cmd_name, argstring),
                          description = desc)
    if cmd_name in OPTIONS:
        for optargs, optkwds in OPTIONS[cmd_name]:
            parser.add_option(*optargs, **optkwds)

    if set_defaults:
        default_values = parser.get_default_values()
        defaults = IterableValues(default_values.__dict__)
    else:
        defaults = IterableValues()
    (opts, extraargs) = parser.parse_args(args = list(args),
                                          values = defaults)
    return opts, extraargs

def execute(server, fn, args, async = False):
    # Invoke a XenAPI call (optionally via the Async namespace) and unwrap
    # the {'Status'/'Value'/'ErrorDescription'} result envelope.
    if async:
        func = eval('server.Async.%s' % fn)
    else:
        func = eval('server.%s' % fn)
    result = func(*args)

    if type(result) != DictType:
        raise TypeError("Function returned object of type: %s" %
                        str(type(result)))
    if 'Value' not in result:
        raise XenAPIError(*result['ErrorDescription'])
    return result['Value']

# Lazily-initialised shared connection state.
_initialised = False
_server = None
_session = None

def connect(*args):
    # Connect once and cache (server, session) for subsequent calls.
    global _server, _session, _initialised
    if not _initialised:
        # try without password or default credentials
        try:
            _server = ServerProxy(SERVER_URI)
            _session = execute(_server.session, 'login_with_password',
                               (SERVER_USER, SERVER_PASS))
        except:
            # Fall back to interactive credentials on any failure.
            login = raw_input("Login: ")
            password = getpass()
            creds = (login, password)
            _server = ServerProxy(SERVER_URI)
            _session = execute(_server.session, 'login_with_password',
                               creds)
        _initialised = True
    return (_server, _session)

def _stringify(adict):
    # Coerce all values to str for the %-format list templates.
    return dict([(k, str(v)) for k, v in adict.items()])

def _read_python_cfg(filename):
    # Execute a Python config file and return its top-level bindings.
    cfg = {}
    execfile(filename, {}, cfg)
    return cfg

def resolve_vm(server, session, vm_name):
    # Map a VM name label to its first matching UUID (or None).
    vm_uuid = execute(server, 'VM.get_by_name_label', (session, vm_name))
    if not vm_uuid:
        return None
    else:
        return vm_uuid[0]

def resolve_vdi(server, session, vdi_name):
    # Map a VDI name label to its first matching UUID (or None).
    vdi_uuid = execute(server, 'VDI.get_by_name_label', (session, vdi_name))
    if not vdi_uuid:
        return None
    else:
        return vdi_uuid[0]

# Actual commands

def xapi_host_info(args, async = False):
    # Print name/version/CPU/VM summary for each host.
    server, session = connect()
    hosts = execute(server, 'host.get_all', (session,))
    for host in hosts: # there is only one, but ..
        hostinfo = execute(server, 'host.get_record', (session, host))
        print HOST_INFO_FORMAT % ('Name', hostinfo['name_label'])
        print HOST_INFO_FORMAT % ('Version', hostinfo['software_version'])
        print HOST_INFO_FORMAT % ('CPUs', len(hostinfo['host_CPUs']))
        print HOST_INFO_FORMAT % ('VMs', len(hostinfo['resident_VMs']))
        print HOST_INFO_FORMAT % ('UUID', host)

        for host_cpu_uuid in hostinfo['host_CPUs']:
            host_cpu = execute(server, 'host_cpu.get_record',
                               (session, host_cpu_uuid))
            print 'CPU %s Util: %.2f' % (host_cpu['number'],
                                         float(host_cpu['utilisation']))

def xapi_host_set_name(args, async = False):
    if len(args) < 1:
        raise OptionError("No hostname specified")

    server, session = connect()
    hosts = execute(server, 'host.get_all', (session,))
    if len(hosts) > 0:
        execute(server, 'host.set_name_label', (session, hosts[0], args[0]))
        print 'Hostname: %s' % execute(server, 'host.get_name_label',
                                       (session, hosts[0]))

def xapi_vm_uuid(args, async = False):
    if len(args) < 1:
        raise OptionError("No domain name specified")

    server, session = connect()
    vm_uuid = resolve_vm(server, session, args[0])
    print vm_uuid

def xapi_vm_name(args, async = False):
    if len(args) < 1:
        raise OptionError("No UUID specified")

    server, session = connect()
    vm_name = execute(server, 'VM.get_name_label', (session, args[0]))
    print vm_name

def xapi_vm_list(args, async = False):
    # List VMs; with --long, expand VBD/VIF/VTPM records inline.
    opts, args = parse_args('vm-list', args, set_defaults = True)
    is_long = opts and opts.long
    list_only = args

    server, session = connect()
    vm_uuids = execute(server, 'VM.get_all', (session,))
    if not is_long:
        print VM_LIST_FORMAT % {'name_label':'Name',
                                'memory_actual':'Mem',
                                'VCPUs_number': 'VCPUs',
                                'power_state': 'State',
                                'uuid': 'UUID'}

    for uuid in vm_uuids:
        vm_info = execute(server, 'VM.get_record', (session, uuid))
        # skip domain if we don't want
        if list_only and vm_info['name_label'] not in list_only:
            continue
        if is_long:
            vbds = vm_info['VBDs']
            vifs = vm_info['VIFs']
            vtpms = vm_info['VTPMs']
            vif_infos = []
            vbd_infos = []
            vtpm_infos = []
            for vbd in vbds:
                vbd_info = execute(server, 'VBD.get_record', (session, vbd))
                vbd_infos.append(vbd_info)
            for vif in vifs:
                vif_info = execute(server, 'VIF.get_record', (session, vif))
                vif_infos.append(vif_info)
            for vtpm in vtpms:
                vtpm_info = execute(server, 'VTPM.get_record',
                                    (session, vtpm))
                vtpm_infos.append(vtpm_info)
            vm_info['VBDs'] = vbd_infos
            vm_info['VIFs'] = vif_infos
            vm_info['VTPMs'] = vtpm_infos
            pprint(vm_info)
        else:
            print VM_LIST_FORMAT % _stringify(vm_info)

def xapi_vm_create(args, async = False):
    if len(args) < 1:
        raise OptionError("Configuration file not specified")

    filename = args[0]
    cfg = _read_python_cfg(filename)

    print 'Creating VM from %s ..' % filename
    server, session = connect()
    uuid = execute(server, 'VM.create', (session, cfg), async = async)
    print 'Done. (%s)' % uuid
    print uuid

def xapi_vm_destroy(args, async = False):
    if len(args) < 1:
        raise OptionError("No domain name specified.")

    server, session = connect()
    vm_uuid = resolve_vm(server, session, args[0])
    print 'Destroying VM %s (%s)' % (args[0], vm_uuid)
    success = execute(server, 'VM.destroy', (session, vm_uuid),
                      async = async)
    print 'Done.'

def xapi_vm_start(args, async = False):
    if len(args) < 1:
        raise OptionError("No Domain name specified.")

    server, session = connect()
    vm_uuid = resolve_vm(server, session, args[0])
    print 'Starting VM %s (%s)' % (args[0], vm_uuid)
    success = execute(server, 'VM.start', (session, vm_uuid, False),
                      async = async)
    if async:
        print 'Task started: %s' % success
    else:
        print 'Done.'

def xapi_vm_suspend(args, async = False):
    if len(args) < 1:
        raise OptionError("No Domain name specified.")

    server, session = connect()
    vm_uuid = resolve_vm(server, session, args[0])
    print 'Suspending VM %s (%s)' % (args[0], vm_uuid)
    success = execute(server, 'VM.suspend', (session, vm_uuid),
                      async = async)
    if async:
        print 'Task started: %s' % success
    else:
        print 'Done.'

def xapi_vm_resume(args, async = False):
    if len(args) < 1:
        raise OptionError("No Domain name specified.")

    server, session = connect()
    vm_uuid = resolve_vm(server, session, args[0])
    print 'Resuming VM %s (%s)' % (args[0], vm_uuid)
    success = execute(server, 'VM.resume', (session, vm_uuid, False),
                      async = async)
    if async:
        print 'Task started: %s' % success
    else:
        print 'Done.'

def xapi_vm_pause(args, async = False):
    if len(args) < 1:
        raise OptionError("No Domain name specified.")

    server, session = connect()
    vm_uuid = resolve_vm(server, session, args[0])
    print 'Pausing VM %s (%s)' % (args[0], vm_uuid)
    success = execute(server, 'VM.pause', (session, vm_uuid), async = async)
    if async:
        print 'Task started: %s' % success
    else:
        print 'Done.'

def xapi_vm_unpause(args, async = False):
    if len(args) < 1:
        raise OptionError("No Domain name specified.")

    server, session = connect()
    vm_uuid = resolve_vm(server, session, args[0])
    # NOTE(review): message says "Pausing" in the original even though this
    # unpauses -- left as-is; runtime strings are preserved verbatim.
    print 'Pausing VM %s (%s)' % (args[0], vm_uuid)
    success = execute(server, 'VM.unpause', (session, vm_uuid),
                      async = async)
    if async:
        print 'Task started: %s' % success
    else:
        print 'Done.'

def xapi_task_list(args, async = False):
    server, session = connect()
    all_tasks = execute(server, 'task.get_all', (session,))
    print TASK_LIST_FORMAT % {'name_label': 'Task Name',
                              'uuid': 'UUID',
                              'status': 'Status',
                              'progress': '%'}
    for task_uuid in all_tasks:
        task = execute(server, 'task.get_record', (session, task_uuid))
        print TASK_LIST_FORMAT % task

def xapi_task_clear(args, async = False):
    server, session = connect()
    all_tasks = execute(server, 'task.get_all', (session,))
    for task_uuid in all_tasks:
        success = execute(server, 'task.destroy', (session, task_uuid))
        print 'Destroyed Task %s' % task_uuid

def xapi_vm_shutdown(args, async = False):
    opts, args = parse_args("vm-shutdown", args, set_defaults = True)

    if len(args) < 1:
        raise OptionError("No Domain name specified.")

    server, session = connect()
    vm_uuid = resolve_vm(server, session, args[0])
    if opts.force:
        print 'Forcefully shutting down VM %s (%s)' % (args[0], vm_uuid)
        success = execute(server, 'VM.hard_shutdown', (session, vm_uuid),
                          async = async)
    else:
        print 'Shutting down VM %s (%s)' % (args[0], vm_uuid)
        success = execute(server, 'VM.clean_shutdown', (session, vm_uuid),
                          async = async)
    if async:
        print 'Task started: %s' % success
    else:
        print 'Done.'

def xapi_vbd_create(args, async = False):
    opts, args = parse_args('vbd-create', args)

    if len(args) < 2:
        raise OptionError("Configuration file and domain not specified")

    domname = args[0]

    if len(args) > 1:
        filename = args[1]
        cfg = _read_python_cfg(filename)
    else:
        cfg = {}

    # Command-line options override/extend the config-file values.
    for opt, val in opts:
        cfg[opt] = val

    print 'Creating VBD ...',
    server, session = connect()
    vm_uuid = resolve_vm(server, session, domname)
    cfg['VM'] = vm_uuid
    vbd_uuid = execute(server, 'VBD.create', (session, cfg), async = async)
    if async:
        print 'Task started: %s' % vbd_uuid
    else:
        print 'Done. (%s)' % vbd_uuid

def xapi_vif_create(args, async = False):
    if len(args) < 2:
        raise OptionError("Configuration file not specified")

    domname = args[0]
    filename = args[1]
    cfg = _read_python_cfg(filename)

    print 'Creating VIF from %s ..' % filename
    server, session = connect()
    vm_uuid = resolve_vm(server, session, domname)
    cfg['VM'] = vm_uuid
    vif_uuid = execute(server, 'VIF.create', (session, cfg), async = async)
    if async:
        print 'Task started: %s' % vif_uuid
    else:
        print 'Done. (%s)' % vif_uuid

def xapi_vbd_list(args, async = False):
    server, session = connect()
    domname = args[0]

    dom_uuid = resolve_vm(server, session, domname)
    vbds = execute(server, 'VM.get_VBDs', (session, dom_uuid))

    print VBD_LIST_FORMAT % {'device': 'Device',
                             'uuid' : 'UUID',
                             'VDI': 'VDI'}
    for vbd in vbds:
        vbd_struct = execute(server, 'VBD.get_record', (session, vbd))
        print VBD_LIST_FORMAT % vbd_struct

def xapi_vbd_stats(args, async = False):
    server, session = connect()
    domname = args[0]
    dom_uuid = resolve_vm(server, session, domname)

    vbds = execute(server, 'VM.get_VBDs', (session, dom_uuid))
    for vbd_uuid in vbds:
        print execute(server, 'VBD.get_io_read_kbs', (session, vbd_uuid))

def xapi_vif_list(args, async = False):
    server, session = connect()
    # NOTE(review): parses 'vdi-list' options here (not 'vif-list') in the
    # original -- both define the same -l/--long flag, so behaviour matches;
    # flagged for upstream confirmation rather than changed.
    opts, args = parse_args('vdi-list', args, set_defaults = True)
    is_long = opts and opts.long

    domname = args[0]
    dom_uuid = resolve_vm(server, session, domname)
    vifs = execute(server, 'VM.get_VIFs', (session, dom_uuid))

    if not is_long:
        print VIF_LIST_FORMAT % {'name': 'Name',
                                 'device': 'Device',
                                 'uuid' : 'UUID',
                                 'MAC': 'MAC'}
        for vif in vifs:
            vif_struct = execute(server, 'VIF.get_record', (session, vif))
            print VIF_LIST_FORMAT % vif_struct
    else:
        for vif in vifs:
            vif_struct = execute(server, 'VIF.get_record', (session, vif))
            pprint(vif_struct)

def xapi_console_list(args, async = False):
    server, session = connect()
    opts, args = parse_args('vdi-list', args, set_defaults = True)
    is_long = opts and opts.long

    domname = args[0]
    # NOTE(review): the visible source chunk ends mid-statement here; the
    # remainder of this function lies outside the region under review.
    dom_uuid = resolve_vm(server, session,
domname) consoles = execute(server, 'VM.get_consoles', (session, dom_uuid)) if not is_long: print CONSOLE_LIST_FORMAT % {'protocol': 'Protocol', 'location': 'Location', 'uuid': 'UUID'} for console in consoles: console_struct = execute(server, 'console.get_record', (session, console)) print CONSOLE_LIST_FORMAT % console_struct else: for console in consoles: console_struct = execute(server, 'console.get_record', (session, console)) pprint(console_struct) def xapi_vdi_list(args, async = False): opts, args = parse_args('vdi-list', args, set_defaults = True) is_long = opts and opts.long server, session = connect() vdis = execute(server, 'VDI.get_all', (session,)) if not is_long: print VDI_LIST_FORMAT % {'name_label': 'VDI Label', 'uuid' : 'UUID', 'virtual_size': 'Bytes'} for vdi in vdis: vdi_struct = execute(server, 'VDI.get_record', (session, vdi)) print VDI_LIST_FORMAT % vdi_struct else: for vdi in vdis: vdi_struct = execute(server, 'VDI.get_record', (session, vdi)) pprint(vdi_struct) def xapi_sr_list(args, async = False): opts, args = parse_args('sr-list', args, set_defaults = True) is_long = opts and opts.long server, session = connect() srs = execute(server, 'SR.get_all', (session,)) if not is_long: print SR_LIST_FORMAT % {'name_label': 'SR Label', 'uuid' : 'UUID', 'physical_size': 'Size (MB)', 'type': 'Type'} for sr in srs: sr_struct = execute(server, 'SR.get_record', (session, sr)) sr_struct['physical_size'] = int(sr_struct['physical_size'])/MB print SR_LIST_FORMAT % sr_struct else: for sr in srs: sr_struct = execute(server, 'SR.get_record', (session, sr)) pprint(sr_struct) def xapi_sr_rename(args, async = False): server, session = connect() sr = execute(server, 'SR.get_by_name_label', (session, args[0])) execute(server, 'SR.set_name_label', (session, sr[0], args[1])) def xapi_vdi_create(args, async = False): opts, args = parse_args('vdi-create', args) if len(args) > 0: cfg = _read_python_cfg(args[0]) else: cfg = {} for opt, val in opts: cfg[opt] = val server, 
session = connect() srs = [] if cfg.get('SR'): srs = execute(server, 'SR.get_by_name_label', (session, cfg['SR'])) else: srs = execute(server, 'SR.get_all', (session,)) sr = srs[0] cfg['SR'] = sr size = cfg['virtual_size']/MB print 'Creating VDI of size: %dMB ..' % size, uuid = execute(server, 'VDI.create', (session, cfg), async = async) if async: print 'Task started: %s' % uuid else: print 'Done. (%s)' % uuid def xapi_vdi_destroy(args, async = False): server, session = connect() if len(args) < 1: raise OptionError('Not enough arguments') vdi_uuid = args[0] print 'Deleting VDI %s' % vdi_uuid result = execute(server, 'VDI.destroy', (session, vdi_uuid), async = async) if async: print 'Task started: %s' % result else: print 'Done.' def xapi_vdi_rename(args, async = False): server, session = connect() if len(args) < 2: raise OptionError('Not enough arguments') vdi_uuid = execute(server, 'VDI.get_by_name_label', session, args[0]) vdi_name = args[1] print 'Renaming VDI %s to %s' % (vdi_uuid[0], vdi_name) result = execute(server, 'VDI.set_name_label', (session, vdi_uuid[0], vdi_name), async = async) if async: print 'Task started: %s' % result else: print 'Done.' def xapi_vtpm_create(args, async = False): server, session = connect() domname = args[0] cfg = _read_python_cfg(args[1]) vm_uuid = resolve_vm(server, session, domname) cfg['VM'] = vm_uuid print "Creating vTPM with cfg = %s" % cfg vtpm_uuid = execute(server, 'VTPM.create', (session, cfg)) print "Done. 
(%s)" % vtpm_uuid def xapi_pif_list(args, async = False): server, session = connect() pif_uuids = execute(server, 'PIF.get_all', (session,)) for pif_uuid in pif_uuids: pif = execute(server, 'PIF.get_record', (session, pif_uuid)) print pif def xapi_debug_wait(args, async = False): secs = 10 if len(args) > 0: secs = int(args[0]) server, session = connect() task_uuid = execute(server, 'debug.wait', (session, secs), async=async) print 'Task UUID: %s' % task_uuid def xapi_vm_stat(args, async = False): domname = args[0] server, session = connect() vm_uuid = resolve_vm(server, session, domname) vif_uuids = execute(server, 'VM.get_VIFs', (session, vm_uuid)) vbd_uuids = execute(server, 'VM.get_VBDs', (session, vm_uuid)) vcpus_utils = execute(server, 'VM.<API key>', (session, vm_uuid)) for vcpu_num in sorted(vcpus_utils.keys()): print 'CPU %s : %5.2f%%' % (vcpu_num, vcpus_utils[vcpu_num] * 100) for vif_uuid in vif_uuids: vif = execute(server, 'VIF.get_record', (session, vif_uuid)) print '%(device)s: rx: %(io_read_kbs)10.2f tx: %(io_write_kbs)10.2f' \ % vif for vbd_uuid in vbd_uuids: vbd = execute(server, 'VBD.get_record', (session, vbd_uuid)) print '%(device)s: rd: %(io_read_kbs)10.2f wr: %(io_write_kbs)10.2f' \ % vbd # Command Line Utils import cmd import shlex class XenAPICmd(cmd.Cmd): def __init__(self, server, session): cmd.Cmd.__init__(self) self.server = server self.session = session self.prompt = ">>> " def default(self, line): words = shlex.split(line) if len(words) > 0: cmd_name = words[0].replace('-', '_') is_async = 'async' in cmd_name if is_async: cmd_name = re.sub('async_', '', cmd_name) func_name = 'xapi_%s' % cmd_name func = globals().get(func_name) if func: try: args = tuple(words[1:]) func(args, async = is_async) return True except SystemExit: return False except OptionError, e: print 'Error:', str(e) return False except Exception, e: import traceback traceback.print_exc() return False print '*** Unknown command: %s' % words[0] return False def do_EOF(self, 
line): print sys.exit(0) def do_help(self, line): usage(print_usage = False) def emptyline(self): pass def postcmd(self, stop, line): return False def precmd(self, line): words = shlex.split(line) if len(words) > 0: words0 = words[0].replace('-', '_') return ' '.join([words0] + words[1:]) else: return line def shell(): server, session = connect() x = XenAPICmd(server, session) x.cmdloop('Xen API Prompt. Type "help" for a list of functions') def usage(command = None, print_usage = True): if not command: if print_usage: print 'Usage: xapi <subcommand> [options] [args]' print print 'Subcommands:' print for func in sorted(globals().keys()): if func.startswith('xapi_'): command = func[5:].replace('_', '-') args, description = COMMANDS.get(command, ('', '')) print '%-16s %-40s' % (command, description) print else: parse_args(command, ['-h']) def main(args): # poor man's optparse that doesn't abort on unrecognised opts options = {} remaining = [] arg_n = 0 while args: arg = args.pop(0) if arg in ('--help', '-h'): options['help'] = True elif arg in ('--server', '-s') and args: options['server'] = args.pop(0) elif arg in ('--user', '-u') and args: options['user'] = args.pop(0) elif arg in ('--password', '-p') and args: options['password'] = args.pop(0) else: remaining.append(arg) # abort here if these conditions are true if options.get('help') and not remaining: usage() sys.exit(1) if options.get('help') and remaining: usage(remaining[0]) sys.exit(1) if not remaining: usage() sys.exit(1) if options.get('server'): # it is ugly to use a global, but it is simple global SERVER_URI SERVER_URI = options['server'] if options.get('user'): global SERVER_USER SERVER_USER = options['user'] if options.get('password'): global SERVER_PASS SERVER_PASS = options['password'] subcmd = remaining[0].replace('-', '_') is_async = 'async' in subcmd if is_async: subcmd = re.sub('async_', '', subcmd) subcmd_func_name = 'xapi_' + subcmd subcmd_func = globals().get(subcmd_func_name, None) if subcmd 
== 'shell': shell() elif not subcmd_func or not callable(subcmd_func): print 'Error: Unable to find subcommand \'%s\'' % subcmd usage() sys.exit(1) try: subcmd_func(remaining[1:], async = is_async) except XenAPIError, e: print 'Error: %s' % str(e.args[0]) sys.exit(2) except OptionError, e: print 'Error: %s' % e sys.exit(0) if __name__ == "__main__": import sys main(sys.argv[1:])
#include <errno.h> #include <stdlib.h> /* Execute LINE as a shell command. */ int __libc_system (line) const char *line; { if (line == NULL) return 0; /* This indicates no command processor. */ __sys_errno (ENOSYS); return -1; } weak_alias (__libc_system, system) stub_warning (system) #include <stub-tag.h>
package org.nbheaven.sqe.codedefects.history.action;

import java.awt.EventQueue;
import java.awt.event.ActionEvent;
import java.util.Collection;
import javax.swing.AbstractAction;
import javax.swing.Action;
// NOTE(review): the two imports below had their simple class names
// redacted ("<API key>") in this copy of the file.
import org.nbheaven.sqe.codedefects.core.util.<API key>;
import org.nbheaven.sqe.codedefects.history.util.<API key>;
import org.netbeans.api.project.Project;
import org.openide.util.ContextAwareAction;
import org.openide.util.ImageUtilities;
import org.openide.util.Lookup;
import org.openide.util.LookupEvent;
import org.openide.util.LookupListener;
import org.openide.util.NbBundle;
import org.openide.util.Utilities;

/**
 * Context-aware Swing action that records a code-defects snapshot for the
 * project currently selected in the global Lookup.  The action enables
 * itself only while exactly one project is selected (see
 * {@link #getActiveProject()}).
 *
 * @author Sven Reimers
 */
public class SnapshotAction extends AbstractAction implements LookupListener, ContextAwareAction {

    /** Lookup this action instance observes for project selection. */
    private Lookup context;
    /** Live query for Project instances in {@link #context}. */
    private Lookup.Result<Project> lkpInfo;

    public SnapshotAction() {
        // Delegate to the context constructor using the global selection.
        this(Utilities.<API key>());
    }

    public SnapshotAction(Lookup context) {
        putValue("noIconInMenu", Boolean.TRUE); // NOI18N
        putValue(Action.SHORT_DESCRIPTION, NbBundle.getMessage(SnapshotAction.class, "HINT_Action"));
        putValue(SMALL_ICON, ImageUtilities.image2Icon(ImageUtilities.loadImage("org/nbheaven/sqe/codedefects/history/resources/camera.png")));
        this.context = context;
        //The thing we want to listen for the presence or absence of
        //on the global selection
        Lookup.Template<Project> tpl = new Lookup.Template<Project>(Project.class);
        lkpInfo = context.lookup(tpl);
        lkpInfo.addLookupListener(this);
        // Initialise the enabled state from the current selection.
        resultChanged(null);
    }

    // NOTE(review): method name redacted; by the ContextAwareAction
    // contract this is createContextAwareInstance -- confirm.
    @Override
    public Action <API key>(Lookup context) {
        return new SnapshotAction(context);
    }

    @Override
    public void resultChanged(LookupEvent ev) {
        updateEnableState();
    }

    /** @return the localised display name of this action. */
    public String getName() {
        return NbBundle.getMessage(SnapshotAction.class, "LBL_Action");
    }

    @Override
    public void actionPerformed(ActionEvent actionEvent) {
        // Only act when exactly one project is selected.
        if (null != getActiveProject()) {
            Project project = getActiveProject();
            <API key>.addSnapshot(project);
        }
    }

    /** Recompute the enabled state; always runs on the EDT. */
    private void updateEnableState() {
        if (!EventQueue.isDispatchThread()) {
            // Re-dispatch onto the EDT before touching Swing state.
            EventQueue.invokeLater(() -> updateEnableState());
            return;
        }
        setEnabled(<API key>.<API key>(getActiveProject()));
    }

    /**
     * @return the selected project if the lookup holds exactly one,
     *         otherwise {@code null} (none or ambiguous selection).
     */
    private Project getActiveProject() {
        Collection<? extends Project> projects = lkpInfo.allInstances();
        if (projects.size() == 1) {
            Project project = projects.iterator().next();
            return project;
        }
        return null;
    }
}
/*
 * Constants and prototypes for a SHA-256 based crypt()-style password
 * hashing scheme.
 * NOTE(review): the include-guard macro and two constant names were
 * redacted ("<API key>") in this copy of the file.
 */
#ifndef <API key>
#define <API key>

/* Hash iteration counts: the default, and the bounds enforced on
   caller-supplied values (max equals the default here).  */
#define ROUNDS_DEFAULT 5000
#define ROUNDS_MIN 1000
#define ROUNDS_MAX ROUNDS_DEFAULT
#define MIXCHARS 32
/* Component lengths of the crypt output string.  */
#define CRYPT_SALT_LENGTH 20
#define CRYPT_MAGIC_LENGTH 3
#define CRYPT_PARAM_LENGTH 13
#define SHA256_HASH_LENGTH 43
/* Total length of a full crypt string: salt + hash + magic + params.  */
#define <API key> (CRYPT_SALT_LENGTH + \
                   SHA256_HASH_LENGTH + \
                   CRYPT_MAGIC_LENGTH + \
                   CRYPT_PARAM_LENGTH)
#define <API key> 256

#include <stddef.h>
#include <my_global.h>

/* Locate the salt portion inside a crypt string; the begin/end pointers
   are set to delimit it.  */
int extract_user_salt(char **salt_begin, char **salt_end);

C_MODE_START
/* Generate the crypt hash of PLAINTEXT into CTBUFFER (CTBUFFLEN bytes),
   optionally reusing SWITCHSALT and the option string in PARAMS.  */
char * my_crypt_genhash(char *ctbuffer, size_t ctbufflen, const char *plaintext, int plaintext_len, const char *switchsalt, const char **params);
/* Fill BUFFER with a freshly generated random salt.  */
void generate_user_salt(char *buffer, int buffer_len);
/* XOR PATTERN into TO, repeating the pattern as needed.  */
void xor_string(char *to, int to_len, char *pattern, int pattern_len);
C_MODE_END
#endif
/*! \file
 *
 * \brief Trivial application to read an extension into a variable
 *
 * \author David Chappell <David.Chappell@trincoll.edu>
 *
 * \ingroup applications
 */

/* NOTE(review): many Asterisk API identifiers in this copy of the file
 * were redacted to the literal token "<API key>"; they must be restored
 * from the original before this file can compile. */

#include "asterisk.h"

<API key>()

#include "asterisk/file.h"
#include "asterisk/pbx.h"
#include "asterisk/app.h"
#include "asterisk/module.h"
#include "asterisk/indications.h"
#include "asterisk/channel.h"

/* Option flags accepted by the ReadExten() application. */
enum <API key> {
	OPT_SKIP = (1 << 0),       /* 's': skip if the channel is not up */
	OPT_INDICATION = (1 << 1), /* 'i': play an indication tone, not a file */
	OPT_NOANSWER = (1 << 2),   /* 'n': read digits without answering */
};

AST_APP_OPTIONS(<API key>, {
	AST_APP_OPTION('s', OPT_SKIP),
	AST_APP_OPTION('i', OPT_INDICATION),
	AST_APP_OPTION('n', OPT_NOANSWER),
});

static char *app = "ReadExten";

/*
 * Dialplan entry point for ReadExten(variable[,filename[,context[,options[,timeout]]]]).
 * Collects digits until they form a valid extension in the given context,
 * stores the result in the named variable and sets READEXTENSTATUS to one
 * of OK / TIMEOUT / INVALID / SKIP / HANGUP / ERROR.
 */
static int readexten_exec(struct ast_channel *chan, const char *data)
{
	int res = 0;
	char exten[256] = "";
	int maxdigits = sizeof(exten) - 1;
	int timeout = 0, digit_timeout = 0, x = 0;
	char *argcopy = NULL, *status = "";
	struct ast_tone_zone_sound *ts = NULL;
	struct ast_flags flags = {0};

	<API key>(arglist,
		AST_APP_ARG(variable);
		AST_APP_ARG(filename);
		AST_APP_ARG(context);
		AST_APP_ARG(options);
		AST_APP_ARG(timeout);
	);

	if (ast_strlen_zero(data)) {
		ast_log(LOG_WARNING, "ReadExten requires at least one argument\n");
		<API key>(chan, "READEXTENSTATUS", "ERROR");
		return 0;
	}

	argcopy = ast_strdupa(data);
	<API key>(arglist, argcopy);

	if (ast_strlen_zero(arglist.variable)) {
		ast_log(LOG_WARNING, "Usage: ReadExten(variable[,filename[,context[,options[,timeout]]]])\n");
		<API key>(chan, "READEXTENSTATUS", "ERROR");
		return 0;
	}

	if (ast_strlen_zero(arglist.filename)) {
		arglist.filename = NULL;
	}

	/* Default to the channel's current dialplan context. */
	if (ast_strlen_zero(arglist.context)) {
		arglist.context = ast_strdupa(ast_channel_context(chan));
	}

	if (!ast_strlen_zero(arglist.options)) {
		<API key>(<API key>, &flags, NULL, arglist.options);
	}

	/* Timeout argument is in seconds; convert to milliseconds. */
	if (!ast_strlen_zero(arglist.timeout)) {
		timeout = atoi(arglist.timeout);
		if (timeout > 0)
			timeout *= 1000;
	}

	/* Fall back to the PBX response timeout (or 10s without a PBX). */
	if (timeout <= 0)
		timeout = ast_channel_pbx(chan) ? ast_channel_pbx(chan)->rtimeoutms : 10000;

	/* Inter-digit timeout from the PBX (or 5s without a PBX). */
	if (digit_timeout <= 0)
		digit_timeout = ast_channel_pbx(chan) ? ast_channel_pbx(chan)->dtimeoutms : 5000;

	if (ast_test_flag(&flags, OPT_INDICATION) && !ast_strlen_zero(arglist.filename)) {
		ts = <API key>(ast_channel_zone(chan), arglist.filename);
	}

	do {
		if (ast_channel_state(chan) != AST_STATE_UP) {
			if (ast_test_flag(&flags, OPT_SKIP)) {
				/* At the user's option, skip if the line is not up */
				<API key>(chan, arglist.variable, "");
				status = "SKIP";
				break;
			} else if (!ast_test_flag(&flags, OPT_NOANSWER)) {
				/* Otherwise answer unless we're supposed to read while on-hook */
				res = ast_answer(chan);
			}
		}

		if (res < 0) {
			status = "HANGUP";
			break;
		}

		ast_playtones_stop(chan);
		ast_stopstream(chan);

		if (ts && ts->data[0]) {
			res = ast_playtones_start(chan, 0, ts->data, 0);
		} else if (arglist.filename) {
			if (ast_test_flag(&flags, OPT_INDICATION) && ast_fileexists(arglist.filename, NULL, <API key>(chan)) <= 0) {
				/*
				 * We were asked to play an indication that did not exist in the config.
				 * If no such file exists, play it as a tonelist.  With any luck they won't
				 * have a file named "350+440.ulaw"
				 * (but honestly, who would do something so silly?)
				 */
				res = ast_playtones_start(chan, 0, arglist.filename, 0);
			} else {
				res = ast_streamfile(chan, arglist.filename, <API key>(chan));
			}
		}

		/* Collect digits one at a time until they match an extension. */
		for (x = 0; x < maxdigits; x++) {
			ast_debug(3, "extension so far: '%s', timeout: %d\n", exten, timeout);
			res = ast_waitfordigit(chan, timeout);
			ast_playtones_stop(chan);
			ast_stopstream(chan);
			/* After the first digit, switch to the inter-digit timeout. */
			timeout = digit_timeout;

			if (res < 1) {		/* timeout expired or hangup */
				if (ast_check_hangup(chan)) {
					status = "HANGUP";
				} else if (x == 0) {
					<API key>(chan, arglist.variable, "t");
					status = "TIMEOUT";
				}
				break;
			}

			exten[x] = res;
			/* NOTE(review): the following span is corrupted in this copy
			 * of the file (a condition and a statement have been fused:
			 * "&& res == ' exten[x] = '\0';").  It must be restored from
			 * the original source; left byte-for-byte as found. */
			if (!<API key>(chan, arglist.context, exten, 1 /* priority */,
				S_COR(ast_channel_caller(chan)->id.number.valid, ast_channel_caller(chan)->id.number.str, NULL))) {
				if (!<API key>(chan, arglist.context, exten, 1,
					S_COR(ast_channel_caller(chan)->id.number.valid, ast_channel_caller(chan)->id.number.str, NULL))
					&& res == ' exten[x] = '\0';
				}
				break;
			}
		}

		if (!ast_strlen_zero(status))
			break;

		if (<API key>(chan, arglist.context, exten, 1,
			S_COR(ast_channel_caller(chan)->id.number.valid, ast_channel_caller(chan)->id.number.str, NULL))) {
			ast_debug(3, "User entered valid extension '%s'\n", exten);
			<API key>(chan, arglist.variable, exten);
			status = "OK";
		} else {
			ast_debug(3, "User dialed invalid extension '%s' in context '%s' on %s\n", exten, arglist.context, ast_channel_name(chan));
			<API key>(chan, arglist.variable, "i");
			<API key>(chan, "INVALID_EXTEN", exten);
			status = "INVALID";
		}
	} while (0);

	if (ts) {
		ts = <API key>(ts);
	}

	<API key>(chan, "READEXTENSTATUS", status);

	/* Only a hangup ("HANGUP") tears the channel down. */
	return status[0] == 'H' ? -1 : 0;
}

/* Unregister the ReadExten application. */
static int unload_module(void)
{
	int res = <API key>(app);
	return res;
}

/* Register the ReadExten application with the PBX core. */
static int load_module(void)
{
	int res = <API key>(app, readexten_exec);
	return res;
}

<API key>(ASTERISK_GPL_KEY, "Read and evaluate extension validity");
<?php
defined('_JEXEC') or die();

// NOTE(review): several constant/property/method names in this copy of
// the file were redacted to the literal token "<API key>"; restore them
// from the original before use.
if (!class_exists('VmMediaHandler')) require(<API key>.DS.'helpers'.DS.'mediahandler.php');

/**
 * Image media handler: extends the generic VmMediaHandler with image
 * upload handling and thumbnail creation.
 */
class VmImage extends VmMediaHandler {

	/**
	 * Handle the 'media_action' posted with the form: uploads the file
	 * and/or (re)creates its thumbnail, updating this object's file_url,
	 * filename and file_url_thumb accordingly.
	 *
	 * @param array $data posted form data; $data['media_action'] drives the work
	 * @return array the (unmodified) $data, after parent processing
	 */
	function processAction($data){
		if(empty($data['media_action'])) return $data;
		$data = parent::processAction($data);
		if( $data['media_action'] == 'upload_create_thumb' ){
			$oldFileUrl = $this->file_url;
			$file_name = $this->uploadFile($this->file_url_folder);
			if($file_name){
				// A new file replaced the old one: remove the stale original.
				if($file_name!=$oldFileUrl && !empty($this->filename)){
					$this->deleteFile($oldFileUrl);
				}
				$this->file_url = $this->file_url_folder.$file_name;
				$this->filename = $file_name;
				$oldFileUrlThumb = $this->file_url_thumb;
				$this->file_url_thumb = $this->createThumb();
				// Remove the stale thumbnail if the name changed.
				if($this->file_url_thumb!=$oldFileUrlThumb){
					$this->deleteFile($oldFileUrlThumb);
				}
			}
		}
		//creating the thumbnail image
		else if( $data['media_action'] == 'create_thumb' ){
			$this->file_url_thumb = $this->createThumb();
		}
		// Default the title to the uploaded file name when none was given.
		if(empty($this->file_title) && !empty($file_name)) $this->file_title = $file_name;
		return $data;
	}

	/**
	 * Render the full-size image (or, for for-sale media, only its thumb
	 * as a preview).  Falls back to the theme's no_image_found picture
	 * when the local file does not exist.
	 *
	 * @param string  $imageArgs   extra attributes for the img tag
	 * @param boolean $lightbox    wrap in a lightbox link
	 * @param string  $effect      lightbox css class (unused here, passed by callers)
	 * @param boolean $description append file_description after the image
	 * @return string rendered html
	 */
	function displayMediaFull($imageArgs='',$lightbox=true,$effect ="class='modal'",$description = true ){
		if(!$this->file_is_forSale){
			// Remote image URL
			if( substr( $this->file_url, 0, 4) == "http" ) {
				$file_url = $this->file_url;
				$file_alt = $this->file_title;
			} else {
				$rel_path = str_replace('/',DS,$this->file_url_folder);
				$<API key> = JPATH_ROOT.DS.$rel_path.$this->file_name.'.'.$this->file_extension;
				if (!file_exists($<API key>)) {
					// Missing file: show the configured placeholder image.
					$file_url = $this->theme_url.'assets/images/vmgeneral/'.VmConfig::get('no_image_found');
					$file_alt = JText::_('<API key>').' '.$this->file_description;
				} else {
					$file_url = $this->file_url;
					$file_alt = $this->file_meta;
				}
			}
			$postText = '';
			if($description) $postText = $this->file_description;
			return $this->displayIt($file_url, $file_alt, $imageArgs,$lightbox,'',$postText);
		} else {
			//Media which should be sold, show them only as thumb (works as preview)
			return $this->displayMediaThumb('id="vm_display_image"',false);
		}
	}

	/**
	 * A small function that ensures we always build the thumbnail name
	 * with the same method: "<name>_<width>x<height>".
	 *
	 * @param int $width  thumb width; 0 means use the img_width config (default 90)
	 * @param int $height thumb height; 0 means use the img_height config (default 90)
	 * @return string|boolean the thumb base name, or false when file_name is empty
	 */
	public function createThumbName($width=0,$height=0){
		if(empty($this->file_name)) return false;
		if(empty($width)) $width = VmConfig::get('img_width', 90);
		if(empty($height)) $height = VmConfig::get('img_height', 90);
		$this->file_name_thumb = $this->file_name.'_'.$width.'x'.$height;
		return $this->file_name_thumb;
	}

	/**
	 * This function actually creates the thumbnail file on disk using
	 * Img2Thumb, creating the target folder hierarchy first.
	 *
	 * @author Max Milbers
	 * @return string|int|void relative url of the thumbnail, 0 on failure,
	 *         or nothing when resizing is disabled / a sync is running
	 */
	public function createThumb() {
		$synchronise = JRequest::getString('synchronise',false);
		if(!VmConfig::get('img_resize_enable') || $synchronise) return;
		//now lets create the thumbnail, saving is done in this function
		$width = VmConfig::get('img_width', 90);
		$height = VmConfig::get('img_height', 90);
		// Don't allow sizes beyond 2000 pixels
		//I dont think that this is good, should be config
		// $width = min($width, 2000);
		// $height = min($height, 2000);
		$maxsize = false;
		// White background used when padding the resized image.
		$bgred = 255;
		$bggreen = 255;
		$bgblue = 255;
		$root = '';
		if($this->file_is_forSale==0){
			$rel_path = str_replace('/',DS,$this->file_url_folder);
			$<API key> = JPATH_ROOT.DS.$rel_path.$this->file_name.'.'.$this->file_extension;
		} else {
			// For-sale media live outside the web root; folder is already absolute.
			$rel_path = str_replace('/',DS,$this->file_url_folder);
			$<API key> = $this->file_url_folder.$this->file_name.'.'.$this->file_extension;
		}
		$this->file_name_thumb = $this->createThumbName();
		$file_path_thumb = str_replace('/',DS,$this-><API key>);
		$resizedFilenamePath = JPATH_ROOT.DS.$file_path_thumb.$this->file_name_thumb.'.'.$this->file_extension;
		$this-><API key>($file_path_thumb);
		if (file_exists($<API key>)) {
			if (!class_exists('Img2Thumb')) require(<API key>.DS.'helpers'.DS.'img2thumb.php');
			$createdImage = new Img2Thumb($<API key>, $width, $height, $resizedFilenamePath, $maxsize, $bgred, $bggreen, $bgblue);
			if($createdImage){
				return $this-><API key>.$this->file_name_thumb.'.'.$this->file_extension;
			} else {
				return 0;
			}
		} else {
			vmError('Couldnt create thumb, file not found '.$<API key>);
			return 0;
		}
	}

	/**
	 * Create every missing folder along $path below JPATH_ROOT.
	 *
	 * @param string $path DS-separated relative path
	 */
	public function <API key>($path){
		$elements = explode(DS,$path);
		$examine = JPATH_ROOT;
		foreach($elements as $piece){
			$examine = $examine.DS.$piece;
			if(!JFolder::exists($examine)){
				JFolder::create($examine);
				vmInfo('create folder for resized image '.$examine);
			}
		}
	}

	/**
	 * Display an icon-style button linking to the given URL.
	 *
	 * @param string $link       Link to use in the href tag
	 * @param string $imageclass CSS class of the icon span to display
	 * @param string $text       Text used for the link title and shown under the icon
	 */
	public function displayImageButton($link, $imageclass, $text) {
		$button = '<a title="' . $text . '" href="' . $link . '">';
		$button .= '<span class="vmicon48 '.$imageclass.'"></span>';
		$button .= '<br />' . $text.'</a>';
		echo $button;
	}
}
/*
 * Umbrella header: pulls in the component headers below so clients need
 * only a single #include.
 * NOTE(review): the include-guard macro and every included header name
 * were redacted ("<API key>") in this copy of the file; restore them from
 * the original before use.
 */
#ifndef <API key>
#define <API key>

#include "<API key>.h"
#include "<API key>.h"
#include "<API key>.h"
#include "<API key>.h"
#include "<API key>.h"
#include "<API key>.h"
#include "<API key>.h"
#include "<API key>.h"
#include "<API key>.h"

#endif /* <API key> */
package spim.fiji.plugin; import ij.gui.GenericDialog; import ij.plugin.PlugIn; import java.io.File; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.Random; import mpicbg.spim.data.sequence.Channel; import mpicbg.spim.data.sequence.ViewDescription; import mpicbg.spim.data.sequence.ViewId; import mpicbg.spim.data.sequence.VoxelDimensions; import mpicbg.spim.io.IOFunctions; import net.imglib2.KDTree; import net.imglib2.RealPoint; import net.imglib2.neighborsearch.<API key>; import spim.fiji.plugin.queryXML.LoadParseQueryXML; import spim.fiji.plugin.thinout.<API key>; import spim.fiji.plugin.thinout.Histogram; import spim.fiji.spimdata.SpimData2; import spim.fiji.spimdata.interestpoints.InterestPoint; import spim.fiji.spimdata.interestpoints.InterestPointList; import spim.fiji.spimdata.interestpoints.<API key>; import spim.fiji.spimdata.interestpoints.ViewInterestPoints; public class ThinOut_Detections implements PlugIn { public static boolean[] <API key>; public static int[] defaultSubSampling; public static String[] defaultNewLabels; public static int[] defaultRemoveKeep; public static double[] <API key>, <API key>; public static String[] removeKeepChoice = new String[]{ "Remove Range", "Keep Range" }; public static double <API key> = 0; public static double <API key> = 5; public static int <API key> = 1; public static String defaultNewLabelText = "thinned-out"; public static int <API key> = 0; // 0 == remove, 1 == keep @Override public void run( final String arg ) { final LoadParseQueryXML xml = new LoadParseQueryXML(); if ( !xml.queryXML( "", true, false, true, true ) ) return; final SpimData2 data = xml.getData(); final List< ViewId > viewIds = SpimData2.getAllViewIdsSorted( data, xml.<API key>(), xml.<API key>() ); // ask which channels have the objects we are searching for final List< <API key> > channels = <API key>( data, viewIds ); if ( channels == null ) return; // get the actual min/max thresholds for cutting out 
if ( !<API key>( data, viewIds, channels ) ) return; // thin out detections and save the new interestpoint files if ( !thinOut( data, viewIds, channels, true ) ) return; // write new xml SpimData2.saveXML( data, xml.getXMLFileName(), xml.getClusterExtension() ); } public static boolean thinOut( final SpimData2 spimData, final List< ViewId > viewIds, final List< <API key> > channels, final boolean save ) { final ViewInterestPoints vip = spimData.<API key>(); for ( final <API key> channel : channels ) { final double minDistance = channel.getMin(); final double maxDistance = channel.getMax(); final boolean keepRange = channel.keepRange(); for ( final ViewId viewId : viewIds ) { final ViewDescription vd = spimData.<API key>().getViewDescription( viewId ); if ( !vd.isPresent() || vd.getViewSetup().getChannel().getId() != channel.getChannel().getId() ) continue; final <API key> vipl = vip.<API key>( viewId ); final InterestPointList oldIpl = vipl.<API key>( channel.getLabel() ); if ( oldIpl.getInterestPoints() == null ) oldIpl.loadInterestPoints(); final VoxelDimensions voxelSize = vd.getViewSetup().getVoxelSize(); // assemble the list of points (we need two lists as the KDTree sorts the list) // we assume that the order of list2 and points is preserved! 
final List< RealPoint > list1 = new ArrayList< RealPoint >(); final List< RealPoint > list2 = new ArrayList< RealPoint >(); final List< double[] > points = new ArrayList< double[] >(); for ( final InterestPoint ip : oldIpl.getInterestPoints() ) { list1.add ( new RealPoint( ip.getL()[ 0 ] * voxelSize.dimension( 0 ), ip.getL()[ 1 ] * voxelSize.dimension( 1 ), ip.getL()[ 2 ] * voxelSize.dimension( 2 ) ) ); list2.add ( new RealPoint( ip.getL()[ 0 ] * voxelSize.dimension( 0 ), ip.getL()[ 1 ] * voxelSize.dimension( 1 ), ip.getL()[ 2 ] * voxelSize.dimension( 2 ) ) ); points.add( ip.getL() ); } // make the KDTree final KDTree< RealPoint > tree = new KDTree< RealPoint >( list1, list1 ); // Nearest neighbor for each point, populate the new list final <API key>< RealPoint > nn = new <API key>< RealPoint >( tree, 2 ); final InterestPointList newIpl = new InterestPointList( oldIpl.getBaseDir(), new File( oldIpl.getFile().getParentFile(), "tpId_" + viewId.getTimePointId() + "_viewSetupId_" + viewId.getViewSetupId() + "." 
+ channel.getNewLabel() ) ); newIpl.setInterestPoints( new ArrayList< InterestPoint >() ); int id = 0; for ( int j = 0; j < list2.size(); ++j ) { final RealPoint p = list2.get( j ); nn.search( p ); // first nearest neighbor is the point itself, we need the second nearest final double d = nn.getDistance( 1 ); if ( ( keepRange && d >= minDistance && d <= maxDistance ) || ( !keepRange && ( d < minDistance || d > maxDistance ) ) ) { newIpl.getInterestPoints().add( new InterestPoint( id++, points.get( j ).clone() ) ); } } if ( keepRange ) newIpl.setParameters( "thinned-out '" + channel.getLabel() + "', kept range from " + minDistance + " to " + maxDistance ); else newIpl.setParameters( "thinned-out '" + channel.getLabel() + "', removed range from " + minDistance + " to " + maxDistance ); vipl.<API key>( channel.getNewLabel(), newIpl ); IOFunctions.println( new Date( System.currentTimeMillis() ) + ": TP=" + vd.getTimePointId() + " ViewSetup=" + vd.getViewSetupId() + ", Detections: " + oldIpl.getInterestPoints().size() + " >>> " + newIpl.getInterestPoints().size() ); if ( save && !newIpl.saveInterestPoints() ) { IOFunctions.println( "Error saving interest point list: " + new File( newIpl.getBaseDir(), newIpl.getFile().toString() + newIpl.<API key>() ) ); return false; } } } return true; } public static boolean <API key>( final SpimData2 spimData, final List< ViewId > viewIds, final List< <API key> > channels ) { for ( final <API key> channel : channels ) if ( channel.showHistogram() ) plotHistogram( spimData, viewIds, channel ); if ( <API key> == null || <API key>.length != channels.size() || <API key> == null || <API key>.length != channels.size() ) { <API key> = new double[ channels.size() ]; <API key> = new double[ channels.size() ]; for ( int i = 0; i < channels.size(); ++i ) { <API key>[ i ] = <API key>; <API key>[ i ] = <API key>; } } if ( defaultRemoveKeep == null || defaultRemoveKeep.length != channels.size() ) { defaultRemoveKeep = new int[ channels.size() ]; for 
( int i = 0; i < channels.size(); ++i ) defaultRemoveKeep[ i ] = <API key>; } final GenericDialog gd = new GenericDialog( "Define cut-off threshold" ); for ( int c = 0; c < channels.size(); ++c ) { final <API key> channel = channels.get( c ); gd.addChoice( "Channel_" + channel.getChannel().getName() + "_", removeKeepChoice, removeKeepChoice[ defaultRemoveKeep[ c ] ] ); gd.addNumericField( "Channel_" + channel.getChannel().getName() + "<API key>", <API key>[ c ], 2 ); gd.addNumericField( "Channel_" + channel.getChannel().getName() + "<API key>", <API key>[ c ], 2 ); gd.addMessage( "" ); } gd.showDialog(); if ( gd.wasCanceled() ) return false; for ( int c = 0; c < channels.size(); ++c ) { final <API key> channel = channels.get( c ); final int removeKeep = defaultRemoveKeep[ c ] = gd.getNextChoiceIndex(); if ( removeKeep == 1 ) channel.setKeepRange( true ); else channel.setKeepRange( false ); channel.setMin( <API key>[ c ] = gd.getNextNumber() ); channel.setMax( <API key>[ c ] = gd.getNextNumber() ); if ( channel.getMin() >= channel.getMax() ) { IOFunctions.println( "You selected the minimal threshold larger than the maximal threshold for channel " + channel.getChannel().getName() ); IOFunctions.println( "Stopping." 
); return false; } else { if ( channel.keepRange() ) IOFunctions.println( "Channel " + channel.getChannel().getName() + ": keep only distances from " + channel.getMin() + " >>> " + channel.getMax() ); else IOFunctions.println( "Channel " + channel.getChannel().getName() + ": remove distances from " + channel.getMin() + " >>> " + channel.getMax() ); } } return true; } public static Histogram plotHistogram( final SpimData2 spimData, final List< ViewId > viewIds, final <API key> channel ) { final ViewInterestPoints vip = spimData.<API key>(); // list of all distances final ArrayList< Double > distances = new ArrayList< Double >(); final Random rnd = new Random( System.currentTimeMillis() ); String unit = null; for ( final ViewId viewId : viewIds ) { final ViewDescription vd = spimData.<API key>().getViewDescription( viewId ); if ( !vd.isPresent() || vd.getViewSetup().getChannel().getId() != channel.getChannel().getId() ) continue; final <API key> vipl = vip.<API key>( viewId ); final InterestPointList ipl = vipl.<API key>( channel.getLabel() ); final VoxelDimensions voxelSize = vd.getViewSetup().getVoxelSize(); if ( ipl.getInterestPoints() == null ) ipl.loadInterestPoints(); if ( unit == null ) unit = vd.getViewSetup().getVoxelSize().unit(); // assemble the list of points final List< RealPoint > list = new ArrayList< RealPoint >(); for ( final InterestPoint ip : ipl.getInterestPoints() ) { list.add ( new RealPoint( ip.getL()[ 0 ] * voxelSize.dimension( 0 ), ip.getL()[ 1 ] * voxelSize.dimension( 1 ), ip.getL()[ 2 ] * voxelSize.dimension( 2 ) ) ); } // make the KDTree final KDTree< RealPoint > tree = new KDTree< RealPoint >( list, list ); // Nearest neighbor for each point final <API key>< RealPoint > nn = new <API key>< RealPoint >( tree, 2 ); for ( final RealPoint p : list ) { // every n'th point only if ( rnd.nextDouble() < 1.0 / (double)channel.getSubsampling() ) { nn.search( p ); // first nearest neighbor is the point itself, we need the second nearest 
distances.add( nn.getDistance( 1 ) ); } } } final Histogram h = new Histogram( distances, 100, "Distance Histogram [Channel=" + channel.getChannel().getName() + "]", unit ); h.showHistogram(); IOFunctions.println( "Channel " + channel.getChannel().getName() + ": min distance=" + h.getMin() + ", max distance=" + h.getMax() ); return h; } public static ArrayList< <API key> > <API key>( final SpimData2 spimData, final List< ViewId > viewIds ) { // build up the dialog final GenericDialog gd = new GenericDialog( "Choose segmentations to thin out" ); final List< Channel > channels = SpimData2.<API key>( spimData, viewIds ); final int nAllChannels = spimData.<API key>().<API key>().size(); if ( <API key>.<API key> == null || <API key>.<API key>.length != nAllChannels ) <API key>.<API key> = new int[ nAllChannels ]; if ( <API key> == null || <API key>.length != channels.size() ) { <API key> = new boolean[ channels.size() ]; for ( int i = 0; i < channels.size(); ++i ) <API key>[ i ] = true; } if ( defaultSubSampling == null || defaultSubSampling.length != channels.size() ) { defaultSubSampling = new int[ channels.size() ]; for ( int i = 0; i < channels.size(); ++i ) defaultSubSampling[ i ] = <API key>; } if ( defaultNewLabels == null || defaultNewLabels.length != channels.size() ) { defaultNewLabels = new String[ channels.size() ]; for ( int i = 0; i < channels.size(); ++i ) defaultNewLabels[ i ] = defaultNewLabelText; } // check which channels and labels are available and build the choices final ArrayList< String[] > channelLabels = new ArrayList< String[] >(); int j = 0; for ( final Channel channel : channels ) { final String[] labels = <API key>.<API key>( spimData, viewIds, channel, "thin out" ); if ( <API key>.<API key>[ j ] >= labels.length ) <API key>.<API key>[ j ] = 0; String ch = channel.getName().replace( ' ', '_' ); gd.addCheckbox( "Channel_" + ch + "<API key>", <API key>[ j ] ); gd.addChoice( "Channel_" + ch + "_Interest_points", labels, labels[ <API key>.<API 
key>[ j ] ] ); gd.addStringField( "Channel_" + ch + "_New_label", defaultNewLabels[ j ], 20 ); gd.addNumericField( "Channel_" + ch + "_Subsample histogram", defaultSubSampling[ j ], 0, 5, "times" ); channelLabels.add( labels ); ++j; } gd.showDialog(); if ( gd.wasCanceled() ) return null; // assemble which channels have been selected with with label final ArrayList< <API key> > channelsToProcess = new ArrayList< <API key> >(); j = 0; for ( final Channel channel : channels ) { final boolean showHistogram = <API key>[ j ] = gd.getNextBoolean(); final int channelChoice = <API key>.<API key>[ j ] = gd.getNextChoiceIndex(); final String newLabel = defaultNewLabels[ j ] = gd.getNextString(); final int subSampling = defaultSubSampling[ j ] = (int)Math.round( gd.getNextNumber() ); if ( channelChoice < channelLabels.get( j ).length - 1 ) { String label = channelLabels.get( j )[ channelChoice ]; if ( label.contains( <API key>.warningLabel ) ) label = label.substring( 0, label.indexOf( <API key>.warningLabel ) ); channelsToProcess.add( new <API key>( channel, label, newLabel, showHistogram, subSampling ) ); } ++j; } return channelsToProcess; } public static void main( final String[] args ) { new ThinOut_Detections().run( null ); } }
<?php
// Renders the "oprdesubi" PDF report. If the report query returns no rows,
// the user is sent back to the report form with a JavaScript alert instead.
//
// Fixes: short open tags `<?` replaced with `<?php` (short tags depend on the
// short_open_tag ini setting and are not portable); commented-out dead code
// removed.
require_once("pdfoprdesubi.php");

$obj = new pdfreporte();

// Run the report query first so PDF output is only produced when data exists.
$tb = $obj->bd->select($obj->sql);

if (!$tb->EOF) {
    // Rows found: build and stream the PDF.
    $obj->AliasNbPages();
    $obj->AddPage();
    $obj->Cuerpo();
    $obj->Output();
} else {
    // No rows: notify the user and return to the report page.
    ?>
    <script>
        alert('No hay informacion para procesar este reporte...');
        location = ("oprdesubi.php");
    </script>
    <?php
}
<?php

namespace Glpi\Tests\Api\Deprecated;

/**
 * Contract for test fixtures that exercise a deprecated API item type
 * against its current replacement: each implementation supplies the two
 * type names, the inputs to send, and the data/queries expected on
 * either side of the comparison.
 *
 * NOTE(review): several method names below are redacted as "<API key>"
 * in this copy of the file; their intent is inferred from the adjacent
 * docblocks only — confirm against the canonical source.
 */
interface DeprecatedInterface
{
    /**
     * Get the deprecated type name.
     *
     * @return string
     */
    public static function getDeprecatedType(): string;

    /**
     * Get the current (replacement) type name.
     *
     * @return string
     */
    public static function getCurrentType(): string;

    /**
     * Get the fields expected on the deprecated representation.
     *
     * @return array
     */
    public static function getDeprecatedFields(): array;

    /**
     * Get the input used to add an item through the current API.
     *
     * @return array
     */
    public static function getCurrentAddInput(): array;

    /**
     * Get the input used to add an item through the deprecated API.
     *
     * @return array
     */
    public static function <API key>(): array;

    /**
     * Get the input used to update an item through the deprecated API.
     *
     * @return array
     */
    public static function <API key>(): array;

    /**
     * Get the data expected after the insert.
     *
     * @return array
     */
    public static function <API key>(): array;

    /**
     * Get the data expected after the update.
     *
     * @return array
     */
    public static function <API key>(): array;

    /**
     * Get the search query for the deprecated type.
     *
     * @return string
     */
    public static function <API key>(): string;

    /**
     * Get the search query for the current type.
     *
     * @return string
     */
    public static function <API key>(): string;
}
/*
 * Board support for the SMBA1002 (Tegra2) tablet: registers the core
 * platform devices (Bluetooth rfkill / bluesleep, dock detect, watchdog,
 * GART, AES, AVP, PMU) and wires up the machine init/reserve/fixup hooks.
 *
 * NOTE(review): several identifiers in this copy are redacted as
 * "<API key>"; they are preserved verbatim and must be restored from the
 * canonical source before this file can build.
 *
 * Fix: duplicate `#include <mach/iomap.h>` removed (it was listed twice).
 */
#include <linux/console.h>
#include <linux/kernel.h>
#include <linux/init.h>
#include <linux/version.h>
#include <linux/platform_device.h>
#include <linux/serial_8250.h>
#include <linux/clk.h>
#include <linux/mtd/mtd.h>
#include <linux/mtd/partitions.h>
#include <linux/dma-mapping.h>
#include <linux/fsl_devices.h>
#include <linux/platform_data/tegra_usb.h>
#include <linux/pda_power.h>
#include <linux/gpio.h>
#include <linux/delay.h>
#include <linux/reboot.h>
#include <linux/i2c-tegra.h>
#include <linux/memblock.h>
#include <linux/antares_dock.h>

#include <asm/mach-types.h>
#include <asm/mach/arch.h>
#include <asm/mach/time.h>
#include <asm/setup.h>

#include <mach/io.h>
#include <mach/w1.h>
#include <mach/iomap.h>
#include <mach/irqs.h>
#include <mach/nand.h>
#include <mach/sdhci.h>
#include <mach/gpio.h>
#include <mach/clk.h>
#include <mach/usb_phy.h>
#include <mach/i2s.h>
#include <mach/system.h>

#include <linux/nvmap.h>

#include "board.h"
#include "board-smba1002.h"
#include "clock.h"
#include "gpio-names.h"
#include "devices.h"
#include "pm.h"
#include "wakeups-t2.h"
#include "wdt-recovery.h"

#include <linux/rfkill-gpio.h>

#define PMC_CTRL		0x0
#define PMC_CTRL_INTR_LOW	(1 << 17)

/* GPIO-driven rfkill switch for the Bluetooth radio (reset line only;
 * no dedicated shutdown GPIO on this board). */
static struct <API key> bluetooth_rfkill = {
	.name		= "bluetooth_rfkill",
	.shutdown_gpio	= -1,
	.reset_gpio	= SMBA1002_BT_RESET,
	.type		= <API key>,
};

static struct platform_device <API key> = {
	.name	= "rfkill_gpio",
	.id	= -1,
	.dev	= {
		.platform_data = &bluetooth_rfkill,
	},
};

#ifdef CONFIG_BT_BLUEDROID
extern void <API key>(struct platform_device *uart_dev);
#endif

/* Register Bluetooth support: alias the 32 kHz sleep clock for the BCM4329
 * and, on Bluedroid builds, hand the radio's UART (UART-C) to the stack. */
void __init <API key>(void)
{
	/* Add clock resource */
	clk_add_alias("bcm4329_32k_clk", <API key>.name, "blink", NULL);
#ifdef CONFIG_BT_BLUEDROID
	<API key>(&tegra_uartc_device);
#endif
}

/* Wake/IRQ resources consumed by the "bluesleep" low-power BT driver. */
static struct resource <API key>[] = {
	[0] = {
		.name	= "gpio_host_wake",
		.start	= SMBA1002_BT_IRQ,
		.end	= SMBA1002_BT_IRQ,
		.flags	= IORESOURCE_IO,
	},
	[1] = {
		.name	= "gpio_ext_wake",
		.start	= SMBA1002_BT_WAKEUP,
		.end	= SMBA1002_BT_WAKEUP,
		.flags	= IORESOURCE_IO,
	},
	[2] = {
		.name	= "host_wake",
		.start	= TEGRA_GPIO_TO_IRQ(SMBA1002_BT_IRQ),
		.end	= TEGRA_GPIO_TO_IRQ(SMBA1002_BT_IRQ),
		.flags	= IORESOURCE_IRQ | <API key>,
	},
};

static struct platform_device <API key> = {
	.name		= "bluesleep",
	.id		= -1,
	.num_resources	= ARRAY_SIZE(<API key>),
	.resource	= <API key>,
};

/* Dock-detect GPIO/IRQ wiring. */
static struct dock_platform_data <API key> = {
	.irq		= TEGRA_GPIO_TO_IRQ(SMBA1002_DOCK),
	.gpio_num	= SMBA1002_DOCK,
};

static struct platform_device tegra_dock_device = {
	.name	= "tegra_dock",
	.id	= -1,
	.dev	= {
		.platform_data = &<API key>,
	},
};

/* Devices registered in one shot by the init hook below. */
static struct platform_device *smba_devices[] __initdata = {
	&tegra_pmu_device,
	&tegra_gart_device,
	&tegra_aes_device,
	&<API key>,
	&<API key>,
	&tegra_wdt_device,
	&tegra_avp_device,
	&tegra_dock_device,
};

/* Machine init hook: subsystem registration order matters (pinmux before
 * clocks, i2c before the power subsystem, power before everything else). */
static void __init tegra_smba_init(void)
{
	/* Initialize the pinmux */
	smba_pinmux_init();

	/* Initialize the clocks - clocks require the pinmux to be initialized first */
	smba_clks_init();

	<API key>(smba_devices, ARRAY_SIZE(smba_devices));

	/* Register i2c devices - required for Power management and MUST be done before the power register */
	<API key>();

	/* Register the power subsystem - Including the poweroff handler - Required by all the others */
	smba_charge_init();
	smba_regulator_init();

	/* Register the USB device */
	<API key>();

	/* Register UART devices */
	<API key>();

	/* Register RAM Console */
	<API key>();

	/* Register GPU devices */
	smba_panel_init();

	/* Register Audio devices */
	<API key>();

	/* Register all the keyboard devices */
	smba_keys_init();

	/* Register touchscreen devices */
	<API key>();

	/* Register accelerometer device */
	<API key>();

	/* Register Camera powermanagement devices */
	<API key>();

	/* Register NAND flash devices */
	<API key>();

	/* Register SDHCI devices */
	smba_sdhci_init();

	/* Register Bluetooth powermanagement devices */
	<API key>();

	/* Release the tegra bootloader framebuffer */
	<API key>();
}

static void __init tegra_smba_reserve(void)
{
	/* Keep page 0 unmapped so NULL dereferences fault instead of
	 * silently reading/writing low memory. */
	if (memblock_reserve(0x0, 4096) < 0)
		pr_warn("Cannot reserve first 4K of memory for safety\n");

	/* Reserve the graphics memory */
	tegra_reserve(<API key>, <API key>, <API key>);
	<API key>(SZ_1M);
}

/* ATAG fixup: describe the single SMBA1002 memory bank to the kernel.
 * NOTE(review): a redacted constant is subtracted from the bank size —
 * presumably a top-of-RAM carveout; confirm against the board headers. */
static void __init tegra_smba_fixup(struct machine_desc *desc,
				    struct tag *tags, char **cmdline,
				    struct meminfo *mi)
{
	mi->nr_banks = SMBA1002_MEM_BANKS;
	mi->bank[0].start = PHYS_OFFSET;
	mi->bank[0].size = SMBA1002_MEM_SIZE - <API key>;
}

/*
 * NOTE(review): the machine id reads HARMONY/"harmony" although every hook
 * here is SMBA1002-specific — presumably kept so bootloaders passing the
 * Harmony machine type still boot this board; confirm intent.
 */
MACHINE_START(HARMONY, "harmony")
	.boot_params	= 0x00000100,
	.fixup		= tegra_smba_fixup,
	.map_io		= tegra_map_common_io,
	.reserve	= tegra_smba_reserve,
	.init_early	= tegra_init_early,
	.init_irq	= tegra_init_irq,
	.timer		= &tegra_timer,
	.init_machine	= tegra_smba_init,
MACHINE_END
/** \file \ingroup u2w */ #include "WorldSocket.h" // must be first to make ACE happy with ACE includes in it #include "Common.h" #include "Database/DatabaseEnv.h" #include "Log.h" #include "Opcodes.h" #include "WorldPacket.h" #include "WorldSession.h" #include "Player.h" #include "ObjectMgr.h" #include "Group.h" #include "Guild.h" #include "World.h" #include "BattleGroundMgr.h" #include "MapManager.h" #include "SocialMgr.h" #include "Auth/AuthCrypt.h" #include "Auth/HMACSHA1.h" #include "zlib/zlib.h" // select opcodes appropriate for processing in Map::Update context for current session state static bool <API key>(WorldSession* session, OpcodeHandler const& opHandle) { // we do not process thread-unsafe packets if (opHandle.packetProcessing == <API key>) return false; // we do not process not loggined player packets Player * plr = session->GetPlayer(); if (!plr) return false; // in Map::Update() we do not process packets where player is not in world! return plr->IsInWorld(); } bool MapSessionFilter::Process(WorldPacket * packet) { OpcodeHandler const& opHandle = opcodeTable[packet->GetOpcode()]; if (opHandle.packetProcessing == PROCESS_INPLACE) return true; // let's check if our opcode can be really processed in Map::Update() return <API key>(m_pSession, opHandle); } // we should process ALL packets when player is not in world/logged in // OR packet handler is not thread-safe! 
bool WorldSessionFilter::Process(WorldPacket* packet) { OpcodeHandler const& opHandle = opcodeTable[packet->GetOpcode()]; // check if packet handler is supposed to be safe if (opHandle.packetProcessing == PROCESS_INPLACE) return true; // let's check if our opcode can't be processed in Map::Update() return !<API key>(m_pSession, opHandle); } WorldSession constructor WorldSession::WorldSession(uint32 id, WorldSocket *sock, AccountTypes sec, uint8 expansion, time_t mute_time, LocaleConstant locale) : <API key>(false), <API key>(false), m_muteTime(mute_time), _player(NULL), m_Socket(sock),_security(sec), _accountId(id), m_expansion(expansion), _logoutTime(0), m_inQueue(false), m_playerLoading(false), m_playerLogout(false), <API key>(false), m_playerSave(false), m_sessionDbcLocale(sWorld.<API key>(locale)), <API key>(sObjectMgr.GetIndexForLocale(locale)), m_latency(0), m_tutorialState(<API key>) { if (sock) { m_Address = sock->GetRemoteAddress (); sock->AddReference (); } } WorldSession destructor WorldSession::~WorldSession() { - unload player if not unloaded if (_player) LogoutPlayer (true); - If have unclosed socket, close it if (m_Socket) { m_Socket->CloseSocket (); m_Socket->RemoveReference (); m_Socket = NULL; } - empty incoming packet queue WorldPacket* packet; while(_recvQueue.next(packet)) delete packet; } void WorldSession::SizeError(WorldPacket const& packet, uint32 size) const { sLog.outError("Client (account %u) send packet %s (%u) with size " SIZEFMTD " but expected %u (attempt crash server?), skipped", GetAccountId(),LookupOpcodeName(packet.GetOpcode()),packet.GetOpcode(),packet.size(),size); } Get the player name char const* WorldSession::GetPlayerName() const { return GetPlayer() ? 
GetPlayer()->GetName() : "<none>"; } Send a packet to the client void WorldSession::SendPacket(WorldPacket const* packet) { if (!m_Socket) return; #ifdef MANGOS_DEBUG // Code for network use statistic static uint64 sendPacketCount = 0; static uint64 sendPacketBytes = 0; static time_t firstTime = time(NULL); static time_t lastTime = firstTime; // next 60 secs start time static uint64 sendLastPacketCount = 0; static uint64 sendLastPacketBytes = 0; time_t cur_time = time(NULL); if((cur_time - lastTime) < 60) { sendPacketCount+=1; sendPacketBytes+=packet->size(); sendLastPacketCount+=1; sendLastPacketBytes+=packet->size(); } else { uint64 minTime = uint64(cur_time - lastTime); uint64 fullTime = uint64(lastTime - firstTime); DETAIL_LOG("Send all time packets count: " UI64FMTD " bytes: " UI64FMTD " avr.count/sec: %f avr.bytes/sec: %f time: %u",sendPacketCount,sendPacketBytes,float(sendPacketCount)/fullTime,float(sendPacketBytes)/fullTime,uint32(fullTime)); DETAIL_LOG("Send last min packets count: " UI64FMTD " bytes: " UI64FMTD " avr.count/sec: %f avr.bytes/sec: %f",sendLastPacketCount,sendLastPacketBytes,float(sendLastPacketCount)/minTime,float(sendLastPacketBytes)/minTime); lastTime = cur_time; sendLastPacketCount = 1; sendLastPacketBytes = packet->wpos(); // wpos is real written size } #endif // !MANGOS_DEBUG if (m_Socket->SendPacket (*packet) == -1) m_Socket->CloseSocket (); } Add an incoming packet to the queue void WorldSession::QueuePacket(WorldPacket* new_packet) { _recvQueue.add(new_packet); } Logging helper for unexpected opcodes void WorldSession::LogUnexpectedOpcode(WorldPacket* packet, const char *reason) { sLog.outError( "SESSION: received unexpected opcode %s (0x%.4X) %s", LookupOpcodeName(packet->GetOpcode()), packet->GetOpcode(), reason); } Logging helper for unexpected opcodes void WorldSession::LogUnprocessedTail(WorldPacket *packet) { sLog.outError( "SESSION: opcode %s (0x%.4X) have unprocessed tail data (read stop at " SIZEFMTD " from " SIZEFMTD ")", 
LookupOpcodeName(packet->GetOpcode()), packet->GetOpcode(), packet->rpos(),packet->wpos()); } Update the WorldSession (triggered by World update) bool WorldSession::Update(uint32 diff, PacketFilter& updater) { - Retrieve packets from the receive queue and call the appropriate handlers not process packets if socket already closed WorldPacket* packet; while (m_Socket && !m_Socket->IsClosed() && _recvQueue.next(packet, updater)) { /*#if 1 sLog.outError( "MOEP: %s (0x%.4X)", LookupOpcodeName(packet->GetOpcode()), packet->GetOpcode()); #endif*/ OpcodeHandler const& opHandle = opcodeTable[packet->GetOpcode()]; try { switch (opHandle.status) { case STATUS_LOGGEDIN: if(!_player) { // skip STATUS_LOGGEDIN opcode unexpected errors if player logout sometime ago - this can be network lag delayed packets if(!<API key>) LogUnexpectedOpcode(packet, "the player has not logged in yet"); } else if(_player->IsInWorld()) ExecuteOpcode(opHandle, packet); // lag can cause STATUS_LOGGEDIN opcodes to arrive after the player started a transfer break; case <API key>: if(!_player && !<API key>) { LogUnexpectedOpcode(packet, "the player has not logged in yet and not recently logout"); } else // not expected _player or must checked in packet hanlder ExecuteOpcode(opHandle, packet); break; case STATUS_TRANSFER: if(!_player) LogUnexpectedOpcode(packet, "the player has not logged in yet"); else if(_player->IsInWorld()) LogUnexpectedOpcode(packet, "the player is still in world"); else ExecuteOpcode(opHandle, packet); break; case STATUS_AUTHED: // prevent cheating with skip queue wait if(m_inQueue) { LogUnexpectedOpcode(packet, "the player not pass queue yet"); break; } // single from authed time opcodes send in to after logout time // and before other <API key> opcodes. 
if (packet->GetOpcode() != <API key>) <API key> = false; ExecuteOpcode(opHandle, packet); break; case STATUS_NEVER: sLog.outError( "SESSION: received not allowed opcode %s (0x%.4X)", LookupOpcodeName(packet->GetOpcode()), packet->GetOpcode()); break; case STATUS_UNHANDLED: DEBUG_LOG("SESSION: received not handled opcode %s (0x%.4X)", LookupOpcodeName(packet->GetOpcode()), packet->GetOpcode()); break; default: sLog.outError("SESSION: received wrong-status-req opcode %s (0x%.4X)", LookupOpcodeName(packet->GetOpcode()), packet->GetOpcode()); break; } } catch (ByteBufferException &) { sLog.outError("WorldSession::Update ByteBufferException occured while parsing a packet (opcode: %u) from client %s, accountid=%i.", packet->GetOpcode(), GetRemoteAddress().c_str(), GetAccountId()); if (sLog.HasLogLevelOrHigher(LOG_LVL_DEBUG)) { sLog.outDebug("Dumping error causing packet:"); packet->hexlike(); } if (sWorld.getConfig(<API key>)) { DETAIL_LOG("Disconnecting session [account id %u / address %s] for badly formatted packet.", GetAccountId(), GetRemoteAddress().c_str()); KickPlayer(); } } delete packet; } - Cleanup socket pointer if need if (m_Socket && m_Socket->IsClosed ()) { m_Socket->RemoveReference (); m_Socket = NULL; } //check if we are safe to proceed with logout //logout procedure should happen only in World::UpdateSessions() method!!! 
if(updater.ProcessLogout()) { - If necessary, log the player out time_t currTime = time(NULL); if (!m_Socket || (ShouldLogOut(currTime) && !m_playerLoading)) LogoutPlayer(true); if (!m_Socket) return false; //Will remove this session from the world session map } return true; } %Log the player out void WorldSession::LogoutPlayer(bool Save) { // finish pending transfers before starting the logout while(_player && _player-><API key>()) <API key>(); m_playerLogout = true; m_playerSave = Save; if (_player) { sLog.outChar("Account: %d (IP: %s) Logout Character:[%s] (guid: %u)", GetAccountId(), GetRemoteAddress().c_str(), _player->GetName() ,_player->GetGUIDLow()); if (uint64 lguid = GetPlayer()->GetLootGUID()) DoLootRelease(lguid); - If the player just died before logging out, make him appear as a ghost //FIXME: logout must be delayed in case lost connection with client in time of combat if (_player->GetDeathTimer()) { _player-><API key>().deleteReferences(); _player->BuildPlayerRepop(); _player->RepopAtGraveyard(); } else if (!_player->getAttackers().empty()) { _player->CombatStop(); _player-><API key>().<API key>(false); _player-><API key>(); // build set of player who attack _player or who have pet attacking of _player std::set<Player*> aset; for(Unit::AttackerSet::const_iterator itr = _player->getAttackers().begin(); itr != _player->getAttackers().end(); ++itr) { Unit* owner = (*itr)->GetOwner(); // including player controlled case if(owner) { if(owner->GetTypeId()==TYPEID_PLAYER) aset.insert((Player*)owner); } else if((*itr)->GetTypeId()==TYPEID_PLAYER) aset.insert((Player*)(*itr)); } _player->SetPvPDeath(!aset.empty()); _player->KillPlayer(); _player->BuildPlayerRepop(); _player->RepopAtGraveyard(); // give honor to all attackers from set like group case for(std::set<Player*>::const_iterator itr = aset.begin(); itr != aset.end(); ++itr) (*itr)->RewardHonor(_player,aset.size()); // give bg rewards and update counters like kill by first from attackers // this can't 
be called for all attackers. if(!aset.empty()) if(BattleGround *bg = _player->GetBattleGround()) bg->HandleKillPlayer(_player,*aset.begin()); } else if(_player->HasAuraType(<API key>)) { // this will kill character by <API key> _player-><API key>(<API key>); //_player->SetDeathPvP(*); set at <API key> apply time _player->KillPlayer(); _player->BuildPlayerRepop(); _player->RepopAtGraveyard(); } //drop a flag if player is carrying it if(BattleGround *bg = _player->GetBattleGround()) bg-><API key>(_player); - Teleport to home if the player is in an invalid instance if(!_player->m_InstanceValid && !_player->isGameMaster()) { _player->TeleportToHomebind(); //this is a bad place to call for far teleport because we need player to be in world for successful logout //maybe we should implement delayed far teleport logout? } // FG: finish pending transfers after starting the logout // this should fix players beeing able to logout and login back with full hp at death position while(_player-><API key>()) <API key>(); for (int i=0; i < <API key>; ++i) { if(<API key> bgQueueTypeId = _player-><API key>(i)) { _player-><API key>(bgQueueTypeId); sBattleGroundMgr.<API key>[ bgQueueTypeId ].RemovePlayer(_player->GetObjectGuid(), true); } } - Reset the online field in the account table // no point resetting online in character table here as Player::SaveToDB() will set it to 1 since player has not been removed from world at this stage // No SQL injection as AccountID is uint32 LoginDatabase.PExecute("UPDATE account SET active_realm_id = 0 WHERE id = '%u'", GetAccountId()); - If the player is in a guild, update the guild roster and broadcast a logout message to other guild members if (Guild *guild = sObjectMgr.GetGuildById(_player->GetGuildId())) { if (MemberSlot* slot = guild->GetMemberSlot(_player->GetObjectGuid())) { slot->SetMemberStats(_player); slot->UpdateLogoutTime(); } guild->BroadcastEvent(GE_SIGNED_OFF, _player->GetGUID(), _player->GetName()); } - Remove pet 
_player->RemovePet(PET_SAVE_AS_CURRENT); - empty buyback items and save the player in the database // some save parts only correctly work in case player present in map/player_lists (pets, etc) if(Save) { uint32 eslot; for(int j = BUYBACK_SLOT_START; j < BUYBACK_SLOT_END; ++j) { eslot = j - BUYBACK_SLOT_START; _player->SetUInt64Value(<API key> + (eslot * 2), 0); _player->SetUInt32Value(<API key> + eslot, 0); _player->SetUInt32Value(<API key> + eslot, 0); } _player->SaveToDB(); } - Leave all channels before player delete... _player->CleanupChannels(); - If the player is in a group (or invited), remove him. If the group if then only 1 person, disband the group. _player->UninviteFromGroup(); // remove player from the group if he is: // a) in group; b) not in raid group; c) logging out normally (not being kicked or disconnected) if(_player->GetGroup() && !_player->GetGroup()->isRaidGroup() && m_Socket) _player->RemoveFromGroup(); - Send update to group if(_player->GetGroup()) _player->GetGroup()->SendUpdate(); - Broadcast a logout message to the player's friends sSocialMgr.SendFriendStatus(_player, FRIEND_OFFLINE, _player->GetObjectGuid(), true); sSocialMgr.RemovePlayerSocial (_player->GetGUIDLow ()); - Remove the player from the world // the player may not be in the world when logging out // e.g if he got disconnected during a transfer to another map // calls to GetMap in this case may cause crashes Map* _map = _player->GetMap(); _map->Remove(_player, true); SetPlayer(NULL); // deleted in Remove call - Send the 'logout complete' packet to the client WorldPacket data( <API key>, 0 ); SendPacket( &data ); - Since each account can only have one online character at any given time, ensure all characters for active account are marked as offline //No SQL injection as AccountId is uint32 CharacterDatabase.PExecute("UPDATE characters SET online = 0 WHERE account = '%u'", GetAccountId()); DEBUG_LOG( "SESSION: Sent <API key> Message" ); } m_playerLogout = false; m_playerSave = 
false; <API key> = true; LogoutRequest(0); } Kick a player out of the World void WorldSession::KickPlayer() { if (m_Socket) m_Socket->CloseSocket (); } Cancel channeling handler void WorldSession::<API key>(const char* Text, ...) { va_list ap; char szStr [1024]; szStr[0] = '\0'; va_start(ap, Text); vsnprintf( szStr, 1024, Text, ap ); va_end(ap); uint32 length = strlen(szStr)+1; WorldPacket data(<API key>, 4+length); data << length; data << szStr; SendPacket(&data); } void WorldSession::SendNotification(const char *format,...) { if(format) { va_list ap; char szStr [1024]; szStr[0] = '\0'; va_start(ap, format); vsnprintf( szStr, 1024, format, ap ); va_end(ap); WorldPacket data(SMSG_NOTIFICATION, (strlen(szStr)+1)); data << szStr; SendPacket(&data); } } void WorldSession::SendNotification(int32 string_id,...) { char const* format = GetMangosString(string_id); if(format) { va_list ap; char szStr [1024]; szStr[0] = '\0'; va_start(ap, string_id); vsnprintf( szStr, 1024, format, ap ); va_end(ap); WorldPacket data(SMSG_NOTIFICATION, (strlen(szStr)+1)); data << szStr; SendPacket(&data); } } void WorldSession::SendSetPhaseShift(uint32 PhaseShift) { WorldPacket data(<API key>, 4); data << uint32(PhaseShift); SendPacket(&data); } const char * WorldSession::GetMangosString( int32 entry ) const { return sObjectMgr.GetMangosString(entry,<API key>()); } void WorldSession::Handle_NULL( WorldPacket& recvPacket ) { DEBUG_LOG("SESSION: received unimplemented opcode %s (0x%.4X)", LookupOpcodeName(recvPacket.GetOpcode()), recvPacket.GetOpcode()); } void WorldSession::<API key>( WorldPacket& recvPacket ) { sLog.outError( "SESSION: received opcode %s (0x%.4X) that must be processed in WorldSocket::OnRead", LookupOpcodeName(recvPacket.GetOpcode()), recvPacket.GetOpcode()); } void WorldSession::Handle_ServerSide( WorldPacket& recvPacket ) { sLog.outError("SESSION: received server-side opcode %s (0x%.4X)", LookupOpcodeName(recvPacket.GetOpcode()), recvPacket.GetOpcode()); } void 
WorldSession::Handle_Deprecated( WorldPacket& recvPacket ) { sLog.outError( "SESSION: received deprecated opcode %s (0x%.4X)", LookupOpcodeName(recvPacket.GetOpcode()), recvPacket.GetOpcode()); } void WorldSession::SendAuthWaitQue(uint32 position) { if(position == 0) { WorldPacket packet( SMSG_AUTH_RESPONSE, 1 ); packet << uint8( AUTH_OK ); SendPacket(&packet); } else { WorldPacket packet( SMSG_AUTH_RESPONSE, 1+4+1 ); packet << uint8(AUTH_WAIT_QUEUE); packet << uint32(position); packet << uint8(0); // unk 3.3.0 SendPacket(&packet); } } void WorldSession::<API key>() { LoadAccountData( CharacterDatabase.PQuery("SELECT type, time, data FROM account_data WHERE account='%u'", GetAccountId()), GLOBAL_CACHE_MASK ); } void WorldSession::LoadAccountData(QueryResult* result, uint32 mask) { for (uint32 i = 0; i < <API key>; ++i) if (mask & (1 << i)) m_accountData[i] = AccountData(); if(!result) return; do { Field *fields = result->Fetch(); uint32 type = fields[0].GetUInt32(); if (type >= <API key>) { sLog.outError("Table `%s` have invalid account data type (%u), ignore.", mask == GLOBAL_CACHE_MASK ? "account_data" : "<API key>", type); continue; } if ((mask & (1 << type))==0) { sLog.outError("Table `%s` have non appropriate for table account data type (%u), ignore.", mask == GLOBAL_CACHE_MASK ? 
"account_data" : "<API key>", type); continue; } m_accountData[type].Time = time_t(fields[1].GetUInt64()); m_accountData[type].Data = fields[2].GetCppString(); } while (result->NextRow()); delete result; } void WorldSession::SetAccountData(AccountDataType type, time_t time_, std::string data) { if ((1 << type) & GLOBAL_CACHE_MASK) { uint32 acc = GetAccountId(); CharacterDatabase.BeginTransaction (); CharacterDatabase.PExecute("DELETE FROM account_data WHERE account='%u' AND type='%u'", acc, type); std::string safe_data = data; CharacterDatabase.escape_string(safe_data); CharacterDatabase.PExecute("INSERT INTO account_data VALUES ('%u','%u','" UI64FMTD "','%s')", acc, type, uint64(time_), safe_data.c_str()); CharacterDatabase.CommitTransaction (); } else { // _player can be NULL and packet received after logout but m_GUID still store correct guid if(!m_GUIDLow) return; CharacterDatabase.BeginTransaction (); CharacterDatabase.PExecute("DELETE FROM <API key> WHERE guid='%u' AND type='%u'", m_GUIDLow, type); std::string safe_data = data; CharacterDatabase.escape_string(safe_data); CharacterDatabase.PExecute("INSERT INTO <API key> VALUES ('%u','%u','" UI64FMTD "','%s')", m_GUIDLow, type, uint64(time_), safe_data.c_str()); CharacterDatabase.CommitTransaction (); } m_accountData[type].Time = time_; m_accountData[type].Data = data; } void WorldSession::<API key>(uint32 mask) { WorldPacket data( <API key>, 4+1+4+8*4 ); // changed in WotLK data << uint32(time(NULL)); // unix time of something data << uint8(1); data << uint32(mask); // type mask for(uint32 i = 0; i < <API key>; ++i) if(mask & (1 << i)) data << uint32(GetAccountData(AccountDataType(i))->Time);// also unix time SendPacket(&data); } void WorldSession::LoadTutorialsData() { for ( int aX = 0 ; aX < 8 ; ++aX ) m_Tutorials[ aX ] = 0; QueryResult *result = CharacterDatabase.PQuery("SELECT tut0,tut1,tut2,tut3,tut4,tut5,tut6,tut7 FROM character_tutorial WHERE account = '%u'", GetAccountId()); if(!result) { 
m_tutorialState = TUTORIALDATA_NEW; return; } do { Field *fields = result->Fetch(); for (int iI = 0; iI < 8; ++iI) m_Tutorials[iI] = fields[iI].GetUInt32(); } while( result->NextRow() ); delete result; m_tutorialState = <API key>; } void WorldSession::SendTutorialsData() { WorldPacket data(SMSG_TUTORIAL_FLAGS, 4*8); for(uint32 i = 0; i < 8; ++i) data << m_Tutorials[i]; SendPacket(&data); } void WorldSession::SaveTutorialsData() { switch(m_tutorialState) { case <API key>: CharacterDatabase.PExecute("UPDATE character_tutorial SET tut0='%u', tut1='%u', tut2='%u', tut3='%u', tut4='%u', tut5='%u', tut6='%u', tut7='%u' WHERE account = '%u'", m_Tutorials[0], m_Tutorials[1], m_Tutorials[2], m_Tutorials[3], m_Tutorials[4], m_Tutorials[5], m_Tutorials[6], m_Tutorials[7], GetAccountId()); break; case TUTORIALDATA_NEW: CharacterDatabase.PExecute("INSERT INTO character_tutorial (account,tut0,tut1,tut2,tut3,tut4,tut5,tut6,tut7) VALUES ('%u', '%u', '%u', '%u', '%u', '%u', '%u', '%u', '%u')", GetAccountId(), m_Tutorials[0], m_Tutorials[1], m_Tutorials[2], m_Tutorials[3], m_Tutorials[4], m_Tutorials[5], m_Tutorials[6], m_Tutorials[7]); break; case <API key>: break; } m_tutorialState = <API key>; } void WorldSession::ReadAddonsInfo(WorldPacket &data) { if (data.rpos() + 4 > data.size()) return; uint32 size; data >> size; if(!size) return; if(size > 0xFFFFF) { sLog.outError("WorldSession::ReadAddonsInfo addon info too big, size %u", size); return; } uLongf uSize = size; uint32 pos = data.rpos(); ByteBuffer addonInfo; addonInfo.resize(size); if (uncompress(const_cast<uint8*>(addonInfo.contents()), &uSize, const_cast<uint8*>(data.contents() + pos), data.size() - pos) == Z_OK) { uint32 addonsCount; addonInfo >> addonsCount; // addons count for(uint32 i = 0; i < addonsCount; ++i) { std::string addonName; uint8 enabled; uint32 crc, unk1; // check next addon data format correctness if(addonInfo.rpos()+1 > addonInfo.size()) return; addonInfo >> addonName; addonInfo >> enabled >> crc >> 
unk1; DEBUG_LOG("ADDON: Name: %s, Enabled: 0x%x, CRC: 0x%x, Unknown2: 0x%x", addonName.c_str(), enabled, crc, unk1); m_addonsList.push_back(AddonInfo(addonName, enabled, crc)); } uint32 unk2; addonInfo >> unk2; if(addonInfo.rpos() != addonInfo.size()) DEBUG_LOG("packet under read!"); } else sLog.outError("Addon packet uncompress error!"); } void WorldSession::SendAddonsInfo() { unsigned char tdata[256] = { 0xC3, 0x5B, 0x50, 0x84, 0xB9, 0x3E, 0x32, 0x42, 0x8C, 0xD0, 0xC7, 0x48, 0xFA, 0x0E, 0x5D, 0x54, 0x5A, 0xA3, 0x0E, 0x14, 0xBA, 0x9E, 0x0D, 0xB9, 0x5D, 0x8B, 0xEE, 0xB6, 0x84, 0x93, 0x45, 0x75, 0xFF, 0x31, 0xFE, 0x2F, 0x64, 0x3F, 0x3D, 0x6D, 0x07, 0xD9, 0x44, 0x9B, 0x40, 0x85, 0x59, 0x34, 0x4E, 0x10, 0xE1, 0xE7, 0x43, 0x69, 0xEF, 0x7C, 0x16, 0xFC, 0xB4, 0xED, 0x1B, 0x95, 0x28, 0xA8, 0x23, 0x76, 0x51, 0x31, 0x57, 0x30, 0x2B, 0x79, 0x08, 0x50, 0x10, 0x1C, 0x4A, 0x1A, 0x2C, 0xC8, 0x8B, 0x8F, 0x05, 0x2D, 0x22, 0x3D, 0xDB, 0x5A, 0x24, 0x7A, 0x0F, 0x13, 0x50, 0x37, 0x8F, 0x5A, 0xCC, 0x9E, 0x04, 0x44, 0x0E, 0x87, 0x01, 0xD4, 0xA3, 0x15, 0x94, 0x16, 0x34, 0xC6, 0xC2, 0xC3, 0xFB, 0x49, 0xFE, 0xE1, 0xF9, 0xDA, 0x8C, 0x50, 0x3C, 0xBE, 0x2C, 0xBB, 0x57, 0xED, 0x46, 0xB9, 0xAD, 0x8B, 0xC6, 0xDF, 0x0E, 0xD6, 0x0F, 0xBE, 0x80, 0xB3, 0x8B, 0x1E, 0x77, 0xCF, 0xAD, 0x22, 0xCF, 0xB7, 0x4B, 0xCF, 0xFB, 0xF0, 0x6B, 0x11, 0x45, 0x2D, 0x7A, 0x81, 0x18, 0xF2, 0x92, 0x7E, 0x98, 0x56, 0x5D, 0x5E, 0x69, 0x72, 0x0A, 0x0D, 0x03, 0x0A, 0x85, 0xA2, 0x85, 0x9C, 0xCB, 0xFB, 0x56, 0x6E, 0x8F, 0x44, 0xBB, 0x8F, 0x02, 0x22, 0x68, 0x63, 0x97, 0xBC, 0x85, 0xBA, 0xA8, 0xF7, 0xB5, 0x40, 0x68, 0x3C, 0x77, 0x86, 0x6F, 0x4B, 0xD7, 0x88, 0xCA, 0x8A, 0xD7, 0xCE, 0x36, 0xF0, 0x45, 0x6E, 0xD5, 0x64, 0x79, 0x0F, 0x17, 0xFC, 0x64, 0xDD, 0x10, 0x6F, 0xF3, 0xF5, 0xE0, 0xA6, 0xC3, 0xFB, 0x1B, 0x8C, 0x29, 0xEF, 0x8E, 0xE5, 0x34, 0xCB, 0xD1, 0x2A, 0xCE, 0x79, 0xC3, 0x9A, 0x0D, 0x36, 0xEA, 0x01, 0xE0, 0xAA, 0x91, 0x20, 0x54, 0xF0, 0x72, 0xD8, 0x1E, 0xC7, 0x89, 0xD2 }; WorldPacket data(SMSG_ADDON_INFO, 4); 
for(AddonsList::iterator itr = m_addonsList.begin(); itr != m_addonsList.end(); ++itr) { uint8 state = 2; // 2 is sent here data << uint8(state); uint8 unk1 = 1; // 1 is sent here data << uint8(unk1); if (unk1) { uint8 unk2 = (itr->CRC != 0x4c1c776d); // If addon is Standard addon CRC data << uint8(unk2); // if 1, than add addon public signature if (unk2) // if CRC is wrong, add public key (client need it) data.append(tdata, sizeof(tdata)); data << uint32(0); } uint8 unk3 = 0; // 0 is sent here data << uint8(unk3); // use <Addon>\<Addon>.url file or not if (unk3) { // String, 256 (null terminated?) data << uint8(0); } } m_addonsList.clear(); uint32 count = 0; data << uint32(count); // BannedAddons count /*for(uint32 i = 0; i < count; ++i) { uint32 string (16 bytes) string (16 bytes) uint32 uint32 uint32 }*/ SendPacket(&data); } void WorldSession::SetPlayer( Player *plr ) { _player = plr; // set m_GUID that can be used while player loggined and later until <API key> not reset if(_player) m_GUIDLow = _player->GetGUIDLow(); } void WorldSession::SendRedirectClient(std::string& ip, uint16 port) { uint32 ip2 = ACE_OS::inet_addr(ip.c_str()); WorldPacket pkt(<API key>, 4 + 2 + 4 + 20); pkt << uint32(ip2); // inet_addr(ipstr) pkt << uint16(port); // port pkt << uint32(GetLatency()); // latency-related? 
// Sign ip+port with the session key so the redirect target can verify the client.
HMACSHA1 sha1(20, m_Socket->GetSessionKey().AsByteArray());
sha1.UpdateData((uint8*)&ip2, 4);
sha1.UpdateData((uint8*)&port, 2);
sha1.Finalize();
pkt.append(sha1.GetDigest(), 20); // hmacsha1(ip+port) w/ sessionkey as seed
SendPacket(&pkt);
}

// Dispatch a single opcode to its handler, deferring any far teleport the
// handler requests until the handler has fully returned.
void WorldSession::ExecuteOpcode( OpcodeHandler const& opHandle, WorldPacket* packet )
{
    // need prevent do internal far teleports in handlers because some handlers do lot steps
    // or call code that can do far teleports in some conditions unexpectedly for generic way work code
    if (_player)
        _player->SetCanDelayTeleport(true);

    (this->*opHandle.handler)(*packet);

    if (_player)
    {
        // can be not set in fact for login opcode, but this not create problems.
        _player->SetCanDelayTeleport(false);

        //we should execute delayed teleports only for alive(!) players
        //because we don't want player's ghost teleported from graveyard
        if (_player-><API key>())
            _player->TeleportTo(_player->m_teleport_dest, _player->m_teleport_options);
    }

    // Warn about trailing unread bytes — usually a structure-mismatch bug.
    if (packet->rpos() < packet->wpos() && sLog.HasLogLevelOrHigher(LOG_LVL_DEBUG))
        LogUnprocessedTail(packet);
}
// Validate a score input against its allowed maximum.
// `id` has the form "name_row_col": the cell at `col` holds the entered
// score and the cell at `col + 1` holds the defined maximum score.
// Relies on page-level helpers toFloat(id) and $(id) — presumably they read
// and locate form fields by element id (NOTE(review): confirm against page).
function ValidarPuntaje(id) {
    var aux = id.split("_");
    var name=aux[0];
    var fil=parseInt(aux[1]);
    var col=parseInt(aux[2]);
    var colpuntaje=col;          // column of the entered score
    var colpuntajereal=col+1;    // column of the defined (maximum) score
    var puntaje=name+"_"+fil+"_"+colpuntaje;
    var puntajereal=name+"_"+fil+"_"+colpuntajereal;
    var num1=toFloat(puntaje);
    var num2=toFloat(puntajereal);
    if (num1>num2) {
        // Entered score exceeds the defined maximum: warn and reset to 0.00.
        alert("El puntaje introducido no puede ser mayor al definido: "+$(puntajereal).value);
        $(puntaje).value="0.00";
    }
}

// Recompute the document balance field (saldoc) from amount, surcharge,
// discount and payments, and write it back formatted to two decimals.
function totalizar() {
    var monrec=toFloat('cobdocume_recdoc');  // surcharge
    var dscdoc=toFloat('cobdocume_dscdoc');  // discount
    var abodoc=toFloat('cobdocume_abodoc');  // payments
    var mondoc=toFloat('cobdocume_mondoc');  // document amount
    var tototal= mondoc+monrec-dscdoc+abodoc;
    $('cobdocume_saldoc').value=format(tototal.toFixed(2),'.',',','.');
}
package de.unihd.dbs.heideltime.standalone.components.impl; import java.io.<API key>; import java.util.ArrayList; import java.util.List; import java.util.regex.MatchResult; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.uima.cas.impl.XmiCasSerializer; import org.apache.uima.jcas.JCas; import org.apache.uima.util.XMLSerializer; import de.unihd.dbs.heideltime.standalone.components.ResultFormatter; /** * Result formatter based on XMI. * * @see {@link org.apache.uima.examples.xmi.<API key>} * * @author Andreas Fay, University of Heidelberg * @version 1.0 */ public class XMIResultFormatter implements ResultFormatter { @Override public String format(JCas jcas) throws Exception { <API key> outStream = null; try { // Write XMI outStream = new <API key>(); XmiCasSerializer ser = new XmiCasSerializer(jcas.getTypeSystem()); XMLSerializer xmlSer = new XMLSerializer(outStream, false); ser.serialize(jcas.getCas(), xmlSer.getContentHandler()); // Convert output stream to string // String newOut = outStream.toString("UTF-8"); String newOut = outStream.toString(); // System.err.println("NEWOUT:"+newOut); // if (newOut.matches("^<\\?xml version=\"1.0\" encoding=\"UTF-8\"\\?>.*$")){ // newOut = newOut.replaceFirst("<\\?xml version=\"1.0\" encoding=\"UTF-8\"\\?>", // "<\\?xml version=\"1.0\" encoding=\""+Charset.defaultCharset().name()+"\"\\?>"); // if (newOut.matches("^.*?sofaString=\"(.*?)\".*$")){ // for (MatchResult r : findMatches(Pattern.compile("^(.*?sofaString=\")(.*?)(\".*)$"), newOut)){ // String stringBegin = r.group(1); // String sofaString = r.group(2); // System.err.println("SOFASTRING:"+sofaString); // String stringEnd = r.group(3); // // The sofaString is encoded as UTF-8. // // However, at this point it has to be translated back into the defaultCharset. 
// byte[] defaultDocText = new String(sofaString.getBytes(), "UTF-8").getBytes(Charset.defaultCharset().name()); // String docText = new String(defaultDocText); // System.err.println("DOCTEXT:"+docText); // newOut = stringBegin + docText + stringEnd; / newOut = newOut.replaceFirst("sofaString=\".*?\"", "sofaString=\"" + docText + "\""); // System.err.println("NEWOUT:"+newOut); return newOut; } finally { if (outStream != null) { outStream.close(); } } } /** * Find all the matches of a pattern in a charSequence and return the * results as list. * * @param pattern * @param s * @return */ public static Iterable<MatchResult> findMatches(Pattern pattern, CharSequence s) { List<MatchResult> results = new ArrayList<MatchResult>(); for (Matcher m = pattern.matcher(s); m.find();) results.add(m.toMatchResult()); return results; } }
#include "../inc/MarlinConfig.h"

#if ENABLED(SDSUPPORT)

#include "SdFile.h"

/**
 * Create a file object and open it in the current working directory.
 *
 * \param[in] path A path with a valid 8.3 DOS name for a file to be opened.
 *
 * \param[in] oflag Values for \a oflag are constructed by a bitwise-inclusive
 * OR of open flags. see SdBaseFile::open(SdBaseFile*, const char*, uint8_t).
 */
SdFile::SdFile(const char* path, uint8_t oflag) : SdBaseFile(path, oflag) { }

/**
 * Write data to an open file.
 *
 * \note Data is moved to the cache but may not be written to the
 * storage device until sync() is called.
 *
 * \param[in] buf Pointer to the location of the data to be written.
 *
 * \param[in] nbyte Number of bytes to write.
 *
 * \return For success write() returns the number of bytes written, always
 * \a nbyte. If an error occurs, write() returns -1. Possible errors
 * include write() is called before a file has been opened, write is called
 * for a read-only file, device is full, a corrupt file system or an I/O error.
 */
// Thin forwarder to the base-class implementation.
int16_t SdFile::write(const void* buf, uint16_t nbyte) { return SdBaseFile::write(buf, nbyte); }

/**
 * Write a byte to a file. Required by the Arduino Print class.
 * \param[in] b the byte to be written.
 * Use writeError to check for errors.
 */
// Arduino 1.0+ Print::write returns the byte count; earlier cores return void.
#if ARDUINO >= 100
  size_t SdFile::write(uint8_t b) { return SdBaseFile::write(&b, 1); }
#else
  void SdFile::write(uint8_t b) { SdBaseFile::write(&b, 1); }
#endif

/**
 * Write a string to a file. Used by the Arduino Print class.
 * \param[in] str Pointer to the string.
 * Use writeError to check for errors.
 */
SdFile::write(const char* str) { SdBaseFile::write(str, strlen(str)); }

/**
 * Write a PROGMEM string to a file.
 * \param[in] str Pointer to the PROGMEM string.
 * Use writeError to check for errors.
 */
// Copies byte-by-byte because PROGMEM data cannot be read via a plain pointer on AVR.
void SdFile::write_P(PGM_P str) {
  for (uint8_t c; (c = pgm_read_byte(str)); str++) write(c);
}

/**
 * Write a PROGMEM string followed by CR/LF to a file.
 * \param[in] str Pointer to the PROGMEM string.
 * Use writeError to check for errors.
 */
void SdFile::writeln_P(PGM_P str) {
  write_P(str);
  write_P(PSTR("\r\n"));
}

#endif // SDSUPPORT
<html><body>Karuda:<br> You can earn the following rewards:<br> <font color="LEVEL">S80 weapon recipe</font> - Requires 500 Cursed Grave Goods<br> <font color="LEVEL">Leonard</font> - Requires 8 Cursed Grave Goods<br> <font color="LEVEL">Adamantine</font> - Requires 15 Cursed Grave Goods<br> <font color="LEVEL">Orichalcum</font> - Requires 12 Cursed Grave Goods<br> Remember, I'm counting on you! </body></html>
#include "clipboard_windows.h"

#include <QMimeData>

// Registers the native Windows "Rich Text Format" clipboard format and
// bridges it to the Qt MIME type "text/rtf" in both directions.
RTF::Clipboard::Clipboard() : QWinMime() {
	CF_RTF = QWinMime::registerMimeType(QLatin1String("Rich Text Format"));
}

// True when the requested native format is RTF and the source MIME data
// actually carries "text/rtf".
bool RTF::Clipboard::canConvertFromMime(const FORMATETC& format, const QMimeData* mime_data) const {
	return (format.cfFormat == CF_RTF) && mime_data->hasFormat(QLatin1String("text/rtf"));
}

// True when the data object can supply RTF, either as HGLOBAL or as a stream.
bool RTF::Clipboard::canConvertToMime(const QString& mime_type, IDataObject* data_obj) const {
	bool result = false;
	if (mime_type == QLatin1String("text/rtf")) {
		// Also accept stream-backed data, not just HGLOBAL.
		FORMATETC format = initFormat();
		format.tymed |= TYMED_ISTREAM;
		result = (data_obj->QueryGetData(&format) == S_OK);
	}
	return result;
}

// Copy the "text/rtf" payload into a newly allocated HGLOBAL owned by the
// storage medium. Returns false on mismatch or allocation failure.
bool RTF::Clipboard::convertFromMime(const FORMATETC& format, const QMimeData* mime_data, STGMEDIUM* storage_medium) const {
	if (canConvertFromMime(format, mime_data)) {
		QByteArray data = mime_data->data(QLatin1String("text/rtf"));
		HANDLE data_handle = GlobalAlloc(0, data.size());
		if (!data_handle) {
			return false;
		}
		// NOTE(review): GlobalLock() result is not checked before memcpy —
		// assumed to succeed for a freshly allocated fixed handle; verify.
		void* data_ptr = GlobalLock(data_handle);
		memcpy(data_ptr, data.data(), data.size());
		GlobalUnlock(data_handle);
		storage_medium->tymed = TYMED_HGLOBAL;
		storage_medium->hGlobal = data_handle;
		storage_medium->pUnkForRelease = NULL;
		return true;
	}
	return false;
}

// Extract the RTF payload from the data object (HGLOBAL or IStream) and
// return it as a QByteArray variant; invalid QVariant when unavailable/empty.
QVariant RTF::Clipboard::convertToMime(const QString& mime_type, IDataObject* data_obj, QVariant::Type preferred_type) const {
	Q_UNUSED(preferred_type);

	QVariant result;
	if (canConvertToMime(mime_type, data_obj)) {
		QByteArray data;
		FORMATETC format = initFormat();
		format.tymed |= TYMED_ISTREAM;

		STGMEDIUM storage_medium;
		if (data_obj->GetData(&format, &storage_medium) == S_OK) {
			if (storage_medium.tymed == TYMED_HGLOBAL) {
				// Deep-copy (detach) before the global block is released.
				char* data_ptr = reinterpret_cast<char*>(GlobalLock(storage_medium.hGlobal));
				data = QByteArray::fromRawData(data_ptr, GlobalSize(storage_medium.hGlobal));
				data.detach();
				GlobalUnlock(storage_medium.hGlobal);
			} else if (storage_medium.tymed == TYMED_ISTREAM) {
				// Rewind, then drain the stream in fixed-size chunks;
				// a short read signals end-of-stream.
				char buffer[4096];
				ULONG amount_read = 0;
				LARGE_INTEGER pos = {{0, 0}};
				HRESULT stream_result = storage_medium.pstm->Seek(pos, STREAM_SEEK_SET, NULL);
				while (SUCCEEDED(stream_result)) {
					stream_result = storage_medium.pstm->Read(buffer, sizeof(buffer), &amount_read);
					if (SUCCEEDED(stream_result) && (amount_read > 0)) {
						data += QByteArray::fromRawData(buffer, amount_read);
					}
					if (amount_read != sizeof(buffer)) {
						break;
					}
				}
				data.detach();
			}
			ReleaseStgMedium(&storage_medium);
		}

		if (!data.isEmpty()) {
			result = data;
		}
	}
	return result;
}

// Native formats this converter can produce for "text/rtf".
QVector<FORMATETC> RTF::Clipboard::formatsForMime(const QString& mime_type, const QMimeData* mime_data) const {
	QVector<FORMATETC> result;
	if ((mime_type == QLatin1String("text/rtf")) && mime_data->hasFormat(QLatin1String("text/rtf"))) {
		result += initFormat();
	}
	return result;
}

// MIME type corresponding to the native RTF clipboard format, if any.
QString RTF::Clipboard::mimeForFormat(const FORMATETC& format) const {
	if (format.cfFormat == CF_RTF) {
		return QLatin1String("text/rtf");
	}
	return QString();
}

// Template FORMATETC describing the registered RTF clipboard format.
FORMATETC RTF::Clipboard::initFormat() const {
	FORMATETC format;
	format.cfFormat = CF_RTF;
	format.ptd = NULL;
	format.dwAspect = DVASPECT_CONTENT;
	format.lindex = -1;
	format.tymed = TYMED_HGLOBAL;
	return format;
}
<?php defined('PHALAPI_INSTALL') || die('no access'); ?>
<!DOCTYPE html>
<html lang="zh-CN">
<head>
    <meta charset="utf-8">
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width, initial-scale=1">
    <meta name="description" content="">
    <meta name="author" content="">
    <link rel="icon" href="http://webtools.qiniudn.com/dog_catch.png">

    <title> - PhalApi</title>

    <link href="./static/css/pintuer.css" rel="stylesheet">

    <!-- Just for debugging purposes. Don't actually copy these 2 lines! -->
    <!--[if lt IE 9]><script src="../../assets/js/<API key>.js"></script><![endif]-->
    <!-- <script src="../../assets/js/<API key>.js"></script> -->

    <!-- HTML5 shim and Respond.js for IE8 support of HTML5 elements and media queries -->
    <!--[if lt IE 9]>
    <script src="//cdn.bootcss.com/html5shiv/3.7.2/html5shiv.min.js"></script>
    <script src="//cdn.bootcss.com/respond.js/1.4.2/respond.min.js"></script>
    <![endif]-->
</head>
<style>
    body{ background-color:#333; color: #fff; }
    .window{ height: auto; margin: 0px auto;margin-top: 50px }
    .window_big{ width: 800px; }
    .window_small{ width: 600px; }
    .window_title{ border-radius: 4px 4px 0px 0px;padding: 20px; }
    .t_normal{ background-color: #FCB244 !important; }
    .t_error{ background-color: #DE4E4E }
    .t_success{ background-color: #7AC997 }
    .footer{text-align: center;color: #333;}
</style>
<body>
<div class="container">
#pragma once #include "../scenario/Scenario.h" #include "Object.h" class StexObject final : public Object { private: rct_stex_entry _legacyType = {}; public: explicit StexObject(const rct_object_entry& entry) : Object(entry) { } void* GetLegacyData() override { return &_legacyType; } void ReadLegacy(IReadObjectContext* context, IStream* stream) override; void Load() override; void Unload() override; void DrawPreview(rct_drawpixelinfo* dpi, int32_t width, int32_t height) const override; std::string GetName() const override; std::string GetScenarioName() const; std::string GetScenarioDetails() const; std::string GetParkName() const; };
#include "io/Blast.h"

#include <cstring>
#include <cstdlib>

#include <exception>

#include "io/log/Logger.h"

#define MAXBITS 13 /* maximum code length */
#define MAXWIN 4096 /* maximum window size */

namespace {

// Thrown by bits()/decode() when the input callback runs dry mid-stream.
struct <API key> : public std::exception { };

} // anonymous namespace

/* input and output state */
struct state {
	
	/* input state */
	blast_in infun; /* input function provided by user */
	void * inhow; /* opaque information passed to infun() */
	const unsigned char * in; /* next input location */
	unsigned left; /* available input at in */
	int bitbuf; /* bit buffer */
	int bitcnt; /* number of bits in bit buffer */
	
	/* output state */
	blast_out outfun; /* output function provided by user */
	void * outhow; /* opaque information passed to outfun() */
	unsigned next; /* index of next write location in out[] */
	int first; /* true to check distances (for first 4K) */
	unsigned char out[MAXWIN]; /* output buffer and sliding window */
	
};

/*
 * Return need bits from the input stream. This always leaves less than
 * eight bits in the buffer. bits() works properly for need == 0.
 *
 * Format notes:
 *
 * - Bits are stored in bytes from the least significant bit to the most
 *   significant bit. Therefore bits are dropped from the bottom of the bit
 *   buffer, using shift right, and new bytes are appended to the top of the
 *   bit buffer, using shift left.
 */
static int bits(state * s, int need) {
	
	int val; /* bit accumulator */
	
	/* load at least need bits into val */
	val = s->bitbuf;
	while(s->bitcnt < need) {
		if(s->left == 0) {
			s->left = s->infun(s->inhow, &(s->in));
			if(s->left == 0) {
				throw <API key>(); /* out of input */
			}
		}
		val |= int(*(s->in)++) << s->bitcnt; /* load eight bits */
		s->left--; /* consume the input byte (decrement lost in corrupted text) */
		s->bitcnt += 8;
	}
	
	/* drop need bits and update buffer, always zero to seven bits left */
	s->bitbuf = val >> need;
	s->bitcnt -= need;
	
	/* return need bits, zeroing the bits above that */
	return val & ((1 << need) - 1);
}

/*
 * Huffman code decoding tables.
count[1..MAXBITS] is the number of symbols of * each length, which for a canonical code are stepped through in order. * symbol[] are the symbol values in canonical order, where the number of * entries is the sum of the counts in count[]. The decoding process can be * seen in the function decode() below. */ struct huffman { short * count; /* number of symbols of each length */ short * symbol; /* canonically ordered symbols */ }; /* * Decode a code from the stream s using huffman table h. Return the symbol or * a negative value if there is an error. If all of the lengths are zero, i.e. * an empty code, or if the code is incomplete and an invalid code is received, * then -9 is returned after reading MAXBITS bits. * * Format notes: * * - The codes as stored in the compressed data are bit-reversed relative to * a simple integer ordering of codes of the same lengths. Hence below the * bits are pulled from the compressed data one at a time and used to * build the code value reversed from what is in the stream in order to * permit simple integer comparisons for decoding. * * - The first code for the shortest length is all ones. Subsequent codes of * the same length are simply integer decrements of the previous code. When * moving up a length, a one bit is appended to the code. For a complete * code, the last code of the longest length will be all zeros. To support * this ordering, the bits pulled during decoding are inverted to apply the * more "natural" ordering starting with all zeros and incrementing. 
*/ static int decode(state * s, huffman * h) { int len; /* current number of bits in code */ int code; /* len bits being decoded */ int first; /* first code of length len */ int count; /* number of codes of length len */ int index; /* index of first code of length len in symbol table */ int bitbuf; /* bits from stream */ int left; /* bits left in next or left to process */ short * next; /* next number of codes */ bitbuf = s->bitbuf; left = s->bitcnt; code = first = index = 0; len = 1; next = h->count + 1; while(true) { while(left code |= (bitbuf & 1) ^ 1; /* invert code */ bitbuf >>= 1; count = *next++; if(code < first + count) { /* if length len, return symbol */ s->bitbuf = bitbuf; s->bitcnt = (s->bitcnt - len) & 7; return h->symbol[index + (code - first)]; } index += count; /* else update for next length */ first += count; first <<= 1; code <<= 1; len++; } left = (MAXBITS + 1) - len; if(left == 0) break; if(s->left == 0) { s->left = s->infun(s->inhow, &(s->in)); if (s->left == 0) throw <API key>(); /* out of input */ } bitbuf = *(s->in)++; s->left if (left > 8) left = 8; } return -9; /* ran out of codes */ } /* * Given a list of repeated code lengths rep[0..n-1], where each byte is a * count (high four bits + 1) and a code length (low four bits), generate the * list of code lengths. This compaction reduces the size of the object code. * Then given the list of code lengths length[0..n-1] representing a canonical * Huffman code for n symbols, construct the tables required to decode those * codes. Those tables are the number of codes of each length, and the symbols * sorted by length, retaining their original order within each length. The * return value is zero for a complete code set, negative for an over- * subscribed code set, and positive for an incomplete code set. The tables * can be used if the return value is zero or positive, but they cannot be used * if the return value is negative. 
If the return value is zero, it is not * possible for decode() using that table to return an error--any stream of * enough bits will resolve to a symbol. If the return value is positive, then * it is possible for decode() using that table to return an error for received * codes past the end of the incomplete lengths. */ static int construct(huffman * h, const unsigned char * rep, int n) { int symbol; /* current symbol when stepping through length[] */ int len; /* current length when stepping through h->count[] */ int left; /* number of possible codes left of current length */ short offs[MAXBITS + 1]; /* offsets in symbol table for each length */ short length[256]; /* code lengths */ /* convert compact repeat counts into symbol bit length list */ symbol = 0; do { len = *rep++; left = (len >> 4) + 1; len &= 15; do { length[symbol++] = len; } while(--left); } while(--n); n = symbol; /* count number of codes of each length */ for(len = 0; len <= MAXBITS; len++) { h->count[len] = 0; } for(symbol = 0; symbol < n; symbol++) { (h->count[length[symbol]])++; /* assumes lengths are within bounds */ } if(h->count[0] == n) { /* no codes! 
*/ return 0; /* complete, but decode() will fail */ } /* check for an over-subscribed or incomplete set of lengths */ left = 1; /* one possible code of zero length */ for(len = 1; len <= MAXBITS; len++) { left <<= 1; /* one more bit, double codes left */ left -= h->count[len]; /* deduct count from possible codes */ if(left < 0) return left; /* <API key> negative */ } /* left > 0 means incomplete */ /* generate offsets into symbol table for each length for sorting */ offs[1] = 0; for(len = 1; len < MAXBITS; len++) { offs[len + 1] = offs[len] + h->count[len]; } /* * put symbols in table sorted by length, by symbol order within each * length */ for(symbol = 0; symbol < n; symbol++) { if(length[symbol] != 0) { h->symbol[offs[length[symbol]]++] = symbol; } } /* return zero for complete set, positive for incomplete set */ return left; } /* * Decode PKWare Compression Library stream. * * Format notes: * * - First byte is 0 if literals are uncoded or 1 if they are coded. Second * byte is 4, 5, or 6 for the number of extra bits in the distance code. * This is the base-2 logarithm of the dictionary size minus six. * * - Compressed data is a combination of literals and length/distance pairs * terminated by an end code. Literals are either Huffman coded or * uncoded bytes. A length/distance pair is a coded length followed by a * coded distance to represent a string that occurs earlier in the * uncompressed data that occurs again at the current location. * * - A bit preceding a literal or length/distance pair indicates which comes * next, 0 for literals, 1 for length/distance. * * - If literals are uncoded, then the next eight bits are the literal, in the * normal bit order in th stream, i.e. no bit-reversal is needed. Similarly, * no bit reversal is needed for either the length extra bits or the distance * extra bits. * * - Literal bytes are simply written to the output. A length/distance pair is * an instruction to copy previously uncompressed bytes to the output. 
The * copy is from distance bytes back in the output stream, copying for length * bytes. * * - Distances pointing before the beginning of the output data are not * permitted. * * - Overlapped copies, where the length is greater than the distance, are * allowed and common. For example, a distance of one and a length of 518 * simply copies the last byte 518 times. A distance of four and a length of * twelve copies the last four bytes three times. A simple forward copy * ignoring whether the length is greater than the distance or not implements * this correctly. */ static BlastResult blastDecompress(state * s) { int lit; /* true if literals are coded */ int dict; /* log2(dictionary size) - 6 */ int symbol; /* decoded symbol, extra bits for distance */ int len; /* length for copy */ int dist; /* distance for copy */ int copy; /* copy counter */ unsigned char * from, *to; /* copy pointers */ static int virgin = 1; /* build tables once */ static short litcnt[MAXBITS + 1], litsym[256]; /* litcode memory */ static short lencnt[MAXBITS + 1], lensym[16]; /* lencode memory */ static short distcnt[MAXBITS + 1], distsym[64]; /* distcode memory */ static huffman litcode = {litcnt, litsym}; /* length code */ static huffman lencode = {lencnt, lensym}; /* length code */ static huffman distcode = {distcnt, distsym};/* distance code */ /* bit lengths of literal codes */ static const unsigned char litlen[] = { 11, 124, 8, 7, 28, 7, 188, 13, 76, 4, 10, 8, 12, 10, 12, 10, 8, 23, 8, 9, 7, 6, 7, 8, 7, 6, 55, 8, 23, 24, 12, 11, 7, 9, 11, 12, 6, 7, 22, 5, 7, 24, 6, 11, 9, 6, 7, 22, 7, 11, 38, 7, 9, 8, 25, 11, 8, 11, 9, 12, 8, 12, 5, 38, 5, 38, 5, 11, 7, 5, 6, 21, 6, 10, 53, 8, 7, 24, 10, 27, 44, 253, 253, 253, 252, 252, 252, 13, 12, 45, 12, 45, 12, 61, 12, 45, 44, 173 }; /* bit lengths of length codes 0..15 */ static const unsigned char lenlen[] = {2, 35, 36, 53, 38, 23}; /* bit lengths of distance codes 0..63 */ static const unsigned char distlen[] = {2, 20, 53, 230, 247, 151, 248}; static 
const short base[16] = { /* base for length codes */ 3, 2, 4, 5, 6, 7, 8, 9, 10, 12, 16, 24, 40, 72, 136, 264 }; static const char extra[16] = { /* extra bits for length codes */ 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3, 4, 5, 6, 7, 8 }; /* set up decoding tables (once--might not be thread-safe) */ if(virgin) { construct(&litcode, litlen, sizeof(litlen)); construct(&lencode, lenlen, sizeof(lenlen)); construct(&distcode, distlen, sizeof(distlen)); virgin = 0; } /* read header */ lit = bits(s, 8); if (lit > 1) return <API key>; dict = bits(s, 8); if (dict < 4 || dict > 6) return <API key>; /* decode literals and length/distance pairs */ do { if(bits(s, 1)) { /* get length */ symbol = decode(s, &lencode); len = base[symbol] + bits(s, extra[symbol]); if (len == 519) break; /* end code */ /* get distance */ symbol = len == 2 ? 2 : dict; dist = decode(s, &distcode) << symbol; dist += bits(s, symbol); dist++; if(s->first && dist > int(s->next)) { return <API key>; } /* copy length bytes from distance bytes back */ do { to = s->out + s->next; from = to - dist; copy = MAXWIN; if(int(s->next) < dist) { from += copy; copy = dist; } copy -= s->next; if (copy > len) copy = len; len -= copy; s->next += copy; do { *to++ = *from++; } while(--copy); if(s->next == MAXWIN) { if(s->outfun(s->outhow, s->out, s->next)) return BLAST_OUTPUT_ERROR; s->next = 0; s->first = 0; } } while(len != 0); } else { /* get literal and write it */ symbol = lit ? 
decode(s, &litcode) : bits(s, 8); s->out[s->next++] = symbol; if(s->next == MAXWIN) { if(s->outfun(s->outhow, s->out, s->next)) return BLAST_OUTPUT_ERROR; s->next = 0; s->first = 0; } } } while(true); return BLAST_SUCCESS; } BlastResult blast(blast_in infun, void * inhow, blast_out outfun, void * outhow) { state s; // initialize input state s.infun = infun; s.inhow = inhow; s.left = 0; s.bitbuf = 0; s.bitcnt = 0; // initialize output state s.outfun = outfun; s.outhow = outhow; s.next = 0; s.first = 1; BlastResult err; try { err = blastDecompress(&s); } catch(const <API key> &) { err = <API key>; } // write any leftover output and update the error code if needed if(err != 1 && s.next && s.outfun(s.outhow, s.out, s.next) && err == 0) { err = BLAST_OUTPUT_ERROR; } return err; } // Additional functions. size_t blastInMem(void * param, const unsigned char ** buf) { BlastMemInBuffer * p = static_cast<BlastMemInBuffer *>(param); *buf = reinterpret_cast<const unsigned char *>(p->buf); size_t size = p->size; p->buf += size; p->size = 0; return size; } int blastOutString(void * param, unsigned char * buf, size_t len) { BlastMemOutString * p = static_cast<BlastMemOutString *>(param); p->buffer.append(reinterpret_cast<const char *>(buf), len); return 0; } std::string blast(const char * from, size_t fromSize, size_t toSizeHint) { std::string uncompressed; uncompressed.reserve(toSizeHint == size_t(-1) ? fromSize : toSizeHint); BlastMemInBuffer in(from, fromSize); BlastMemOutString out(uncompressed); BlastResult error = blast(blastInMem, &in, blastOutString, &out); if(error) { LogError << "blast error " << int(error) << " for " << fromSize; uncompressed.clear(); } return uncompressed; }
#!/bin/sh
# Build and push the task images to the local Docker registry.
# Exit on the first failure so a broken build is never pushed;
# treat unset variables as errors.
set -eu

IMAGES="test-r test-python"
IMAGE_VERSION="0.1"
DOCKER_REGISTRY="localhost:5000"

# ${IMAGES} is deliberately unquoted: word-splitting yields one image per word.
for IMAGE in ${IMAGES} ; do
    docker build --tag "${DOCKER_REGISTRY}/dpa/${IMAGE}:${IMAGE_VERSION}" "tasks/${IMAGE}"
    docker push "${DOCKER_REGISTRY}/dpa/${IMAGE}:${IMAGE_VERSION}"
done
from django.conf.urls.defaults import *

"""
Also used in cms.tests.ApphooksTestCase
"""

# URL configuration for the sample app. Views are given as dotted-string
# names resolved against the prefix below (legacy Django `patterns` style).
urlpatterns = patterns('cms.test_utils.project.sampleapp.views',
    # App root.
    url(r'^$', 'sample_view', {'message': 'sample root page',}, name='sample-root'),
    # Static sub-pages; each passes a distinct message to the shared view.
    url(r'^settings/$', 'sample_view', kwargs={'message': 'sample settings page'}, name='sample-settings'),
    url(r'^account/$', 'sample_view', {'message': 'sample account page'}, name='sample-account'),
    url(r'^account/my_profile/$', 'sample_view', {'message': 'sample my profile page'}, name='sample-profile'),
    # Category detail by numeric id.
    url(r'^(?P<id>[0-9]+)/$', 'category_view', name='category_view'),
    # View that raises/renders a not-found response (used in tests).
    url(r'^notfound/$', 'notfound', name='notfound'),
    url(r'^extra_1/$', 'extra_view', {'message': 'test urlconf'}, name='extra_first'),
    # Catch-all: delegate remaining paths to the extra urlconf.
    url(r'^', include('cms.test_utils.project.sampleapp.urls_extra')),
)
# Makefile for standard Robotnik component
#
BUILD = ./build/
BINDIR = ./bin/
SRC = ./src/
LIB = ./lib/
#LIBS = -L$(LIB) -lpthread -lremotelog -lcurses -lrt
INC = ./include/SerialDevice

CPP = g++
CCFLAGS = -Wall -c -g3 -I$(INC) # -g3 -Wno-deprecated -Wall

OBJECTS = \
	$(BUILD)SerialDevice.o \

# These targets do not produce files of the same name; declare them phony so
# stray files named `all`/`clean` can never mask them.
.PHONY: default all clean

default: $(BINDIR)$(EXE)

all: $(BINDIR)$(EXE)

$(BUILD)SerialDevice.o : $(SRC)SerialDevice.cc
	$(CPP) $(CCFLAGS) -o $(BUILD)SerialDevice.o $(SRC)SerialDevice.cc

#$(BINDIR)$(EXE) : $(OBJECTS)
#	$(CPP) -I$(INC) -o $(BINDIR)$(EXE) $(OBJECTS) $(LIBS)

clean:
	rm -fv $(BUILD)*.o
	rm -fv $(BINDIR)$(EXE)
#ifndef <API key>
#define <API key>

// Includes
#include "MantidCurveFitting/DllConfig.h"
#include "MantidAPI/FunctionDomain.h"
#include "MantidAPI/FunctionValues.h"
#include "MantidAPI/IDomainCreator.h"
#include "MantidCurveFitting/CostFunctions/<API key>.h"
#include "MantidCurveFitting/CostFunctions/CostFuncRwp.h"

#include <stdexcept>
#include <vector>
#include <algorithm>

namespace Mantid {
namespace CurveFitting {

/** A FunctionDomain composed of a sequence of sub-domains that are created
 * lazily by registered IDomainCreator instances.
 *
 * Fix: the `///` Doxygen markers on the member declarations had been
 * stripped, leaving bare prose tokens interleaved with the declarations,
 * which does not compile. The markers are restored below; no declaration
 * is otherwise changed.
 */
class <API key> SeqDomain : public API::FunctionDomain {
public:
  SeqDomain() : API::FunctionDomain(), m_currentIndex(0) {}
  /// Return the number of points in the domain
  size_t size() const override;
  /// Return the number of parts in the domain
  virtual size_t getNDomains() const;
  /// Create and return i-th domain and i-th values, (i-1)th domain is released.
  virtual void getDomainAndValues(size_t i, API::FunctionDomain_sptr &domain,
                                  API::FunctionValues_sptr &values) const;
  /// Add new domain creator
  void addCreator(API::IDomainCreator_sptr creator);
  /// Calculate the value of a least squares cost function
  virtual void leastSquaresVal(const CostFunctions::<API key> &leastSquares);
  /// Calculate the value, first and second derivatives of a least squares
  /// cost function
  virtual void <API key>(
      const CostFunctions::<API key> &leastSquares, bool evalDeriv,
      bool evalHessian);
  /// Calculate the value of a Rwp cost function
  void rwpVal(const CostFunctions::CostFuncRwp &rwp);
  /// Calculate the value, first and second derivatives of a RWP cost function
  void rwpValDerivHessian(const CostFunctions::CostFuncRwp &rwp, bool evalDeriv,
                          bool evalHessian);
  /// Create an instance of SeqDomain in one of two forms: either SeqDomain
  /// for sequential domain creation or ParDomain for parallel calculations
  static SeqDomain *create(API::IDomainCreator::DomainType type);

protected:
  /// Current index
  mutable size_t m_currentIndex;
  /// Currently active domain.
  mutable std::vector<API::FunctionDomain_sptr> m_domain;
  /// Currently active values.
  mutable std::vector<API::FunctionValues_sptr> m_values;
  /// Domain creators.
  std::vector<boost::shared_ptr<API::IDomainCreator>> m_creators;
};

} // namespace CurveFitting
} // namespace Mantid

#endif /*<API key>*/
#ifndef _XGGState_h_INCLUDE
#define _XGGState_h_INCLUDE

// Graphics state for the GNUstep X11 backend: wraps an X GC, drawable and
// clip region, plus optional Xft resources for anti-aliased text.

#include <Foundation/NSArray.h>
#include <Foundation/NSObject.h>
#include "gsc/GSGState.h"
#include <X11/Xlib.h>
#include <X11/Xutil.h>
#include "x11/XGServer.h"

#ifdef HAVE_XFT
// The Xft headers use `id` as a struct-field name, which collides with the
// Objective-C `id` keyword; rename it for the duration of the include.
#define id xwindowsid
#include <X11/Xft/Xft.h>
#undef id
#endif

@class NSBezierPath;
@class NSFont;

@interface XGGState : GSGState
{
@public
  void *context;
  void *windevice;
  XGDrawMechanism drawMechanism;
  // X graphics contexts: xgcntxt is the main GC, agcntxt is used for the
  // alpha buffer (see drawingAlpha below).
  GC xgcntxt;
  GC agcntxt;
  XGCValues gcv;
  // Destination drawable and a secondary drawable holding alpha coverage.
  Drawable draw;
  Drawable alpha_buffer;
  Region clipregion;
#ifdef HAVE_XFT
  // Xft draw targets and colour, mirroring draw/alpha_buffer above.
  XftDraw *xft_draw;
  XftDraw *xft_alpha_draw;
  XftColor xft_color;
#endif
  BOOL drawingAlpha;
  BOOL sharedGC; /* Do we own the GC or share it? */
}

// Attach this state to a backend window device.
- (void) setWindowDevice: (void *)device;
- (void) setGraphicContext: (GC)xGraphicContext;
- (void) setGCValues: (XGCValues)values withMask: (int)mask;
- (void) setClipMask;
- (Region) xClipRegion;
- (BOOL) hasDrawable;
- (BOOL) hasGraphicContext;
- (void *) windevice;
- (Drawable) drawable;
- (GC) graphicContext;
- (NSRect) clipRect;
#ifdef HAVE_XFT
- (XftDraw *)xftDrawForDrawable: (Drawable)d;
- (XftColor)xftColor;
#endif

// Coordinate conversions between GNUstep view/window space and X space.
- (XPoint) viewPointToX: (NSPoint)aPoint;
- (XRectangle) viewRectToX: (NSRect)aRect;
- (XPoint) windowPointToX: (NSPoint)aPoint;
- (XRectangle) windowRectToX: (NSRect)aRect;
@end

@interface XGGState (Ops)
- (NSDictionary *) GSReadRect: (NSRect)rect;
@end

#endif /* _XGGState_h_INCLUDE */
#!/usr/bin/env python3 # This file is part of ESPResSo. # ESPResSo is free software: you can redistribute it and/or modify # (at your option) any later version. # ESPResSo is distributed in the hope that it will be useful, # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the import os if not os.environ.get('CI_COMMIT_REF_NAME', '').startswith('PR-'): print("Not a pull request. Exiting now.") exit(0) import subprocess import gh_post SIZELIMIT = 10000 TOKEN_ESPRESSO_CI = 'style.patch' # Delete obsolete posts gh_post.<API key>(TOKEN_ESPRESSO_CI) MESSAGE = '''Your pull request does not meet our code formatting \ rules. {header}, please do one of the following: - You can download a patch with my suggested changes \ [here]({url}/artifacts/raw/style.patch), inspect it and make \ changes manually. - You can directly apply it to your repository by running \ `curl {url}/artifacts/raw/style.patch | git apply -`. - You can run `maintainer/CI/fix_style.sh` to automatically fix your coding \ style. This is the same command that I have executed to generate the patch \ above, but it requires certain tools to be installed on your computer. You can run `gitlab-runner exec docker style` afterwards to check if your \ changes worked out properly. Please note that there are often multiple ways to correctly format code. \ As I am just a robot, I sometimes fail to identify the most aesthetically \ pleasing way. 
So please look over my suggested changes and adapt them \ where the style does not make sense.\ ''' # If the working directory is not clean, post a new comment if subprocess.call(["git", "diff-index", "--quiet", "HEAD", " patch = subprocess.check_output(['git', '--no-pager', 'diff']) if len(patch) <= SIZELIMIT: comment = 'Specifically, I suggest you make the following changes:' comment += '\n```diff\n' comment += patch.decode('utf-8').replace('`', r'\`').strip() comment += '\n```\n' comment += 'To apply these changes' else: comment = 'To fix this' comment = MESSAGE.format(header=comment, url=gh_post.CI_JOB_URL) if patch: assert TOKEN_ESPRESSO_CI in comment gh_post.post_message(comment)
#include "config.h" #include "stats.h" #include "../libshared/record/reader.h" #include "../libshared/record/writer.h" #include "../libclient/canvas/aclfilter.h" #include <QCoreApplication> #include <QStringList> #include <QScopedPointer> #include <QRegularExpression> #include <QCommandLineParser> #include <QTextStream> #include <QFile> using namespace recording; void printVersion() { printf("dprectool " DRAWPILE_VERSION "\n"); printf("Protocol version: %s\n", qPrintable(protocol::ProtocolVersion::current().asString())); printf("Qt version: %s (compiled against %s)\n", qVersion(), QT_VERSION_STR); } bool convertRecording(const QString &inputfilename, const QString &outputfilename, const QString &outputFormat, bool doAclFiltering) { // Open input file Reader reader(inputfilename); Compatibility compat = reader.open(); switch(compat) { case INCOMPATIBLE: fprintf( stderr, "This recording is incompatible (format version %s). It was made with Drawpile version %s.\n", qPrintable(reader.formatVersion().asString()), qPrintable(reader.writerVersion()) ); return false; case NOT_DPREC: fprintf(stderr, "Input file is not a Drawpile recording!\n"); return false; case CANNOT_READ: fprintf(stderr, "Unable to read input file: %s\n", qPrintable(reader.errorString())); return false; case COMPATIBLE: case <API key>: case <API key>: // OK to proceed break; } // Open output file (stdout if no filename given) QScopedPointer<Writer> writer; if(outputfilename.isEmpty()) { // No output filename given? 
Write to stdout QFile *out = new QFile(); out->open(stdout, QFile::WriteOnly); writer.reset(new Writer(out)); out->setParent(writer.data()); writer->setEncoding(Writer::Encoding::Text); } else { writer.reset(new Writer(outputfilename)); } // Output format override if(outputFormat == "text") writer->setEncoding(Writer::Encoding::Text); else if(outputFormat == "binary") writer->setEncoding(Writer::Encoding::Binary); else if(!outputFormat.isEmpty()) { fprintf(stderr, "Invalid output format: %s\n", qPrintable(outputFormat)); return false; } // Open output file if(!writer->open()) { fprintf(stderr, "Couldn't open %s: %s\n", qPrintable(outputfilename), qPrintable(writer->errorString()) ); return false; } if(!writer->writeHeader(reader.metadata())) { fprintf(stderr, "Error while writing header: %s\n", qPrintable(writer->errorString()) ); return false; } // Prepare filters canvas::AclFilter aclFilter; aclFilter.reset(1, false); // Convert and/or filter recording bool notEof = true; do { MessageRecord mr = reader.readNext(); switch(mr.status) { case MessageRecord::OK: { if(doAclFiltering && !aclFilter.filterMessage(*mr.message)) { writer->writeMessage(*mr.message->asFiltered()); } else { if(!writer->writeMessage(*mr.message)) { fprintf(stderr, "Error while writing message: %s\n", qPrintable(writer->errorString()) ); return false; } } break; } case MessageRecord::INVALID: writer->writeComment(QStringLiteral("WARNING: Unrecognized message type %1 of length %2 at offset 0x%3") .arg(int(mr.invalid_type)) .arg(mr.invalid_len) .arg(reader.currentPosition()) ); break; case MessageRecord::END_OF_RECORDING: notEof = false; break; } } while(notEof); return true; } bool <API key>(const QString &inputFilename) { Reader reader(inputFilename); const Compatibility compat = reader.open(); char compatflag = '?'; switch(compat) { case COMPATIBLE: compatflag = 'C'; break; case <API key>: compatflag = 'M'; break; case <API key>: compatflag = 'U'; break; case INCOMPATIBLE: compatflag = 'I'; 
break; case NOT_DPREC: fprintf(stderr, "Not a drawpile recording!\n"); return false; case CANNOT_READ: fprintf(stderr, "Cannot read file: %s", qPrintable(reader.errorString())); return false; } printf("%c %s %s\n", compatflag, qPrintable(reader.formatVersion().asString()), reader.writerVersion().isEmpty() ? "(no writer version)" : qPrintable(reader.writerVersion()) ); return true; } int main(int argc, char *argv[]) { QCoreApplication app(argc, argv); QCoreApplication::setOrganizationName("drawpile"); QCoreApplication::<API key>("drawpile.net"); QCoreApplication::setApplicationName("dprectool"); QCoreApplication::<API key>(DRAWPILE_VERSION); // Set up command line arguments QCommandLineParser parser; parser.<API key>("Convert Drawpile recordings between text and binary formats"); parser.addHelpOption(); // --version, -v QCommandLineOption versionOption(QStringList() << "v" << "version", "Displays version information."); parser.addOption(versionOption); // --out, -o QCommandLineOption outOption(QStringList() << "o" << "out", "Output file", "output"); parser.addOption(outOption); // --format, -f QCommandLineOption formatOption(QStringList() << "f" << "format", "Output format (binary/text/version)", "format"); parser.addOption(formatOption); // --acl, -A QCommandLineOption aclOption(QStringList() << "A" << "acl", "Perform ACL filtering"); parser.addOption(aclOption); // --msg-freq QCommandLineOption msgFreqOption(QStringList() << "msg-freq", "Print message frequency table"); parser.addOption(msgFreqOption); // input file name parser.<API key>("input", "recording file", "<input.dprec>"); // Parse parser.process(app); if(parser.isSet(versionOption)) { printVersion(); return 0; } const QStringList inputfiles = parser.positionalArguments(); if(inputfiles.isEmpty()) { parser.showHelp(1); return 1; } const QString format = parser.value(formatOption); if(format == "version") { return !<API key>(inputfiles.at(0)); } if(parser.isSet(msgFreqOption)) { return <API 
key>(inputfiles.at(0)) ? 0 : 1; } if(!convertRecording( inputfiles.at(0), parser.value(outOption), parser.value(formatOption), parser.isSet(aclOption) )) return 1; return 0; }
// Doxygen-generated navigation index for class gr_interleave.
// Each entry is [member display name, documentation href, child entries].
// Generated file — do not edit by hand; regenerate with Doxygen instead.
var classgr__interleave =
[
    [ "~gr_interleave", "classgr__interleave.html#<API key>", null ],
    [ "check_topology", "classgr__interleave.html#<API key>", null ],
    [ "work", "classgr__interleave.html#<API key>", null ],
    [ "gr_make_interleave", "classgr__interleave.html#<API key>", null ]
];
(function(jQuery){jQuery.fn.addLittleSisToolbar=function(){var defaults={z_index:10002,height:180,width:100,background_color:'
! This file is F-compatible, except for upper/lower case conventions. ! Module CubatureRule_C2 USE Precision_Model, ONLY: stnd Implicit NONE PRIVATE PUBLIC :: Rule_C2a CONTAINS SUBROUTINE Rule_C2a(VER,INFOLD,AREA,NUMFUN,Integrand,BASVAL,RGNERR,NUM) ! !***BEGIN PROLOGUE Rule_C2a !***PURPOSE To compute basic integration rule values and ! corresponding error estimates. ! ***REVISION DATE 950531 (YYMMDD) (Fortran90 transformation) ! ***REVISION DATE 990527 (YYMMDD) (F transformation) ! ***AUTHOR ! Ronald Cools, Dept. of Computer Science, ! Katholieke Universiteit Leuven, Celestijnenlaan 200A, ! B-3001 Heverlee, Belgium ! Email: ronald@cs.kuleuven.ac.be ! ! ***REFERENCES ! The cubature formula of degree 13 with 37 points is from ! Rabinowitz & Richter. The tuning of the error estimator ! is described in: ! R. Cools. ! "The subdivision strategy and reliablity in adaptive ! integration revisited." ! Report TW 213, Dept. of Computer Science, K.U.Leuven, 1994. ! !***DESCRIPTION Rule_C2a computes basic integration rule values ! for a vector of integrands over a rectangular region. ! Rule_C2a also computes estimates for the errors by ! using several null rule approximations. ! ON ENTRY ! ! VER Real array of dimension (2,3). ! The coordinates of the vertices of the parallellogram. ! NUMFUN Integer. ! Number of components of the vector integrand. ! INFOLD Integer array ! Integrand Externally declared subroutine for computing ! all components of the integrand at the given ! evaluation point. ! It must have parameters (DIM,X,NUMFUN,FUNVLS) ! Input parameters: ! DIM = 2 ! X(1) The x-coordinate of the evaluation point. ! X(2) The y-coordinate of the evaluation point. ! NUMFUN Integer that defines the number of ! components of I. ! Output parameter: ! FUNVLS Real array of dimension NUMFUN ! that defines NUMFUN components of the integrand. ! ! ON RETURN ! ! BASVAL Real array of dimension NUMFUN. ! The values for the basic rule for each component ! of the integrand. ! 
RGNERR Real array of dimension NUMFUN. ! The error estimates for each component of the integrand. ! NUM Integer ! The number of function evaluations used. ! INFOLD Integer array ! !***ROUTINES CALLED Integrand !***END PROLOGUE Rule_C2a ! ! Global variables. ! INTERFACE FUNCTION Integrand(NUMFUN,X) RESULT(Value) USE Precision_Model INTEGER, INTENT(IN) :: NUMFUN REAL(kind=stnd), DIMENSION(:), INTENT(IN) :: X REAL(kind=stnd), DIMENSION(NUMFUN) :: Value END FUNCTION Integrand END INTERFACE INTEGER, INTENT(IN) :: NUMFUN INTEGER, INTENT(OUT) :: NUM INTEGER, DIMENSION(:), INTENT(IN OUT) :: INFOLD REAL(kind=stnd), INTENT(IN) :: AREA REAL(kind=stnd), DIMENSION(:,:), INTENT(IN) :: VER REAL(kind=stnd), DIMENSION(:), INTENT(OUT) :: BASVAL, RGNERR ! ! Parameters ! INTEGER, DIMENSION(0:3), PARAMETER :: & K = (/1,2,3,2/) ! Rule structure parameters INTEGER, PARAMETER :: & ORBITS = 8 ! Number of orbits in rule REAL(kind=stnd), PARAMETER :: & HALF = 0.5_stnd, & FOUR = 4.0_stnd, & CRIVAL = 0.4_stnd, & FACMED = 8.0_stnd, & FACOPT = FACMED/CRIVAL**2, & TRES = 50*EPSILON(HALF), & CUTOFF = 1.0E-4_stnd , & DFCLEV = 0.55_stnd REAL(kind=stnd), DIMENSION(0:2), PARAMETER :: & DFC = (/2.<API key>, & 1.0_stnd, & -2.<API key> /) ! ! Cubature formula of degree 13 with 37 points (Rabinowitz & Richter) ! ! ! Information for the generators ! INTEGER :: I REAL(kind=stnd), DIMENSION(1:2), PARAMETER :: & TYPE1 = (/ 0.<API key>, & 0.<API key> /) REAL(kind=stnd), DIMENSION(1:3), PARAMETER :: & TYPE2 = (/ 0.<API key>, & 0.<API key>, & 0.<API key> /) REAL(kind=stnd), DIMENSION(1:2,1:2), PARAMETER :: & TYPE3 = RESHAPE( SOURCE= & (/ 0.<API key>, & 0.<API key>, & 0.<API key>, & 0.<API key> /),& SHAPE=(/2,2/) ) ! The weights of the basic rule and the null rules. ! WEIGHT(1,1),...,WEIGHT(1,ORBITS) are weights for the basic rule. ! WEIGHT(I,1),...,WEIGHT(I,ORBITS) for I>1 are null rule weights. ! ! ! Weights of the cubature formula. ! 
REAL(kind=stnd), DIMENSION(ORBITS), PARAMETER :: & W1 = (/ & 2.<API key> , & 3.<API key> , & 1.<API key> , & 3.<API key> , & 1.<API key> , & 2.<API key> , & 3.<API key> , & 1.<API key> /) ! ! Weights of the rules of degree 7, 7, 5 , 5 , 3 , 3 and 1. ! REAL(kind=stnd), DIMENSION(ORBITS), PARAMETER :: & W2 = (/ & 7.<API key> , & 1.<API key> , & -2.<API key> , & 6.<API key> , & 2.<API key> , & 1.<API key> , & -1.<API key> , & -5.<API key> /) ! REAL(kind=stnd), DIMENSION(ORBITS), PARAMETER :: & W3 = (/ & 4.<API key> , & -1.<API key> , & -2.<API key> , & 1.<API key> , & -1.<API key> , & 1.<API key> , & -4.<API key> , & 1.<API key> /) ! REAL(kind=stnd), DIMENSION(ORBITS), PARAMETER :: & W4 = (/ & -5.<API key> , & 2.<API key> , & 1.<API key> , & 1.<API key> , & -7.<API key> , & 1.<API key> , & -1.<API key> , & 2.<API key> /) ! REAL(kind=stnd), DIMENSION(ORBITS), PARAMETER :: & W5 = (/ & -2.<API key> , & 2.<API key> , & -7.<API key> , & -1.<API key> , & -4.<API key> , & 2.<API key> , & 1.<API key> , & -1.<API key> /) ! REAL(kind=stnd), DIMENSION(ORBITS), PARAMETER :: & W6 = (/ & -3.<API key> , & 1.<API key> , & -4.<API key> , & -2.<API key> , & 6.<API key> , & -1.<API key> , & 5.<API key> , & 1.<API key> /) ! REAL(kind=stnd), DIMENSION(ORBITS), PARAMETER :: & W7 = (/ & 2.<API key> , & 2.<API key> , & -9.<API key> , & 4.<API key> , & -2.<API key> , & -2.<API key> , & -1.<API key> , & -2.<API key> /) ! REAL(kind=stnd), DIMENSION(ORBITS), PARAMETER :: & W8 = (/ & 2.<API key> , & -1.<API key> , & 1.<API key> , & -2.<API key> , & -8.<API key> , & 1.<API key> , & -1.<API key> , & 2.<API key> /) REAL(kind=stnd), DIMENSION(1:8,1:ORBITS), PARAMETER :: & WEIGHT = RESHAPE( SOURCE= (/ W1,W2,W3,W4,W5,W6,W7,W8 /),& SHAPE=(/8,ORBITS/), ORDER=(/2,1/) ) ! ! Local variables. ! 
INTEGER :: J,NUMBER,GENTYPE,NR,P REAL(kind=stnd):: R1,R2,R3,R,NOISE,DEG7,DEG5,DEG3,DEG1, & DIFFX,DIFFY,Z1,Z2 REAL(kind=stnd), DIMENSION(2,8) :: X REAL(kind=stnd), DIMENSION(NUMFUN,7) :: NullRule REAL(kind=stnd), DIMENSION(NUMFUN) :: SUMVAL ! !***FIRST EXECUTABLE STATEMENT Rule_C2a ! ! The number of points used by the cubature formula is ! NUM = K(0) + 4*K(1) + 4*K(2) + 8*K(3) NUM = 37 ! ! ! Initialise BASVAL and NullRule ! BASVAL = 0 NullRule = 0 P = 1 ! ! Compute contributions from orbits with 1, 4 and 8 points ! DO GENTYPE = 0,3 DO NR = 1,K(GENTYPE) SELECT CASE (GENTYPE) CASE (0) ! Generator ( 0 , 0 ) NUMBER = 1 X(:,1) = (VER(:,2)+VER(:,3))*HALF CASE (1) ! Generator ( z1 , 0 ) Z1 = TYPE1(NR) NUMBER = 4 Z1 = Z1*HALF X(:,1) = -VER(:,1)*Z1 + VER(:,2)*HALF + & VER(:,3)* (Z1+HALF) X(:,2) = VER(:,1)*Z1 + VER(:,2)*HALF + & VER(:,3)* (-Z1+HALF) X(:,3) = VER(:,1)*Z1 + VER(:,2)* (-Z1+HALF) + & VER(:,3)*HALF X(:,4) = -VER(:,1)*Z1 + VER(:,2)* (Z1+HALF) + & VER(:,3)*HALF CASE (2) ! Generator ( z(1) , z(1) ) Z1 = TYPE2(NR) NUMBER = 4 Z1 = Z1*HALF X(:,1) = -2*VER(:,1)*Z1 + VER(:,2)* (HALF+Z1) +& VER(:,3)* (Z1+HALF) X(:,2) = VER(:,2)* (HALF+Z1) + VER(:,3)* (-Z1+HALF) X(:,3) = VER(:,2)* (HALF-Z1) + VER(:,3)* (Z1+HALF) X(:,4) = 2*VER(:,1)*Z1 + VER(:,2)* (HALF-Z1) + & VER(:,3)* (-Z1+HALF) CASE (3) ! Generator ( z(1) , z(2) ) Z1 = TYPE3(1,NR)*HALF Z2 = TYPE3(2,NR)*HALF NUMBER = 8 X(:,1) = VER(:,1)* (-Z1-Z2) + & VER(:,2)* (HALF+Z2) + VER(:,3)* (HALF+Z1) X(:,2) = VER(:,1)* (+Z1-Z2) + & VER(:,2)* (HALF+Z2) + VER(:,3)* (HALF-Z1) X(:,3) = VER(:,1)* (-Z1+Z2) + & VER(:,2)* (HALF-Z2) + VER(:,3)* (HALF+Z1) X(:,4) = VER(:,1)* (+Z1+Z2) + & VER(:,2)* (HALF-Z2) + VER(:,3)* (HALF-Z1) X(:,5) = VER(:,1)* (-Z1-Z2) + & VER(:,2)* (HALF+Z1) + VER(:,3)* (HALF+Z2) X(:,6) = VER(:,1)* (+Z2-Z1) + & VER(:,2)* (HALF+Z1) + VER(:,3)* (HALF-Z2) X(:,7) = VER(:,1)* (-Z2+Z1) + & VER(:,2)* (HALF-Z1) + VER(:,3)* (HALF+Z2) X(:,8) = VER(:,1)* (+Z1+Z2) + & VER(:,2)* (HALF-Z1) + VER(:,3)* (HALF-Z2) END SELECT ! 
CALL Integrand(2,X(1,1),NUMFUN,SUMVAL) SUMVAL = Integrand(NUMFUN,X(:,1)) SELECT CASE (GENTYPE) CASE (0) DIFFy = SUMVAL(1)*DFC(0) DIFFx = DIFFy CASE (1) DIFFy = DIFFy + SUMVAL(1)*DFC(NR) END SELECT DO J = 2,NUMBER RGNERR = Integrand(NUMFUN,X(:,J)) ! CALL Integrand(2,X(1,J),NUMFUN,RGNERR) IF (GENTYPE == 1) THEN IF (J <= 2) THEN DIFFy = DIFFy + RGNERR(1)*DFC(NR) ELSE DIFFx = DIFFx + RGNERR(1)*DFC(NR) END IF END IF DO I = 1,NUMFUN SUMVAL(I) = SUMVAL(I) + RGNERR(I) END DO END DO DO J = 1,NUMFUN BASVAL(J) = BASVAL(J) + WEIGHT(1,P)*SUMVAL(J) DO I = 1,7 NullRule(J,I) = NullRule(J,I) + WEIGHT(I+1,P)*SUMVAL(J) END DO END DO P = P + 1 END DO END DO ! ! Decide on future subdivision direction ! DIFFy = ABS(DIFFy) DIFFx = ABS(DIFFx) IF (MAX(DIFFy,DIFFx) < CUTOFF) THEN INFOLD(4) = 0 ELSE IF (DIFFy < DFCLEV*DIFFx) THEN INFOLD(4) = 1 ELSE IF (DIFFx < DFCLEV*DIFFy) THEN INFOLD(4) = 2 ELSE INFOLD(4) = 0 END IF ! ! Compute errors. ! DO J = 1,NUMFUN NOISE = ABS(BASVAL(J))*TRES DEG7 = SQRT(NullRule(J,1)**2+NullRule(J,2)**2) IF (DEG7 <= NOISE) THEN RGNERR(J) = NOISE ELSE DEG5 = SQRT(NullRule(J,3)**2+NullRule(J,4)**2) DEG3 = SQRT(NullRule(J,5)**2+NullRule(J,6)**2) DEG1 = SQRT(NullRule(J,7)**2+NullRule(J,6)**2) IF (DEG5 /= 0) THEN R1 = DEG7/DEG5 ELSE R1 = 1 END IF IF (DEG3 /= 0) THEN R2 = DEG5/DEG3 ELSE R2 = 1 END IF IF (DEG1 /= 0) THEN R3 = DEG3/DEG1 ELSE R3 = 1 END IF R = MAX(R1,R2,R3) IF (R >= 1) THEN INFOLD(5) = 0 RGNERR(J) = FACMED*DEG7 ELSE IF (R >= CRIVAL) THEN INFOLD(5) = 0 RGNERR(J) = FACMED*DEG7*R ELSE INFOLD(5) = 1 RGNERR(J) = FACOPT* (R**3)*DEG7 END IF RGNERR(J) = MAX(NOISE,RGNERR(J)) END IF RGNERR(J) = AREA*RGNERR(J)/FOUR BASVAL(J) = AREA*BASVAL(J)/FOUR END DO RETURN END SUBROUTINE Rule_C2a END Module CubatureRule_C2
#ifndef CA_USERTOOLS_H
#define CA_USERTOOLS_H

#include <vector>

namespace ca {
// Build a vector of consecutive integers starting at `from`.
// NOTE(review): presumably the half-open interval [from, upto) —
// confirm against the implementation before relying on the bound.
std::vector<int> range(int from, int upto);
}

#endif // CA_USERTOOLS_H
// This program is free software; you can redistribute it and/or // published by the Free Software Foundation; either version 3 of // This program is distributed in the hope that it will be useful, // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU // with this program; if not, write to the Free Software Foundation, Inc., // 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. // Linking this library statically or dynamically with other modules is making // a combined work based on this library. Thus, the terms and conditions of #include "tests/testUtils.hpp" <API key>(messageTest) <API key> VMIME_TEST(<API key>) VMIME_TEST_LIST_END void <API key>() { vmime::generationContext ctx; vmime::shared_ptr <vmime::message> msg = vmime::make_shared <vmime::message>(); msg->getHeader()->getField("Foo")->setValue(vmime::string("bar")); vmime::htmlTextPart textPart; textPart.setPlainText(vmime::make_shared <vmime::<API key>>("Foo bar bazé foo foo foo")); textPart.setText(vmime::make_shared <vmime::<API key>>("Foo bar <strong>bazé</strong> foo foo foo")); textPart.generateIn(msg, msg); // Estimated/computed generated size must be greater than the actual generated size const vmime::size_t genSize = msg->getGeneratedSize(ctx); const vmime::size_t actualSize = msg->generate().length(); std::ostringstream oss; oss << "estimated size (" << genSize << ") >= actual size (" << actualSize << ")"; VASSERT(oss.str(), genSize >= actualSize); } <API key>
#include <stdint.h> #include <platform.h> #include "drivers/io.h" #include "drivers/pwm_mapping.h" const uint16_t multiPPM[] = { PWM1 | (MAP_TO_PPM_INPUT << 8), // PPM input PWM9 | (MAP_TO_MOTOR_OUTPUT << 8), // Swap to servo if needed PWM10 | (MAP_TO_MOTOR_OUTPUT << 8), // Swap to servo if needed PWM11 | (MAP_TO_MOTOR_OUTPUT << 8), PWM12 | (MAP_TO_MOTOR_OUTPUT << 8), PWM13 | (MAP_TO_MOTOR_OUTPUT << 8), PWM14 | (MAP_TO_MOTOR_OUTPUT << 8), PWM5 | (MAP_TO_MOTOR_OUTPUT << 8), // Swap to servo if needed PWM6 | (MAP_TO_MOTOR_OUTPUT << 8), // Swap to servo if needed PWM7 | (MAP_TO_MOTOR_OUTPUT << 8), // Swap to servo if needed PWM8 | (MAP_TO_MOTOR_OUTPUT << 8), // Swap to servo if needed 0xFFFF }; const uint16_t multiPWM[] = { PWM1 | (MAP_TO_PWM_INPUT << 8), // input PWM2 | (MAP_TO_PWM_INPUT << 8), PWM3 | (MAP_TO_PWM_INPUT << 8), PWM4 | (MAP_TO_PWM_INPUT << 8), PWM5 | (MAP_TO_PWM_INPUT << 8), PWM6 | (MAP_TO_PWM_INPUT << 8), PWM7 | (MAP_TO_PWM_INPUT << 8), PWM8 | (MAP_TO_PWM_INPUT << 8), // input PWM9 | (MAP_TO_MOTOR_OUTPUT << 8), // motor #1 or servo #1 (swap to servo if needed) PWM10 | (MAP_TO_MOTOR_OUTPUT << 8), // motor #2 or servo #2 (swap to servo if needed) PWM11 | (MAP_TO_MOTOR_OUTPUT << 8), // motor #1 or #3 PWM12 | (MAP_TO_MOTOR_OUTPUT << 8), PWM13 | (MAP_TO_MOTOR_OUTPUT << 8), PWM14 | (MAP_TO_MOTOR_OUTPUT << 8), // motor #4 or #6 0xFFFF }; const uint16_t airPPM[] = { PWM1 | (MAP_TO_PPM_INPUT << 8), // PPM input PWM9 | (MAP_TO_MOTOR_OUTPUT << 8), // motor PWM10 | (MAP_TO_MOTOR_OUTPUT << 8), // motor PWM11 | (MAP_TO_SERVO_OUTPUT << 8), // servo PWM12 | (MAP_TO_SERVO_OUTPUT << 8), PWM13 | (MAP_TO_SERVO_OUTPUT << 8), PWM14 | (MAP_TO_SERVO_OUTPUT << 8), // servo PWM5 | (MAP_TO_SERVO_OUTPUT << 8), // servo PWM6 | (MAP_TO_SERVO_OUTPUT << 8), PWM7 | (MAP_TO_SERVO_OUTPUT << 8), PWM8 | (MAP_TO_SERVO_OUTPUT << 8), // servo 0xFFFF }; const uint16_t airPWM[] = { PWM1 | (MAP_TO_PWM_INPUT << 8), // input PWM2 | (MAP_TO_PWM_INPUT << 8), PWM3 | (MAP_TO_PWM_INPUT << 8), PWM4 | 
(MAP_TO_PWM_INPUT << 8), PWM5 | (MAP_TO_PWM_INPUT << 8), PWM6 | (MAP_TO_PWM_INPUT << 8), PWM7 | (MAP_TO_PWM_INPUT << 8), PWM8 | (MAP_TO_PWM_INPUT << 8), // input PWM9 | (MAP_TO_MOTOR_OUTPUT << 8), // motor PWM10 | (MAP_TO_MOTOR_OUTPUT << 8), // motor PWM11 | (MAP_TO_SERVO_OUTPUT << 8), // servo PWM12 | (MAP_TO_SERVO_OUTPUT << 8), PWM13 | (MAP_TO_SERVO_OUTPUT << 8), PWM14 | (MAP_TO_SERVO_OUTPUT << 8), // servo 0xFFFF }; const timerHardware_t timerHardware[<API key>] = { { TIM1, IO_TAG(PA8), TIM_Channel_1, TIM1_CC_IRQn, 1, IOCFG_AF_PP_PD, GPIO_AF_6 }, // PWM1 - PA8 { TIM16, IO_TAG(PB8), TIM_Channel_1, TIM1_UP_TIM16_IRQn, 0, IOCFG_AF_PP_PD, GPIO_AF_1 }, // PWM2 - PB8 { TIM17, IO_TAG(PB9), TIM_Channel_1, <API key>, 0, IOCFG_AF_PP_PD, GPIO_AF_1 }, // PWM3 - PB9 { TIM8, IO_TAG(PC6), TIM_Channel_1, TIM8_CC_IRQn, 1, IOCFG_AF_PP_PD, GPIO_AF_4 }, // PWM4 - PC6 { TIM8, IO_TAG(PC7), TIM_Channel_2, TIM8_CC_IRQn, 1, IOCFG_AF_PP_PD, GPIO_AF_4 }, // PWM5 - PC7 { TIM8, IO_TAG(PC8), TIM_Channel_3, TIM8_CC_IRQn, 1, IOCFG_AF_PP_PD, GPIO_AF_4 }, // PWM6 - PC8 { TIM3, IO_TAG(PB1), TIM_Channel_4, TIM3_IRQn, 0, IOCFG_AF_PP_PD, GPIO_AF_2 }, // PWM7 - PB1 { TIM3, IO_TAG(PA4), TIM_Channel_2, TIM3_IRQn, 0, IOCFG_AF_PP_PD, GPIO_AF_2 }, // PWM8 - PA2 { TIM4, IO_TAG(PD12), TIM_Channel_1, TIM4_IRQn, 0, IOCFG_AF_PP, GPIO_AF_2 }, // PWM9 - PD12 { TIM4, IO_TAG(PD13), TIM_Channel_2, TIM4_IRQn, 0, IOCFG_AF_PP, GPIO_AF_2 }, // PWM10 - PD13 { TIM4, IO_TAG(PD14), TIM_Channel_3, TIM4_IRQn, 0, IOCFG_AF_PP, GPIO_AF_2 }, // PWM11 - PD14 { TIM4, IO_TAG(PD15), TIM_Channel_4, TIM4_IRQn, 0, IOCFG_AF_PP, GPIO_AF_2 }, // PWM12 - PD15 { TIM2, IO_TAG(PA1), TIM_Channel_2, TIM2_IRQn, 0, IOCFG_AF_PP, GPIO_AF_1 }, // PWM13 - PA1 { TIM2, IO_TAG(PA2), TIM_Channel_3, TIM2_IRQn, 0, IOCFG_AF_PP, GPIO_AF_1 } // PWM14 - PA2 };
#pragma ident "@(#) libfi/mpp_scan/scanmax_sp_6.c 92.1 07/13/99 10:21:33" #include <stdlib.h> #include <liberrno.h> #include <fmath.h> #include <cray/dopevec.h> #include "f90_macros.h" #define RANK 6 /* * Compiler generated call: CALL _SCANMAX_SP6(RES, SRC, STOP, DIM, MASK) * * Purpose: Determine the maximum value of the elements of SRC * along dimension DIM corresponding to the true elements * of MASK. This particular routine handles source arrays * of rank 6 with a data type of 64-bit floating point. * * Arguments: * RES - Dope vector for temporary result array * SRC - Dope vector for user source array * STOP - Dope vector for stop array * DIM - Dimension to operate along * MASK - Dope vector for logical mask array * * Description: * This is the MPP version of SCANMAX. This particular * file contains the the intermediate type-specific * routines. These routines parse and update the dope * vectors, allocate either shared or private space for * the result temporary, and possibly update the shared * data desriptor (sdd) for the result temporary. Once * this set-up work is complete, a Fortran subroutine * is called which uses features from the Fortran * Programming Model to distribute the word across all * processors. * * Include file segmented_scan_p.h contains the rank independent * source code for this routine. */ void _SCANMAX_SP6 ( DopeVectorType *result, DopeVectorType *source, DopeVectorType *stop, long *dim, DopeVectorType *mask) { #include "segmented_scan_p.h" if (stop_flag > 0) { if (mask_flag == 1) { SCANMAX_MASK_SP6@ (result_sdd_ptr, source_sdd_ptr, stop_sdd_ptr, &dim_val, mask_sdd_ptr, src_extents, blkcnts); } else { SCANMAX_NOMASK_SP6@ (result_sdd_ptr, source_sdd_ptr, stop_sdd_ptr, &dim_val, src_extents, blkcnts); } } }
/* Optimized BLAS libraries */ /* By Kazushige Goto <kgoto@tacc.utexas.edu> */ /* UNIVERSITY EXPRESSLY DISCLAIMS ANY AND ALL WARRANTIES CONCERNING */ /* THIS SOFTWARE AND DOCUMENTATION, INCLUDING ANY WARRANTIES OF */ /* MERCHANTABILITY, FITNESS FOR ANY PARTICULAR PURPOSE, */ /* THAT MIGHT OTHERWISE ARISE FROM COURSE OF DEALING OR USAGE OF */ /* THE USE OF THE SOFTWARE OR DOCUMENTATION. */ /* Under no circumstances shall University be liable for incidental, */ /* special, indirect, direct or consequential damages or loss of */ /* profits, interruption of business, or related expenses which may */ /* arise from use of Software or Documentation, including but not */ /* limited to those resulting from defects in Software and/or */ /* Documentation, or loss or inaccuracy of data of any kind. */ #include <stdio.h> #include "common.h" #ifndef SMP #define blas_cpu_number 1 #else int blas_cpu_number = 1; int blas_get_cpu_number(void){ return blas_cpu_number; } #endif #define FIXED_PAGESIZE 4096 void *sa = NULL; void *sb = NULL; static double static_buffer[BUFFER_SIZE/sizeof(double)]; void *blas_memory_alloc(int numproc){ if (sa == NULL){ #if 1 sa = (void *)qalloc(QFAST, BUFFER_SIZE); #else sa = (void *)malloc(BUFFER_SIZE); #endif sb = (void *)&static_buffer[0]; } return sa; } void blas_memory_free(void *free_area){ return; }
/* C declarations for walking EBYEDAT (exogam) event buffers.
   Wrapped in extern "C" so C++ callers link against the C symbols. */
#ifdef __cplusplus
extern "C" {
#endif

/* Get next in EBYEDAT data buffers (exogam) */
/* NOTE(review): parameter roles inferred from names only — Buffer is the
   raw data buffer, EvtAddr receives the event start address, EvtNum the
   event number; confirm against the implementation. */
int <API key>(UNSINT16* Buffer, UNSINT16** EvtAddr, int* EvtNum,
              int EvtFormat);

/* Get next in EBYEDAT data buffers (exogam) ; reentrant version */
/* The extra NextEvent cursor carries the iteration state between calls,
   replacing whatever internal state the non-reentrant variant keeps. */
int <API key>(UNSINT16* Buffer, UNSINT16** EvtAddr, int* EvtNum,
              int EvtFormat, UNSINT16** NextEvent);

#ifdef __cplusplus
}
#endif
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the

import { observer } from 'mobx-react';
import React, { Component, PropTypes } from 'react';
import { FormattedMessage } from 'react-intl';

import shapeshiftLogo from '~/../assets/images/shapeshift-logo.png';
import { Button, IdentityIcon, Portal } from '~/ui';
import { CancelIcon, DoneIcon } from '~/ui/Icons';

import AwaitingDepositStep from './AwaitingDepositStep';
import <API key> from './<API key>';
import CompletedStep from './CompletedStep';
import ErrorStep from './ErrorStep';
import OptionsStep from './OptionsStep';
import Store, { STAGE_COMPLETED, STAGE_OPTIONS, STAGE_WAIT_DEPOSIT, STAGE_WAIT_EXCHANGE } from './store';

import styles from './shapeshift.css';

// Localized step titles for the Portal stepper, indexed by stage constant
// (details -> deposit -> exchange -> completed).
const STAGE_TITLES = [
  <FormattedMessage
    id='shapeshift.title.details'
    defaultMessage='details'
  />,
  <FormattedMessage
    id='shapeshift.title.deposit'
    defaultMessage='awaiting deposit'
  />,
  <FormattedMessage
    id='shapeshift.title.exchange'
    defaultMessage='awaiting exchange'
  />,
  <FormattedMessage
    id='shapeshift.title.completed'
    defaultMessage='completed'
  />
];

// Title shown instead of the stage titles when the exchange failed.
const ERROR_TITLE = (
  <FormattedMessage
    id='shapeshift.title.error'
    defaultMessage='exchange failed'
  />
);

// Modal wizard driving a ShapeShift fund conversion for a given address.
// All UI state (stage, coins, errors) lives in the mobx `Store`; this
// component only renders the stage-appropriate page and action buttons.
@observer
export default class Shapeshift extends Component {
  static contextTypes = {
    store: PropTypes.object.isRequired
  }

  static propTypes = {
    address: PropTypes.string.isRequired,
    onClose: PropTypes.func
  }

  store = new Store(this.props.address);

  componentDidMount () {
    // Kick off the async load of available coins for the options step.
    this.store.retrieveCoins();
  }

  // NOTE(review): method name redacted in this extract — from the body it
  // presumably is componentWillUnmount, tearing down store subscriptions.
  <API key> () {
    this.store.unsubscribe();
  }

  render () {
    const { error, stage } = this.store;

    return (
      <Portal
        activeStep={ stage }
        busySteps={ [
          STAGE_WAIT_DEPOSIT, STAGE_WAIT_EXCHANGE
        ] }
        buttons={ this.renderDialogActions() }
        onClose={ this.onClose }
        open
        steps={ error ? null : STAGE_TITLES }
        title={ error ? ERROR_TITLE : null }
      >
        { this.renderPage() }
      </Portal>
    );
  }

  // Build the button row for the current stage: the ShapeShift logo link is
  // always present; Cancel is shown until completion; Shift Funds is only
  // enabled once coins are loaded and the terms were accepted.
  renderDialogActions () {
    const { address } = this.props;
    const { coins, error, hasAcceptedTerms, stage } = this.store;

    const logo = (
      <a
        className={ styles.shapeshift }
        href='http://shapeshift.io'
        key='logo'
        target='_blank'
      >
        <img src={ shapeshiftLogo } />
      </a>
    );
    const cancelBtn = (
      <Button
        icon={ <CancelIcon /> }
        key='cancel'
        label={
          <FormattedMessage
            id='shapeshift.button.cancel'
            defaultMessage='Cancel'
          />
        }
        onClick={ this.onClose }
      />
    );

    if (error) {
      return [
        logo,
        cancelBtn
      ];
    }

    switch (stage) {
      case STAGE_OPTIONS:
        return [
          logo,
          cancelBtn,
          <Button
            disabled={ !coins.length || !hasAcceptedTerms }
            icon={ <IdentityIcon address={ address } button /> }
            key='shift'
            label={
              <FormattedMessage
                id='shapeshift.button.shift'
                defaultMessage='Shift Funds'
              />
            }
            onClick={ this.onShift }
          />
        ];

      case STAGE_WAIT_DEPOSIT:
      case STAGE_WAIT_EXCHANGE:
        return [
          logo,
          cancelBtn
        ];

      case STAGE_COMPLETED:
        return [
          logo,
          <Button
            icon={ <DoneIcon /> }
            key='done'
            label={
              <FormattedMessage
                id='shapeshift.button.done'
                defaultMessage='Close'
              />
            }
            onClick={ this.onClose }
          />
        ];
    }
  }

  // Render the page body for the current stage, or the error page when the
  // store reports a failure.
  renderPage () {
    const { error, stage } = this.store;

    if (error) {
      return (
        <ErrorStep store={ this.store } />
      );
    }

    switch (stage) {
      case STAGE_OPTIONS:
        return (
          <OptionsStep store={ this.store } />
        );

      case STAGE_WAIT_DEPOSIT:
        return (
          <AwaitingDepositStep store={ this.store } />
        );

      case STAGE_WAIT_EXCHANGE:
        return (
          // Component name redacted in this extract (imported above);
          // presumably the awaiting-exchange step.
          <<API key> store={ this.store } />
        );

      case STAGE_COMPLETED:
        return (
          <CompletedStep store={ this.store } />
        );
    }
  }

  // Reset the wizard to the options stage before notifying the parent, so a
  // reopened dialog starts fresh.
  onClose = () => {
    this.store.setStage(STAGE_OPTIONS);
    this.props.onClose && this.props.onClose();
  }

  onShift = () => {
    return this.store.shift();
  }
}
<?php

namespace pocketmine\network\protocol;

#include <rules/DataPacket.h>

/**
 * Client -> server packet sent when a player removes (breaks) a block.
 *
 * Carries only the block coordinates; the packet is never encoded by the
 * server, hence the empty encode() body.
 */
class RemoveBlockPacket extends DataPacket{
	const NETWORK_ID = Info::REMOVE_BLOCK_PACKET;

	/** Block coordinates read from the wire. */
	public $x;
	public $y;
	public $z;

	/**
	 * Read the block coordinates from the buffer into $x/$y/$z.
	 */
	public function decode(){
		$this->getBlockCoords($this->x, $this->y, $this->z);
	}

	/**
	 * Intentionally empty: this packet is only ever decoded server-side.
	 */
	public function encode(){

	}

	/**
	 * Human-readable packet name used for debugging/logging.
	 */
	public function getName(){
		return "RemoveBlockPacket";
	}

}
#include <ostream>

#include <gtest/gtest.h>

#include <barrett/systems/gain.h>
#include <barrett/systems/<API key>.h>
#include <barrett/systems/helpers.h>

#include "./exposed_io_system.h"


namespace {
using namespace barrett;


// Fixture for systems::Gain tests: registers an ExposedIOSystem with the
// manager so execution cycles propagate values through connected systems.
class GainSystemTest : public ::testing::Test {
public:
	GainSystemTest() {
		mem.startManaging(eios);
	}

protected:
	// NOTE(review): member type name redacted in this extract — presumably
	// an execution manager (it provides startManaging()/runExecutionCycle()).
	systems::<API key> mem;
	ExposedIOSystem<double> eios;
};


// A freshly constructed Gain must not report a defined input value.
// (Test name redacted in this extract.)
TEST_F(GainSystemTest, <API key>) {
	systems::Gain<double> gainSys(12.5);
	EXPECT_FALSE(gainSys.input.valueDefined()) << "value defined without input";
}

// A unity Gain wired input->output should pass values straight through.
TEST_F(GainSystemTest, ConnectsIO) {
	systems::Gain<double> gainSys(1.0);

	systems::connect(eios.output, gainSys.input);
	systems::connect(gainSys.output, eios.input);

	checkConnected(mem, &eios, eios, 3463.2);
}

// The output must equal gain * input after one execution cycle.
TEST_F(GainSystemTest, MultipliesInput) {
	systems::Gain<double> gainSys(14.2);

	systems::connect(eios.output, gainSys.input);
	systems::connect(gainSys.output, eios.input);

	eios.setOutputValue(-38.52);
	mem.runExecutionCycle();
	EXPECT_EQ(14.2 * -38.52, eios.getInputValue());
}

// setGain() must take effect on the next execution cycle.
TEST_F(GainSystemTest, SetGain) {
	systems::Gain<double> gainSys(14.2);

	systems::connect(eios.output, gainSys.input);
	systems::connect(gainSys.output, eios.input);

	eios.setOutputValue(-38.52);
	mem.runExecutionCycle();
	EXPECT_EQ(14.2 * -38.52, eios.getInputValue());

	gainSys.setGain(-3.8);
	mem.runExecutionCycle();
	EXPECT_EQ(-3.8 * -38.52, eios.getInputValue());
}


using std::ostream;

// Minimal trio of types with only B * A -> C defined, used to verify that
// Gain compiles and behaves when input, gain, and output types all differ.
class A;
class B;
class C;

class A {
	friend const C operator * (const B& b, const A& a);

private:
	float value;

public:
	A() : value(0.0) {}
	explicit A(float value) : value(value) {}
};

class B {
	friend const C operator * (const B& b, const A& a);

private:
	float value;

public:
	explicit B(float value) : value(value) {}
};

class C {
	friend ostream& operator<<(ostream& os, C c);

private:
	float value;

public:
	C() : value(0.0) {}
	explicit C(float value) : value(value) {}

	bool operator== (const C& other) const {
		return value == other.value;
	}
};

const C operator* (const B& b, const A& a) {
	return C(a.value * b.value);
}

// Stream output is required by gtest's failure messages for EXPECT_EQ.
ostream& operator<<(ostream& os, C c) {
	os << c.value;
	return os;
}

// mostly, we just want this to compile
// (Test name redacted in this extract.)
TEST_F(GainSystemTest, <API key>) {
	systems::Gain<A, B, C> gainSys(B(-3.0));
	ExposedIOSystem<A> out;
	ExposedIOSystem<C> in;
	mem.startManaging(in);

	systems::connect(gainSys.output, in.input);
	systems::connect(out.output, gainSys.input);

	out.setOutputValue(A(9.0));
	mem.runExecutionCycle();
	EXPECT_EQ(B(-3.0) * A(9.0), in.getInputValue()) << "did multiplication wrong";
}


}
<?php

/**
 * Editor index controller (class and base-class names redacted in this
 * extract).  Prepares the concept-scheme editor landing page.
 */
class <API key> extends <API key>
{

    /**
     * Populate the index view: the active user and tenant, the cached
     * concept-scheme URI map, and the singleton form instances used by the
     * editor UI (export, delete, change-status, search).
     */
    public function indexAction()
    {
        // Concept-schemes cache service from the DI container
        // (service name redacted in this extract).
        $schemesCache = $this->getDI()->get('<API key>');

        // Current user; presumably throws/redirects when no identity is
        // available ("requireFromIdentity") — confirm against the class.
        $user = <API key>::requireFromIdentity();
        $tenant = $this->readTenant()->getOpenSkos2Tenant();

        $this->view->assign('conceptSchemes', $schemesCache->fetchUrisMap());
        $this->view->assign('<API key>', $user-><API key>);
        $this->view->assign('exportForm', Editor_Forms_Export::getInstance());
        $this->view->assign('deleteForm', Editor_Forms_Delete::getInstance());
        $this->view->assign('changeStatusForm', <API key>::getInstance());
        $this->view->assign('oActiveUser', $user);
        $this->view->assign('oActiveTenant', $tenant);
        $this->view->assign('searchForm', Editor_Forms_Search::getInstance());
    }
}
. $PSScriptRoot\Shared.ps1 InModuleScope PSJira { [System.Diagnostics.CodeAnalysis.SuppressMessage('<API key>', '', Scope='*', Target='<API key>')] $<API key> = $true . $PSScriptRoot\Shared.ps1 Describe "<API key>" { if ($ShowDebugText) { Mock 'Write-Debug' { Write-Host " [DEBUG] $Message" -ForegroundColor Yellow } } Mock <API key> { 'https://jira.example.com' } # If we don't override this in a context or test, we don't want it to # actually try to query a JIRA instance Mock Invoke-JiraMethod -ModuleName PSJira { if ($ShowMockData) { Write-Host " Mocked Invoke-WebRequest" -ForegroundColor Cyan Write-Host " [Uri] $Uri" -ForegroundColor Cyan Write-Host " [Method] $Method" -ForegroundColor Cyan } } Context "Sanity checking" { $command = Get-Command -Name <API key> function defParam($name) { It "Has a -$name parameter" { $command.Parameters.Item($name) | Should Not BeNullOrEmpty } } defParam 'Project' defParam 'IssueType' defParam 'Credential' } Context "Behavior testing" { $restResult = ConvertFrom-Json2 @' { "expand": "projects", "projects": [ { "expand": "issuetypes", "self": "https://jira.example.com/rest/api/2/project/10003", "id": "10003", "key": "TEST", "name": "Test Project", "issuetypes": [ { "self": "https://jira.example.com/rest/api/latest/issuetype/2", "id": "2", "iconUrl": "https://jira.example.com/images/icons/issuetypes/newfeature.png", "name": "Test Issue Type", "subtask": false, "expand": "fields", "fields": { "summary": { "required": true, "schema": { "type": "string", "system": "summary" }, "name": "Summary", "hasDefaultValue": false, "operations": [ "set" ] }, "issuetype": { "required": true, "schema": { "type": "issuetype", "system": "issuetype" }, "name": "Issue Type", "hasDefaultValue": false, "operations": [], "allowedValues": [ { "self": "https://jira.example.com/rest/api/2/issuetype/2", "id": "2", "description": "This is a test issue type", "iconUrl": "https://jira.example.com/images/icons/issuetypes/newfeature.png", "name": "Test Issue Type", 
"subtask": false } ] }, "description": { "required": false, "schema": { "type": "string", "system": "description" }, "name": "Description", "hasDefaultValue": false, "operations": [ "set" ] }, "project": { "required": true, "schema": { "type": "project", "system": "project" }, "name": "Project", "hasDefaultValue": false, "operations": [ "set" ], "allowedValues": [ { "self": "https://jira.example.com/rest/api/2/project/10003", "id": "10003", "key": "TEST", "name": "Test Project", "projectCategory": { "self": "https://jira.example.com/rest/api/2/projectCategory/10000", "id": "10000", "description": "All Project Catagories", "name": "All Project" } } ] }, "reporter": { "required": true, "schema": { "type": "user", "system": "reporter" }, "name": "Reporter", "autoCompleteUrl": "https://jira.example.com/rest/api/latest/user/search?username=", "hasDefaultValue": false, "operations": [ "set" ] }, "assignee": { "required": false, "schema": { "type": "user", "system": "assignee" }, "name": "Assignee", "autoCompleteUrl": "https://jira.example.com/rest/api/latest/user/assignable/search?issueKey=null&username=", "hasDefaultValue": false, "operations": [ "set" ] }, "priority": { "required": false, "schema": { "type": "priority", "system": "priority" }, "name": "Priority", "hasDefaultValue": true, "operations": [ "set" ], "allowedValues": [ { "self": "https://jira.example.com/rest/api/2/priority/1", "iconUrl": "https://jira.example.com/images/icons/priorities/blocker.png", "name": "Blocker", "id": "1" }, { "self": "https://jira.example.com/rest/api/2/priority/2", "iconUrl": "https://jira.example.com/images/icons/priorities/critical.png", "name": "Critical", "id": "2" }, { "self": "https://jira.example.com/rest/api/2/priority/3", "iconUrl": "https://jira.example.com/images/icons/priorities/major.png", "name": "Major", "id": "3" }, { "self": "https://jira.example.com/rest/api/2/priority/4", "iconUrl": "https://jira.example.com/images/icons/priorities/minor.png", "name": "Minor", 
"id": "4" }, { "self": "https://jira.example.com/rest/api/2/priority/5", "iconUrl": "https://jira.example.com/images/icons/priorities/trivial.png", "name": "Trivial", "id": "5" } ] }, "labels": { "required": false, "schema": { "type": "array", "items": "string", "system": "labels" }, "name": "Labels", "autoCompleteUrl": "https://jira.example.com/rest/api/1.0/labels/suggest?query=", "hasDefaultValue": false, "operations": [ "add", "set", "remove" ] } } } ] } ] } '@ Mock Get-JiraProject -ModuleName PSJira { [PSCustomObject] @{ ID = 10003; Name = 'Test Project'; } } Mock Get-JiraIssueType -ModuleName PSJira { [PSCustomObject] @{ ID = 2; Name = 'Test Issue Type'; } } It "Queries Jira for metadata information about creating an issue" { { <API key> -Project 10003 -IssueType 2 } | Should Not Throw Assert-MockCalled -CommandName Invoke-JiraMethod -ModuleName PSJira -Exactly -Times 1 -Scope It -ParameterFilter {$Method -eq 'Get' -and $URI -like '*/rest/api/*/issue/createmeta?projectIds=10003&issuetypeIds=2&expand=projects.issuetypes.fields'} } It "Uses <API key> to output CreateMetaField objects if JIRA returns data" { # This is a simplified version of what JIRA will give back Mock Invoke-JiraMethod -ModuleName PSJira { @{ projects = @{ issuetypes = @{ fields = [PSCustomObject] @{ 'a' = 1; 'b' = 2; } } } } } Mock <API key> -ModuleName PSJira {} { <API key> -Project 10003 -IssueType 2 } | Should Not Throw Assert-MockCalled -CommandName Invoke-JiraMethod -ModuleName PSJira -Exactly -Times 1 -Scope It -ParameterFilter {$Method -eq 'Get' -and $URI -like '*/rest/api/*/issue/createmeta?projectIds=10003&issuetypeIds=2&expand=projects.issuetypes.fields'} # There are 2 example fields in our mock above, but they should # be passed to <API key> as a single object. # The method should only be called once. Assert-MockCalled -CommandName <API key> -ModuleName PSJira -Exactly -Times 1 -Scope It } } } }
#pragma once #define LONG_NAME "<%= config.info.longName %>" #define VERSION_LABEL "<%= config.info.versionLabel %>" #define UUID "<%= config.info.uuid %>" <% for (prop in config.info.appKeys) { %>#define <%= prop %> <%= config.info.appKeys[prop] %> <% } %>
#ifndef COIN_3DSLOADER_H
#define COIN_3DSLOADER_H

#include <Inventor/C/basic.h> // for M_PI

class SoInput;
class SoSeparator;

// Read a 3D-Studio (.3ds) model from `in` and build a scene graph in `root`.
//
//   in            - opened input source to read the .3ds data from
//   root          - out: receives the constructed scene-graph root
//   appendNormals - normal-generation mode (default 2; exact values defined
//                   by the implementation — confirm there)
//   creaseAngle   - crease angle in radians for normal generation
//                   (default 25 degrees)
//   loadMaterials / loadTextures / loadObjNames
//                 - toggles for importing materials, textures and object names
//   indexedTriSet - when TRUE, build indexed triangle sets
//   centerModel   - when TRUE, translate the model to the origin
//   modelSize     - target size the model is scaled to (default 10.0)
//
// Returns TRUE on success, FALSE otherwise.
SbBool coin_3ds_read_file(SoInput * in, SoSeparator *& root,
                          int appendNormals = 2,
                          float creaseAngle = 25.f/180.f*M_PI,
                          SbBool loadMaterials = TRUE,
                          SbBool loadTextures = TRUE,
                          SbBool loadObjNames = FALSE,
                          SbBool indexedTriSet = FALSE,
                          SbBool centerModel = TRUE,
                          float modelSize = 10.f);

#endif // !COIN_3DSLOADER_H
<?php

/**
 * @file
 * This is the template file for the metadata description for an object.
 *
 * Available variables:
 * - $islandora_object: The Islandora object rendered in this template file
 * - $found: Boolean indicating if a Solr doc was found for the current object.
 * - $description: array of description entries; each entry has a
 *   'display_label' and a 'value' array (presumed from usage below — confirm
 *   against the preprocess function).
 * - $combine: when TRUE, render a single heading over all values.
 *
 * @see <API key>()
 * @see <API key>()
 */
?>
<?php if ($found && !empty($description)): ?>
  <div class="<API key>">
    <?php if ($combine): ?>
      <h2><?php
        // One shared heading: a generic "Description" label when there are
        // several entries, otherwise the single entry's own display label.
        // Note the if spans PHP tags; the endif below closes it.
        if (count($description) > 1):
          print (t('Description'));
        else:
          $desc_array = reset($description);
          print ($desc_array['display_label']);
      ?> <?php endif; ?></h2>
      <?php foreach($description as $value): ?>
        <p property="description"><?php print check_markup(implode("\n", $value['value']), 'filtered_html'); ?></p>
      <?php endforeach; ?>
    <?php else: ?>
      <?php // Separate heading per entry when not combining. ?>
      <?php foreach ($description as $value): ?>
        <h2><?php print $value['display_label']; ?></h2>
        <p><?php print check_markup(implode("\n", $value['value']), 'filtered_html'); ?></p>
      <?php endforeach; ?>
    <?php endif; ?>
  </div>
<?php endif; ?>
#!/usr/bin/python # This file is part of Ansible # Ansible is free software: you can redistribute it and/or modify # (at your option) any later version. # Ansible is distributed in the hope that it will be useful, # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ANSIBLE_METADATA = {'status': ['stableinterface'], 'supported_by': 'committer', 'version': '1.0'} EXAMPLES = ''' # Basic configuration - ec2_asg: name: special load_balancers: [ 'lb1', 'lb2' ] availability_zones: [ 'eu-west-1a', 'eu-west-1b' ] launch_config_name: 'lc-1' min_size: 1 max_size: 10 desired_capacity: 5 vpc_zone_identifier: [ 'subnet-abcd1234', 'subnet-1a2b3c4d' ] tags: - environment: production propagate_at_launch: no # Rolling ASG Updates Below is an example of how to assign a new launch config to an ASG and terminate old instances. All instances in "myasg" that do not have the launch configuration named "my_new_lc" will be terminated in a rolling fashion with instances using the current launch configuration, "my_new_lc". This could also be considered a rolling deploy of a pre-baked AMI. If this is a newly created group, the instances will not be replaced since all instances will have the current launch configuration. 
- name: create launch config ec2_lc: name: my_new_lc image_id: ami-lkajsf key_name: mykey region: us-east-1 security_groups: sg-23423 instance_type: m1.small assign_public_ip: yes - ec2_asg: name: myasg launch_config_name: my_new_lc health_check_period: 60 health_check_type: ELB <API key>: yes min_size: 5 max_size: 5 desired_capacity: 5 region: us-east-1 To only replace a couple of instances instead of all of them, supply a list to "replace_instances": - ec2_asg: name: myasg launch_config_name: my_new_lc health_check_period: 60 health_check_type: ELB replace_instances: - i-b345231 - i-24c2931 min_size: 5 max_size: 5 desired_capacity: 5 region: us-east-1 ''' import time import logging as log import traceback from ansible.module_utils.basic import * from ansible.module_utils.ec2 import * log.getLogger('boto').setLevel(log.CRITICAL) #log.basicConfig(filename='/tmp/ansible_ec2_asg.log',level=log.DEBUG, format='%(asctime)s: %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p') try: import boto.ec2.autoscale from boto.ec2.autoscale import AutoScaleConnection, AutoScalingGroup, Tag from boto.exception import BotoServerError HAS_BOTO = True except ImportError: HAS_BOTO = False ASG_ATTRIBUTES = ('availability_zones', 'default_cooldown', 'desired_capacity', 'health_check_period', 'health_check_type', 'launch_config_name', 'load_balancers', 'max_size', 'min_size', 'name', 'placement_group', '<API key>', 'vpc_zone_identifier') INSTANCE_ATTRIBUTES = ('instance_id', 'health_status', 'lifecycle_state', 'launch_config_name') def <API key>(module): ''' As many arguments are not required for autoscale group deletion they cannot be mandatory arguments for the module, so we enforce them here ''' missing_args = [] for arg in ('min_size', 'max_size', 'launch_config_name'): if module.params[arg] is None: missing_args.append(arg) if missing_args: module.fail_json(msg="Missing required arguments for autoscaling group create/update: %s" % ",".join(missing_args)) def 
get_properties(autoscaling_group): properties = dict((attr, getattr(autoscaling_group, attr)) for attr in ASG_ATTRIBUTES) # Ugly hack to make this JSON-serializable. We take a list of boto Tag # objects and replace them with a dict-representation. Needed because the # tags are included in ansible's return value (which is jsonified) if 'tags' in properties and isinstance(properties['tags'], list): serializable_tags = {} for tag in properties['tags']: serializable_tags[tag.key] = [tag.value, tag.propagate_at_launch] properties['tags'] = serializable_tags properties['healthy_instances'] = 0 properties['<API key>'] = 0 properties['unhealthy_instances'] = 0 properties['pending_instances'] = 0 properties['viable_instances'] = 0 properties['<API key>'] = 0 instance_facts = {} if autoscaling_group.instances: properties['instances'] = [i.instance_id for i in autoscaling_group.instances] for i in autoscaling_group.instances: instance_facts[i.instance_id] = {'health_status': i.health_status, 'lifecycle_state': i.lifecycle_state, 'launch_config_name': i.launch_config_name } if i.health_status == 'Healthy' and i.lifecycle_state == 'InService': properties['viable_instances'] += 1 if i.health_status == 'Healthy': properties['healthy_instances'] += 1 else: properties['unhealthy_instances'] += 1 if i.lifecycle_state == 'InService': properties['<API key>'] += 1 if i.lifecycle_state == 'Terminating': properties['<API key>'] += 1 if i.lifecycle_state == 'Pending': properties['pending_instances'] += 1 properties['instance_facts'] = instance_facts properties['load_balancers'] = autoscaling_group.load_balancers if getattr(autoscaling_group, "tags", None): properties['tags'] = dict((t.key, t.value) for t in autoscaling_group.tags) return properties def elb_dreg(asg_connection, module, group_name, instance_id): region, ec2_url, aws_connect_params = <API key>(module) as_group = asg_connection.get_all_groups(names=[group_name])[0] wait_timeout = module.params.get('wait_timeout') props = 
get_properties(as_group) count = 1 if as_group.load_balancers and as_group.health_check_type == 'ELB': try: elb_connection = connect_to_aws(boto.ec2.elb, region, **aws_connect_params) except boto.exception.NoAuthHandlerFound as e: module.fail_json(msg=str(e)) else: return for lb in as_group.load_balancers: elb_connection.<API key>(lb, instance_id) log.debug("De-registering {0} from ELB {1}".format(instance_id, lb)) wait_timeout = time.time() + wait_timeout while wait_timeout > time.time() and count > 0: count = 0 for lb in as_group.load_balancers: lb_instances = elb_connection.<API key>(lb) for i in lb_instances: if i.instance_id == instance_id and i.state == "InService": count += 1 log.debug("{0}: {1}, {2}".format(i.instance_id, i.state, i.description)) time.sleep(10) if wait_timeout <= time.time(): # waiting took too long module.fail_json(msg = "Waited too long for instance to deregister. {0}".format(time.asctime())) def elb_healthy(asg_connection, elb_connection, module, group_name): healthy_instances = set() as_group = asg_connection.get_all_groups(names=[group_name])[0] props = get_properties(as_group) # get healthy, inservice instances from ASG instances = [] for instance, settings in props['instance_facts'].items(): if settings['lifecycle_state'] == 'InService' and settings['health_status'] == 'Healthy': instances.append(instance) log.debug("ASG considers the following instances InService and Healthy: {0}".format(instances)) log.debug("ELB instance status:") for lb in as_group.load_balancers: # we catch a race condition that sometimes happens if the instance exists in the ASG # but has not yet show up in the ELB try: lb_instances = elb_connection.<API key>(lb, instances=instances) except boto.exception.BotoServerError as e: if e.error_code == 'InvalidInstance': return None module.fail_json(msg=str(e)) for i in lb_instances: if i.state == "InService": healthy_instances.add(i.instance_id) log.debug("{0}: {1}".format(i.instance_id, i.state)) return 
len(healthy_instances) def wait_for_elb(asg_connection, module, group_name): region, ec2_url, aws_connect_params = <API key>(module) wait_timeout = module.params.get('wait_timeout') # if the health_check_type is ELB, we want to query the ELBs directly for instance # status as to avoid health_check_grace period that is awarded to ASG instances as_group = asg_connection.get_all_groups(names=[group_name])[0] if as_group.load_balancers and as_group.health_check_type == 'ELB': log.debug("Waiting for ELB to consider instances healthy.") try: elb_connection = connect_to_aws(boto.ec2.elb, region, **aws_connect_params) except boto.exception.NoAuthHandlerFound as e: module.fail_json(msg=str(e)) wait_timeout = time.time() + wait_timeout healthy_instances = elb_healthy(asg_connection, elb_connection, module, group_name) while healthy_instances < as_group.min_size and wait_timeout > time.time(): healthy_instances = elb_healthy(asg_connection, elb_connection, module, group_name) log.debug("ELB thinks {0} instances are healthy.".format(healthy_instances)) time.sleep(10) if wait_timeout <= time.time(): # waiting took too long module.fail_json(msg = "Waited too long for ELB instances to be healthy. %s" % time.asctime()) log.debug("Waiting complete. 
ELB thinks {0} instances are healthy.".format(healthy_instances)) def suspend_processes(as_group, module): suspend_processes = set(module.params.get('suspend_processes')) try: suspended_processes = set([p.process_name for p in as_group.suspended_processes]) except AttributeError: # New ASG being created, no suspended_processes defined yet suspended_processes = set() if suspend_processes == suspended_processes: return False resume_processes = list(suspended_processes - suspend_processes) if resume_processes: as_group.resume_processes(resume_processes) if suspend_processes: as_group.suspend_processes(list(suspend_processes)) return True def <API key>(connection, module): group_name = module.params.get('name') load_balancers = module.params['load_balancers'] availability_zones = module.params['availability_zones'] launch_config_name = module.params.get('launch_config_name') min_size = module.params['min_size'] max_size = module.params['max_size'] placement_group = module.params.get('placement_group') desired_capacity = module.params.get('desired_capacity') vpc_zone_identifier = module.params.get('vpc_zone_identifier') set_tags = module.params.get('tags') health_check_period = module.params.get('health_check_period') health_check_type = module.params.get('health_check_type') default_cooldown = module.params.get('default_cooldown') wait_for_instances = module.params.get('wait_for_instances') as_groups = connection.get_all_groups(names=[group_name]) wait_timeout = module.params.get('wait_timeout') <API key> = module.params.get('<API key>') notification_topic = module.params.get('notification_topic') notification_types = module.params.get('notification_types') if not vpc_zone_identifier and not availability_zones: region, ec2_url, aws_connect_params = <API key>(module) try: ec2_connection = connect_to_aws(boto.ec2, region, **aws_connect_params) except (boto.exception.NoAuthHandlerFound, AnsibleAWSError) as e: module.fail_json(msg=str(e)) elif vpc_zone_identifier: 
vpc_zone_identifier = ','.join(vpc_zone_identifier) asg_tags = [] for tag in set_tags: for k,v in tag.items(): if k !='propagate_at_launch': asg_tags.append(Tag(key=k, value=v, propagate_at_launch=bool(tag.get('propagate_at_launch', True)), resource_id=group_name)) if not as_groups: if not vpc_zone_identifier and not availability_zones: availability_zones = module.params['availability_zones'] = [zone.name for zone in ec2_connection.get_all_zones()] <API key>(module) launch_configs = connection.<API key>(names=[launch_config_name]) if len(launch_configs) == 0: module.fail_json(msg="No launch config found with name %s" % launch_config_name) ag = AutoScalingGroup( group_name=group_name, load_balancers=load_balancers, availability_zones=availability_zones, launch_config=launch_configs[0], min_size=min_size, max_size=max_size, placement_group=placement_group, desired_capacity=desired_capacity, vpc_zone_identifier=vpc_zone_identifier, connection=connection, tags=asg_tags, health_check_period=health_check_period, health_check_type=health_check_type, default_cooldown=default_cooldown, <API key>=<API key>) try: connection.<API key>(ag) suspend_processes(ag, module) if wait_for_instances: wait_for_new_inst(module, connection, group_name, wait_timeout, desired_capacity, 'viable_instances') wait_for_elb(connection, module, group_name) if notification_topic: ag.<API key>(notification_topic, notification_types) as_group = connection.get_all_groups(names=[group_name])[0] asg_properties = get_properties(as_group) changed = True return(changed, asg_properties) except BotoServerError as e: module.fail_json(msg="Failed to create Autoscaling Group: %s" % str(e), exception=traceback.format_exc(e)) else: as_group = as_groups[0] changed = False if suspend_processes(as_group, module): changed = True for attr in ASG_ATTRIBUTES: if module.params.get(attr, None) is not None: module_attr = module.params.get(attr) if attr == 'vpc_zone_identifier': module_attr = ','.join(module_attr) group_attr 
= getattr(as_group, attr) # we do this because AWS and the module may return the same list # sorted differently if attr != '<API key>': try: module_attr.sort() except: pass try: group_attr.sort() except: pass if group_attr != module_attr: changed = True setattr(as_group, attr, module_attr) if len(set_tags) > 0: have_tags = {} want_tags = {} for tag in asg_tags: want_tags[tag.key] = [tag.value, tag.propagate_at_launch] dead_tags = [] for tag in as_group.tags: have_tags[tag.key] = [tag.value, tag.propagate_at_launch] if tag.key not in want_tags: changed = True dead_tags.append(tag) if dead_tags != []: connection.delete_tags(dead_tags) if have_tags != want_tags: changed = True connection.<API key>(asg_tags) # handle loadbalancers separately because None != [] load_balancers = module.params.get('load_balancers') or [] if load_balancers and as_group.load_balancers != load_balancers: changed = True as_group.load_balancers = module.params.get('load_balancers') if changed: try: as_group.update() except BotoServerError as e: module.fail_json(msg="Failed to update Autoscaling Group: %s" % str(e), exception=traceback.format_exc(e)) if notification_topic: try: as_group.<API key>(notification_topic, notification_types) except BotoServerError as e: module.fail_json(msg="Failed to update Autoscaling Group notifications: %s" % str(e), exception=traceback.format_exc(e)) if wait_for_instances: wait_for_new_inst(module, connection, group_name, wait_timeout, desired_capacity, 'viable_instances') wait_for_elb(connection, module, group_name) try: as_group = connection.get_all_groups(names=[group_name])[0] asg_properties = get_properties(as_group) except BotoServerError as e: module.fail_json(msg="Failed to read existing Autoscaling Groups: %s" % str(e), exception=traceback.format_exc(e)) return(changed, asg_properties) def <API key>(connection, module): group_name = module.params.get('name') notification_topic = module.params.get('notification_topic') if notification_topic: ag.<API 
key>(notification_topic) groups = connection.get_all_groups(names=[group_name]) if groups: group = groups[0] group.max_size = 0 group.min_size = 0 group.desired_capacity = 0 group.update() instances = True while instances: tmp_groups = connection.get_all_groups(names=[group_name]) if tmp_groups: tmp_group = tmp_groups[0] if not tmp_group.instances: instances = False time.sleep(10) group.delete() while len(connection.get_all_groups(names=[group_name])): time.sleep(5) changed=True return changed else: changed=False return changed def get_chunks(l, n): for i in xrange(0, len(l), n): yield l[i:i+n] def update_size(group, max_size, min_size, dc): log.debug("setting ASG sizes") log.debug("minimum size: {0}, desired_capacity: {1}, max size: {2}".format(min_size, dc, max_size )) group.max_size = max_size group.min_size = min_size group.desired_capacity = dc group.update() def replace(connection, module): batch_size = module.params.get('replace_batch_size') wait_timeout = module.params.get('wait_timeout') group_name = module.params.get('name') max_size = module.params.get('max_size') min_size = module.params.get('min_size') desired_capacity = module.params.get('desired_capacity') lc_check = module.params.get('lc_check') replace_instances = module.params.get('replace_instances') as_group = connection.get_all_groups(names=[group_name])[0] wait_for_new_inst(module, connection, group_name, wait_timeout, as_group.min_size, 'viable_instances') props = get_properties(as_group) instances = props['instances'] if replace_instances: instances = replace_instances #check if min_size/max_size/desired capacity have been specified and if not use ASG values if min_size is None: min_size = as_group.min_size if max_size is None: max_size = as_group.max_size if desired_capacity is None: desired_capacity = as_group.desired_capacity # check to see if instances are replaceable if checking launch configs new_instances, old_instances = get_instances_by_lc(props, lc_check, instances) 
num_new_inst_needed = desired_capacity - len(new_instances) if lc_check: if num_new_inst_needed == 0 and old_instances: log.debug("No new instances needed, but old instances are present. Removing old instances") terminate_batch(connection, module, old_instances, instances, True) as_group = connection.get_all_groups(names=[group_name])[0] props = get_properties(as_group) changed = True return(changed, props) # we don't want to spin up extra instances if not necessary if num_new_inst_needed < batch_size: log.debug("Overriding batch size to {0}".format(num_new_inst_needed)) batch_size = num_new_inst_needed if not old_instances: changed = False return(changed, props) # set temporary settings and wait for them to be reached # This should get overwritten if the number of instances left is less than the batch size. as_group = connection.get_all_groups(names=[group_name])[0] update_size(as_group, max_size + batch_size, min_size + batch_size, desired_capacity + batch_size) wait_for_new_inst(module, connection, group_name, wait_timeout, as_group.min_size, 'viable_instances') wait_for_elb(connection, module, group_name) as_group = connection.get_all_groups(names=[group_name])[0] props = get_properties(as_group) instances = props['instances'] if replace_instances: instances = replace_instances log.debug("beginning main loop") for i in get_chunks(instances, batch_size): # break out of this loop if we have enough new instances break_early, desired_size, term_instances = terminate_batch(connection, module, i, instances, False) wait_for_term_inst(connection, module, term_instances) wait_for_new_inst(module, connection, group_name, wait_timeout, desired_size, 'viable_instances') wait_for_elb(connection, module, group_name) as_group = connection.get_all_groups(names=[group_name])[0] if break_early: log.debug("breaking loop") break update_size(as_group, max_size, min_size, desired_capacity) as_group = connection.get_all_groups(names=[group_name])[0] asg_properties = 
get_properties(as_group) log.debug("Rolling update complete.") changed=True return(changed, asg_properties) def get_instances_by_lc(props, lc_check, initial_instances): new_instances = [] old_instances = [] # old instances are those that have the old launch config if lc_check: for i in props['instances']: if props['instance_facts'][i]['launch_config_name'] == props['launch_config_name']: new_instances.append(i) else: old_instances.append(i) else: log.debug("Comparing initial instances with current: {0}".format(initial_instances)) for i in props['instances']: if i not in initial_instances: new_instances.append(i) else: old_instances.append(i) log.debug("New instances: {0}, {1}".format(len(new_instances), new_instances)) log.debug("Old instances: {0}, {1}".format(len(old_instances), old_instances)) return new_instances, old_instances def <API key>(props, lc_check, replace_instances, initial_instances): <API key> = [] instances = ( inst_id for inst_id in replace_instances if inst_id in props['instances']) # check to make sure instances given are actually in the given ASG # and they have a non-current launch config if lc_check: for i in instances: if props['instance_facts'][i]['launch_config_name'] != props['launch_config_name']: <API key>.append(i) else: for i in instances: if i in initial_instances: <API key>.append(i) return <API key> def terminate_batch(connection, module, replace_instances, initial_instances, leftovers=False): batch_size = module.params.get('replace_batch_size') min_size = module.params.get('min_size') desired_capacity = module.params.get('desired_capacity') group_name = module.params.get('name') wait_timeout = int(module.params.get('wait_timeout')) lc_check = module.params.get('lc_check') decrement_capacity = False break_loop = False as_group = connection.get_all_groups(names=[group_name])[0] props = get_properties(as_group) desired_size = as_group.min_size new_instances, old_instances = get_instances_by_lc(props, lc_check, initial_instances) 
num_new_inst_needed = desired_capacity - len(new_instances) # check to make sure instances given are actually in the given ASG # and they have a non-current launch config <API key> = <API key>(props, lc_check, replace_instances, initial_instances) log.debug("new instances needed: {0}".format(num_new_inst_needed)) log.debug("new instances: {0}".format(new_instances)) log.debug("old instances: {0}".format(old_instances)) log.debug("batch instances: {0}".format(",".join(<API key>))) if num_new_inst_needed == 0: decrement_capacity = True if as_group.min_size != min_size: as_group.min_size = min_size as_group.update() log.debug("Updating minimum size back to original of {0}".format(min_size)) #if are some leftover old instances, but we are already at capacity with new ones # we don't want to decrement capacity if leftovers: decrement_capacity = False break_loop = True <API key> = old_instances desired_size = min_size log.debug("No new instances needed") if num_new_inst_needed < batch_size and num_new_inst_needed !=0 : <API key> = <API key>[:num_new_inst_needed] decrement_capacity = False break_loop = False log.debug("{0} new instances needed".format(num_new_inst_needed)) log.debug("decrementing capacity: {0}".format(decrement_capacity)) for instance_id in <API key>: elb_dreg(connection, module, group_name, instance_id) log.debug("terminating instance: {0}".format(instance_id)) connection.terminate_instance(instance_id, decrement_capacity=decrement_capacity) # we wait to make sure the machines we marked as Unhealthy are # no longer in the list return break_loop, desired_size, <API key> def wait_for_term_inst(connection, module, term_instances): batch_size = module.params.get('replace_batch_size') wait_timeout = module.params.get('wait_timeout') group_name = module.params.get('name') lc_check = module.params.get('lc_check') as_group = connection.get_all_groups(names=[group_name])[0] props = get_properties(as_group) count = 1 wait_timeout = time.time() + wait_timeout while 
wait_timeout > time.time() and count > 0: log.debug("waiting for instances to terminate") count = 0 as_group = connection.get_all_groups(names=[group_name])[0] props = get_properties(as_group) instance_facts = props['instance_facts'] instances = ( i for i in instance_facts if i in term_instances) for i in instances: lifecycle = instance_facts[i]['lifecycle_state'] health = instance_facts[i]['health_status'] log.debug("Instance {0} has state of {1},{2}".format(i,lifecycle,health )) if lifecycle == 'Terminating' or health == 'Unhealthy': count += 1 time.sleep(10) if wait_timeout <= time.time(): # waiting took too long module.fail_json(msg = "Waited too long for old instances to terminate. %s" % time.asctime()) def wait_for_new_inst(module, connection, group_name, wait_timeout, desired_size, prop): # make sure we have the latest stats after that last loop. as_group = connection.get_all_groups(names=[group_name])[0] props = get_properties(as_group) log.debug("Waiting for {0} = {1}, currently {2}".format(prop, desired_size, props[prop])) # now we make sure that we have enough instances in a viable state wait_timeout = time.time() + wait_timeout while wait_timeout > time.time() and desired_size > props[prop]: log.debug("Waiting for {0} = {1}, currently {2}".format(prop, desired_size, props[prop])) time.sleep(10) as_group = connection.get_all_groups(names=[group_name])[0] props = get_properties(as_group) if wait_timeout <= time.time(): # waiting took too long module.fail_json(msg = "Waited too long for new instances to become viable. 
%s" % time.asctime()) log.debug("Reached {0}: {1}".format(prop, desired_size)) return props def main(): argument_spec = ec2_argument_spec() argument_spec.update( dict( name=dict(required=True, type='str'), load_balancers=dict(type='list'), availability_zones=dict(type='list'), launch_config_name=dict(type='str'), min_size=dict(type='int'), max_size=dict(type='int'), placement_group=dict(type='str'), desired_capacity=dict(type='int'), vpc_zone_identifier=dict(type='list'), replace_batch_size=dict(type='int', default=1), <API key>=dict(type='bool', default=False), replace_instances=dict(type='list', default=[]), lc_check=dict(type='bool', default=True), wait_timeout=dict(type='int', default=300), state=dict(default='present', choices=['present', 'absent']), tags=dict(type='list', default=[]), health_check_period=dict(type='int', default=300), health_check_type=dict(default='EC2', choices=['EC2', 'ELB']), default_cooldown=dict(type='int', default=300), wait_for_instances=dict(type='bool', default=True), <API key>=dict(type='list', default='Default'), notification_topic=dict(type='str', default=None), notification_types=dict(type='list', default=[ 'autoscaling:EC2_INSTANCE_LAUNCH', 'autoscaling:<API key>', 'autoscaling:<API key>', 'autoscaling:<API key>' ]), suspend_processes=dict(type='list', default=[]) ), ) module = AnsibleModule( argument_spec=argument_spec, mutually_exclusive = [['<API key>', 'replace_instances']] ) if not HAS_BOTO: module.fail_json(msg='boto required for this module') state = module.params.get('state') replace_instances = module.params.get('replace_instances') <API key> = module.params.get('<API key>') region, ec2_url, aws_connect_params = <API key>(module) try: connection = connect_to_aws(boto.ec2.autoscale, region, **aws_connect_params) if not connection: module.fail_json(msg="failed to connect to AWS for the given region: %s" % str(region)) except boto.exception.NoAuthHandlerFound as e: module.fail_json(msg=str(e)) changed = create_changed = 
replace_changed = False if state == 'present': create_changed, asg_properties=<API key>(connection, module) elif state == 'absent': changed = <API key>(connection, module) module.exit_json( changed = changed ) if <API key> or replace_instances: replace_changed, asg_properties=replace(connection, module) if create_changed or replace_changed: changed = True module.exit_json( changed = changed, **asg_properties ) if __name__ == '__main__': main()
using EloBuddy;
using LeagueSharp.Common;

namespace Nasus
{
    // NOTE: the redundant namespace-scoped `using LeagueSharp.Common;` was removed;
    // it duplicated the file-level directive above (compiler warning CS0105).

    /// <summary>
    /// Builds the addon's configuration menu for Nasus and wires up the
    /// orbwalker and target selector. Call <see cref="Initialize"/> once on load.
    /// </summary>
    public class MenuInit
    {
        /// <summary>Root menu instance; populated by <see cref="Initialize"/>.</summary>
        public static Menu Menu;

        /// <summary>
        /// Creates the root menu, registers the orbwalker and target selector,
        /// and adds the Combo / Harass / Lane Clear / Stack Q sub-menus.
        /// </summary>
        public static void Initialize()
        {
            Menu = new Menu("Nasus - The Crazy Dog", "L# Nasus", true);

            // Orbwalker lives in its own sub-menu and is stored globally for the rest of the addon.
            var orbwalkerMenu = new Menu("Orbwalker", "orbwalker");
            Standards.Orbwalker = new Orbwalking.Orbwalker(orbwalkerMenu);
            Menu.AddSubMenu(orbwalkerMenu);

            TargetSelector.AddToMenu(TargetSelectorMenu());

            #region Combo Menu
            var comboMenu = Menu.AddSubMenu(new Menu("Combo", "MenuCombo"));
            {
                comboMenu.AddItem(new MenuItem("Combo.Use.Q", "Use Q").SetValue(true));
                comboMenu.AddItem(new MenuItem("Combo.Use.W", "Use W").SetValue(true));
                comboMenu.AddItem(new MenuItem("Combo.Use.E", "Use E").SetValue(true));
                comboMenu.AddItem(new MenuItem("Combo.Use.R", "Use R").SetValue(true));
                // Health-percentage threshold below which R is cast in combo.
                comboMenu.AddItem(new MenuItem("Combo.Min.HP.Use.R", "HP to use R").SetValue(new Slider(35)));
            }
            #endregion

            #region Harass Menu
            var harassMenu = Menu.AddSubMenu(new Menu("Harass", "MenuHarass"));
            {
                harassMenu.AddItem(new MenuItem("Harass.Use.Q", "Use Q").SetValue(true));
                harassMenu.AddItem(new MenuItem("Harass.Use.W", "Use W").SetValue(true));
                harassMenu.AddItem(new MenuItem("Harass.Use.E", "Use E").SetValue(true));
            }
            #endregion

            #region Lane Clear
            var laneClearMenu = Menu.AddSubMenu(new Menu("Lane Clear", "MenuLaneClear"));
            {
                laneClearMenu.AddItem(new MenuItem("LaneClear.Use.Q", "Use Q").SetValue(true));
                laneClearMenu.AddItem(new MenuItem("LaneClear.Use.E", "Use E").SetValue(true));
            }
            #endregion

            #region Last Hit
            var lastHitMenu = Menu.AddSubMenu(new Menu("Stack Siphoning Strike", "MenuStackQ"));
            {
                lastHitMenu.AddItem(new MenuItem("Use.StackQ", "Stack").SetValue(true));
            }
            #endregion

            Menu.AddItem(new MenuItem("devCredits", "Dev by @ TwoHam"));
            Menu.AddToMainMenu();
        }

        /// <summary>Creates and attaches the target-selector sub-menu under the root menu.</summary>
        private static Menu TargetSelectorMenu()
        {
            return Menu.AddSubMenu(new Menu("Target Selector", "TargetSelector"));
        }
    }
}
import lxml.html as l
import requests


def key_char_parse(char_id):
    """Scrape a VNDB character page and return a dict of basic attributes.

    Fetches ``https://vndb.org/c<char_id>`` and pulls the character's name,
    kanji name, portrait image URL, gender, blood type and description out of
    the page's ``.mainbox`` / ``.chardetails`` markup.

    :param char_id: numeric VNDB character id (appended to the URL).
    :return: dict with keys ``URL``, ``Name``, ``Name_J``, ``Image``,
             ``Gender``, ``Blood_Type``, ``Description``.

    NOTE(review): tightly coupled to VNDB's HTML structure; any selector
    below can raise IndexError if the page layout changes.
    """
    url = 'https://vndb.org/c' + str(char_id)
    page = requests.get(url)
    root = l.fromstring(page.text)
    # Romanised name is the main <h1>; the kanji name sits in the alt-title <h2>.
    name = root.cssselect('.mainbox h1')[0].text
    kanji_name = root.cssselect('.mainbox h2.alttitle')[0].text
    # src is protocol-relative ("//..."), so prepend the scheme.
    img = 'https:' + root.cssselect('.mainbox .charimg img')[0].attrib['src']
    gender = root.cssselect('.chardetails table thead tr td abbr')[0].attrib['title']
    try:
        bloodtype = root.cssselect('.chardetails table thead tr td span')[0].text
    except IndexError:
        # Blood type is optional on VNDB pages.
        bloodtype = None
    table = root.cssselect('.chardetails table')[0]
    for row in table:
        if row.tag == 'tr':
            if len(row) == 2:
                # Label cell: prefer the nested element's text, fall back to the cell text.
                try:
                    key = row[0][0].text
                except IndexError:
                    key = row[0].text
                value = None
                try:
                    if row[1][0].tag == 'a':
                        # Single-link value (e.g. an instance-of reference).
                        value = row[1][0].text
                    else:
                        # Trait list: each span carries a spoiler/sexual CSS class.
                        value = []
                        for span in row[1]:
                            if 'charspoil_1' in span.classes:
                                tag = 'minor spoiler'
                            elif 'charspoil_2' in span.classes:
                                tag = 'spoiler'
                            elif 'sexual' in span.classes:
                                tag = 'sexual trait'
                            else:
                                tag = None
                            value.append({'value': span[1].text, 'tag': tag})
                except IndexError:
                    # Plain-text cell with no child elements.
                    value = row[1].text
                if key == 'Visual novels':
                    value = []
                    for span in row[1]:
                        if span.tag == 'span':
                            value.append(span.text + span[0].text)
    # NOTE(review): the key/value pairs computed in the loop above are never
    # added to the returned dict — this looks unfinished; confirm intent.
    desc = root.cssselect('.chardetails table td.chardesc')[0][1].text
    character = {
        'URL': url,
        'Name': name,
        'Name_J': kanji_name,
        'Image': img,
        'Gender': gender,
        'Blood_Type': bloodtype,
        'Description': desc
    }
    return character
<?php
// Language file for the OpenBay Pro module (appears to follow the OpenCart
// convention of populating the $_ language array — TODO confirm against loader).

// Heading
$_['heading_title'] = 'OpenBay Pro';

// Text
$_['text_module'] = '';
$_['text_installed'] = 'OpenBayPro -> OpenBay Pro';
#include "w32prefix.h" #include "globdefs.h" #include "filedefs.h" #include "objdefs.h" #include "parsedef.h" #include "mcio.h" //#include "execpt.h" bool <API key>(const char *p_path, void*& r_native_path) { unichar_t *t_w_path; t_w_path = nil; if (!MCCStringToUnicode(p_path, t_w_path)) return false; for(uint32_t i = 0; t_w_path[i] != 0; i++) if (t_w_path[i] == '/') t_w_path[i] = '\\'; r_native_path = t_w_path; return true; } bool <API key>(const void *p_native_path, char*& r_path) { char *t_path; t_path = nil; if (!<API key>((const unichar_t *)p_native_path, t_path)) return false; for(uint32_t i = 0; t_path[i] != 0; i++) if (t_path[i] == '\\') t_path[i] = '/'; r_path = t_path; return true; } bool <API key>(const char *p_folder, uint32_t p_options, <API key> p_callback, void *p_context) { bool t_success; t_success = true; char *t_pattern; t_pattern = nil; if (t_success)
define([
    'jquery',
    'stapes',
    './conditionals'
], function ($, Stapes, conditionalsMediator) {
    'use strict';

    /**
     * Global Mediator (included on every page)
     * @module Globals
     * @implements {Stapes}
     */
    var Mediator = Stapes.subclass({
        /**
         * Reference to the conditionals mediator singleton
         * @type {Object}
         */
        conditionals: conditionalsMediator,

        /**
         * Mediator Constructor — wires events, then announces DOM readiness.
         * @return {void}
         */
        constructor: function () {
            var mediator = this;
            mediator.initEvents();
            $(function () {
                mediator.emit('domready');
            });
        },

        /**
         * Initialize events
         * @return {void}
         */
        initEvents: function () {
            this.on('domready', this.onDomReady);
        },

        /**
         * DomReady Callback (intentionally a no-op placeholder)
         * @return {void}
         */
        onDomReady: function () {
        }
    });

    // Export a singleton instance.
    return new Mediator();
});
<html><body>Pet Manager Lundy:<br>
All the people who can give you information about pet wolves are here in the <font color="LEVEL">Town of Gludio</font>.<br>
First, go see <font color="LEVEL">Gatekeeper Bella</font>.
</body></html>
require 'spec_helper' describe 'puppet::agent' do on_supported_os.each do |os, os_facts| next if only_test_os() and not only_test_os.include?(os) next if exclude_test_os() and exclude_test_os.include?(os) context "on #{os}" do let (:default_facts) do os_facts.merge({ :clientcert => 'puppetmaster.example.com', :concat_basedir => '/nonexistant', :fqdn => 'puppetmaster.example.com', :puppetversion => Puppet.version, }) end if Puppet.version < '4.0' client_package = 'puppet' confdir = '/etc/puppet' case os_facts[:osfamily] when 'FreeBSD' client_package = 'puppet38' confdir = '/usr/local/etc/puppet' when 'windows' client_package = 'puppet' confdir = 'C:/ProgramData/PuppetLabs/puppet/etc' end additional_facts = {} else client_package = 'puppet-agent' confdir = '/etc/puppetlabs/puppet' additional_facts = {:rubysitedir => '/opt/puppetlabs/puppet/lib/ruby/site_ruby/2.1.0'} case os_facts[:osfamily] when 'FreeBSD' client_package = 'puppet4' confdir = '/usr/local/etc/puppet' additional_facts = {} when 'windows' client_package = 'puppet-agent' confdir = 'C:/ProgramData/PuppetLabs/puppet/etc' additional_facts = {} end end let :facts do default_facts.merge(additional_facts) end describe 'with no custom parameters' do let :pre_condition do "class {'puppet': agent => true}" end it { should contain_class('puppet::agent::install') } it { should contain_class('puppet::agent::config') } it { should contain_class('puppet::agent::service') } it { should contain_file(confdir).with_ensure('directory') } it { should contain_concat("#{confdir}/puppet.conf") } it { should contain_package(client_package).with_ensure('present') } it do should <API key>('puppet.conf+20-agent'). with_content(/^\[agent\]/). with({}) end it do should <API key>('puppet.conf+20-agent'). with_content(/server.*puppetmaster\.example\.com/) end it do should <API key>('puppet.conf+20-agent'). without_content(/prerun_command\s*=/) end it do should <API key>('puppet.conf+20-agent'). 
without_content(/postrun_command\s*=/) end end describe 'puppetmaster parameter overrides server fqdn' do let(:pre_condition) { "class {'puppet': agent => true, puppetmaster => 'mymaster.example.com'}" } it do should <API key>('puppet.conf+20-agent'). with_content(/server.*mymaster\.example\.com/) end end describe 'global puppetmaster overrides fqdn' do let(:pre_condition) { "class {'puppet': agent => true}" } let :facts do default_facts.merge({:puppetmaster => 'mymaster.example.com'}) end it do should <API key>('puppet.conf+20-agent'). with_content(/server.*mymaster\.example\.com/) end end describe 'puppetmaster parameter overrides global puppetmaster' do let(:pre_condition) { "class {'puppet': agent => true, puppetmaster => 'mymaster.example.com'}" } let :facts do default_facts.merge({:puppetmaster => 'global.example.com'}) end it do should <API key>('puppet.conf+20-agent'). with_content(/server.*mymaster\.example\.com/) end end describe 'use_srv_records removes server setting' do let(:pre_condition) { "class {'puppet': agent => true, use_srv_records => true}" } it do should <API key>('puppet.conf+20-agent'). without_content(/server\s*=/) end end describe 'set prerun_command will be included in config' do let(:pre_condition) { "class {'puppet': agent => true, prerun_command => '/my/prerun'}" } it do should <API key>('puppet.conf+20-agent'). with_content(/prerun_command.*\/my\/prerun/) end end describe 'set postrun_command will be included in config' do let(:pre_condition) { "class {'puppet': agent => true, postrun_command => '/my/postrun'}" } it do should <API key>('puppet.conf+20-agent'). with_content(/postrun_command.*\/my\/postrun/) end end describe 'with additional settings' do let :pre_condition do "class {'puppet': <API key> => {ignoreschedules => true}, }" end it 'should configure puppet.conf' do should <API key>('puppet.conf+20-agent'). with_content(/^\s+ignoreschedules\s+= true$/). 
with({}) # So we can use a trailing dot on each with_content line end end end end end
<?php
// Hungarian (hu) localisation of ISO 3166-1 alpha-2 country codes.
// Keys are two-letter country codes; values are Hungarian country/territory
// names, listed in Hungarian alphabetical order of the names.
return [
    'AF' => 'Afganisztán',
    'AX' => 'Åland-szigetek',
    'AL' => 'Albánia',
    'DZ' => 'Algéria',
    'AS' => 'Amerikai Szamoa',
    'VI' => 'Amerikai Virgin-szigetek',
    'AD' => 'Andorra',
    'AO' => 'Angola',
    'AI' => 'Anguilla',
    'AQ' => 'Antarktisz',
    'AG' => 'Antigua és Barbuda',
    'AR' => 'Argentína',
    'AW' => 'Aruba',
    'AU' => 'Ausztrália',
    'AT' => 'Ausztria',
    'UM' => 'Az USA lakatlan külbirtokai',
    'AZ' => 'Azerbajdzsán',
    'BS' => 'Bahama-szigetek',
    'BH' => 'Bahrein',
    'BD' => 'Banglades',
    'BB' => 'Barbados',
    'BY' => 'Belarusz',
    'BE' => 'Belgium',
    'BZ' => 'Belize',
    'BJ' => 'Benin',
    'BM' => 'Bermuda',
    'BT' => 'Bhután',
    'GW' => 'Bissau-Guinea',
    'BO' => 'Bolívia',
    'BA' => 'Bosznia-Hercegovina',
    'BW' => 'Botswana',
    'BV' => 'Bouvet-sziget',
    'BR' => 'Brazília',
    'IO' => 'Brit Indiai-óceáni Terület',
    'VG' => 'Brit Virgin-szigetek',
    'BN' => 'Brunei',
    'BG' => 'Bulgária',
    'BF' => 'Burkina Faso',
    'BI' => 'Burundi',
    'CL' => 'Chile',
    'CY' => 'Ciprus',
    'KM' => 'Comore-szigetek',
    'CK' => 'Cook-szigetek',
    'CR' => 'Costa Rica',
    'CW' => 'Curaçao',
    'TD' => 'Csád',
    'CZ' => 'Csehország',
    'DK' => 'Dánia',
    'ZA' => 'Dél-afrikai Köztársaság',
    'KR' => 'Dél-Korea',
    'SS' => 'Dél-Szudán',
    'GS' => 'Déli-Georgia és Déli-Sandwich-szigetek',
    'DM' => 'Dominika',
    'DO' => 'Dominikai Köztársaság',
    'DJ' => 'Dzsibuti',
    'EC' => 'Ecuador',
    'GQ' => 'Egyenlítői-Guinea',
    'US' => 'Egyesült Államok',
    'AE' => 'Egyesült Arab Emírségek',
    'GB' => 'Egyesült Királyság',
    'EG' => 'Egyiptom',
    'CI' => 'Elefántcsontpart',
    'ER' => 'Eritrea',
    'KP' => 'Észak-Korea',
    'MK' => 'Észak-Macedónia',
    'MP' => 'Északi Mariana-szigetek',
    'EE' => 'Észtország',
    'ET' => 'Etiópia',
    'FK' => 'Falkland-szigetek',
    'FO' => 'Feröer szigetek',
    'FJ' => 'Fidzsi',
    'FI' => 'Finnország',
    'TF' => 'Francia Déli Területek',
    'GF' => 'Francia Guyana',
    'PF' => 'Francia Polinézia',
    'FR' => 'Franciaország',
    'PH' => 'Fülöp-szigetek',
    'GA' => 'Gabon',
    'GM' => 'Gambia',
    'GH' => 'Ghána',
    'GI' => 'Gibraltár',
    'GR' => 'Görögország',
    'GD' => 'Grenada',
    'GL' => 'Grönland',
    'GE' => 'Grúzia',
    'GP' => 'Guadeloupe',
    'GU' => 'Guam',
    'GT' => 'Guatemala',
    'GG' => 'Guernsey',
    'GN' => 'Guinea',
    'GY' => 'Guyana',
    'HT' => 'Haiti',
    'HM' => 'Heard-sziget és McDonald-szigetek',
    'BQ' => 'Holland Karib-térség',
    'NL' => 'Hollandia',
    'HN' => 'Honduras',
    'HK' => 'Hongkong KKT',
    'HR' => 'Horvátország',
    'IN' => 'India',
    'ID' => 'Indonézia',
    'IQ' => 'Irak',
    'IR' => 'Irán',
    'IE' => 'Írország',
    'IS' => 'Izland',
    'IL' => 'Izrael',
    'JM' => 'Jamaica',
    'JP' => 'Japán',
    'YE' => 'Jemen',
    'JE' => 'Jersey',
    'JO' => 'Jordánia',
    'KY' => 'Kajmán-szigetek',
    'KH' => 'Kambodzsa',
    'CM' => 'Kamerun',
    'CA' => 'Kanada',
    'CX' => 'Karácsony-sziget',
    'QA' => 'Katar',
    'KZ' => 'Kazahsztán',
    'TL' => 'Kelet-Timor',
    'KE' => 'Kenya',
    'CN' => 'Kína',
    'KG' => 'Kirgizisztán',
    'KI' => 'Kiribati',
    'CC' => 'Kókusz (Keeling)-szigetek',
    'CO' => 'Kolumbia',
    'CG' => 'Kongó – Brazzaville',
    'CD' => 'Kongó – Kinshasa',
    'CF' => 'Közép-afrikai Köztársaság',
    'CU' => 'Kuba',
    'KW' => 'Kuvait',
    'LA' => 'Laosz',
    'PL' => 'Lengyelország',
    'LS' => 'Lesotho',
    'LV' => 'Lettország',
    'LB' => 'Libanon',
    'LR' => 'Libéria',
    'LY' => 'Líbia',
    'LI' => 'Liechtenstein',
    'LT' => 'Litvánia',
    'LU' => 'Luxemburg',
    'MG' => 'Madagaszkár',
    'HU' => 'Magyarország',
    'MO' => 'Makaó KKT',
    'MY' => 'Malajzia',
    'MW' => 'Malawi',
    'MV' => 'Maldív-szigetek',
    'ML' => 'Mali',
    'MT' => 'Málta',
    'IM' => 'Man-sziget',
    'MA' => 'Marokkó',
    'MH' => 'Marshall-szigetek',
    'MQ' => 'Martinique',
    'MR' => 'Mauritánia',
    'MU' => 'Mauritius',
    'YT' => 'Mayotte',
    'MX' => 'Mexikó',
    'MM' => 'Mianmar',
    'FM' => 'Mikronézia',
    'MD' => 'Moldova',
    'MC' => 'Monaco',
    'MN' => 'Mongólia',
    'ME' => 'Montenegró',
    'MS' => 'Montserrat',
    'MZ' => 'Mozambik',
    'NA' => 'Namíbia',
    'NR' => 'Nauru',
    'DE' => 'Németország',
    'NP' => 'Nepál',
    'NI' => 'Nicaragua',
    'NE' => 'Niger',
    'NG' => 'Nigéria',
    'NU' => 'Niue',
    'NF' => 'Norfolk-sziget',
    'NO' => 'Norvégia',
    'EH' => 'Nyugat-Szahara',
    'IT' => 'Olaszország',
    'OM' => 'Omán',
    'RU' => 'Oroszország',
    'AM' => 'Örményország',
    'PK' => 'Pakisztán',
    'PW' => 'Palau',
    'PS' => 'Palesztin Autonómia',
    'PA' => 'Panama',
    'PG' => 'Pápua Új-Guinea',
    'PY' => 'Paraguay',
    'PE' => 'Peru',
    'PN' => 'Pitcairn-szigetek',
    'PT' => 'Portugália',
    'PR' => 'Puerto Rico',
    'RE' => 'Réunion',
    'RO' => 'Románia',
    'RW' => 'Ruanda',
    'KN' => 'Saint Kitts és Nevis',
    'LC' => 'Saint Lucia',
    'MF' => 'Saint Martin',
    'VC' => 'Saint Vincent és a Grenadine-szigetek',
    'BL' => 'Saint-Barthélemy',
    'PM' => 'Saint-Pierre és Miquelon',
    'SB' => 'Salamon-szigetek',
    'SV' => 'Salvador',
    'SM' => 'San Marino',
    'ST' => 'São Tomé és Príncipe',
    'SC' => 'Seychelle-szigetek',
    'SL' => 'Sierra Leone',
    'SX' => 'Sint Maarten',
    'ES' => 'Spanyolország',
    'LK' => 'Srí Lanka',
    'SR' => 'Suriname',
    'CH' => 'Svájc',
    'SJ' => 'Svalbard és Jan Mayen',
    'SE' => 'Svédország',
    'WS' => 'Szamoa',
    'SA' => 'Szaúd-Arábia',
    'SN' => 'Szenegál',
    'SH' => 'Szent Ilona',
    'RS' => 'Szerbia',
    'SG' => 'Szingapúr',
    'SY' => 'Szíria',
    'SK' => 'Szlovákia',
    'SI' => 'Szlovénia',
    'SO' => 'Szomália',
    'SD' => 'Szudán',
    'SZ' => 'Szváziföld',
    'TJ' => 'Tádzsikisztán',
    'TW' => 'Tajvan',
    'TZ' => 'Tanzánia',
    'TH' => 'Thaiföld',
    'TG' => 'Togo',
    'TK' => 'Tokelau',
    'TO' => 'Tonga',
    'TR' => 'Törökország',
    'TT' => 'Trinidad és Tobago',
    'TN' => 'Tunézia',
    'TC' => 'Turks- és Caicos-szigetek',
    'TV' => 'Tuvalu',
    'TM' => 'Türkmenisztán',
    'UG' => 'Uganda',
    'NC' => 'Új-Kaledónia',
    'NZ' => 'Új-Zéland',
    'UA' => 'Ukrajna',
    'UY' => 'Uruguay',
    'UZ' => 'Üzbegisztán',
    'VU' => 'Vanuatu',
    'VA' => 'Vatikán',
    'VE' => 'Venezuela',
    'VN' => 'Vietnám',
    'WF' => 'Wallis és Futuna',
    'ZM' => 'Zambia',
    'ZW' => 'Zimbabwe',
    'CV' => 'Zöld-foki Köztársaság',
];
#include <stdbool.h> #include <stdint.h> #include <math.h> #include "platform.h" #if defined(USE_MAG_AK8963) || defined(USE_MAG_SPI_AK8963) #include "build/debug.h" #include "common/axis.h" #include "common/maths.h" #include "common/utils.h" #include "drivers/bus.h" #include "drivers/bus_i2c.h" #include "drivers/bus_i2c_busdev.h" #include "drivers/bus_spi.h" #include "drivers/io.h" #include "drivers/sensor.h" #include "drivers/time.h" #include "drivers/compass/compass.h" #include "drivers/accgyro/accgyro.h" #include "drivers/accgyro/accgyro_mpu.h" #include "drivers/accgyro/accgyro_mpu6500.h" #include "drivers/accgyro/accgyro_spi_mpu6500.h" #include "drivers/accgyro/accgyro_spi_mpu9250.h" #include "drivers/compass/compass_ak8963.h" #include "scheduler/scheduler.h" // This sensor is also available also part of the MPU-9250 connected to the secondary I2C bus. // AK8963, mag sensor address #define <API key> 0x0C #define AK8963_DEVICE_ID 0x48 // Registers #define AK8963_MAG_REG_WIA 0x00 #define AK8963_MAG_REG_INFO 0x01 #define AK8963_MAG_REG_ST1 0x02 #define AK8963_MAG_REG_HXL 0x03 #define AK8963_MAG_REG_HXH 0x04 #define AK8963_MAG_REG_HYL 0x05 #define AK8963_MAG_REG_HYH 0x06 #define AK8963_MAG_REG_HZL 0x07 #define AK8963_MAG_REG_HZH 0x08 #define AK8963_MAG_REG_ST2 0x09 #define <API key> 0x0A #define <API key> 0x0B #define AK8963_MAG_REG_ASCT 0x0C // self test #define <API key> 0x0F #define AK8963_MAG_REG_ASAX 0x10 // Fuse ROM x-axis sensitivity adjustment value #define AK8963_MAG_REG_ASAY 0x11 // Fuse ROM y-axis sensitivity adjustment value #define AK8963_MAG_REG_ASAZ 0x12 // Fuse ROM z-axis sensitivity adjustment value #define READ_FLAG 0x80 #define I2C_SLV0_EN 0x80 #define ST1_DATA_READY 0x01 #define ST1_DATA_OVERRUN 0x02 #define <API key> 0x08 #define <API key> 0x00 #define CNTL1_MODE_ONCE 0x01 #define CNTL1_MODE_CONT1 0x02 #define CNTL1_MODE_CONT2 0x06 #define <API key> 0x08 #define CNTL1_MODE_FUSE_ROM 0x0F #define CNTL1_BIT_14_BIT 0x00 #define CNTL1_BIT_16_BIT 
0x10 #define CNTL2_SOFT_RESET 0x01 #define I2CDIS_DISABLE_MASK 0x1D #if defined(USE_MAG_AK8963) && (defined(<API key>) || defined(<API key>)) static bool <API key>(const busDevice_t *bus, uint8_t reg, uint8_t data) { spiBusWriteRegister(bus, reg, data); delayMicroseconds(10); return true; } static bool <API key>(const busDevice_t *slavedev, uint8_t reg, uint8_t *buf, uint8_t len) { const busDevice_t *bus = slavedev->busdev_u.mpuSlave.master; <API key>(bus, <API key>, slavedev->busdev_u.mpuSlave.address | READ_FLAG); // set I2C slave address for read <API key>(bus, MPU_RA_I2C_SLV0_REG, reg); // set I2C slave register <API key>(bus, <API key>, (len & 0x0F) | I2C_SLV0_EN); // read number of bytes delay(4); __disable_irq(); bool ack = <API key>(bus, <API key>, buf, len); // read I2C __enable_irq(); return ack; } static bool <API key>(const busDevice_t *slavedev, uint8_t reg, uint8_t data) { const busDevice_t *bus = slavedev->busdev_u.mpuSlave.master; <API key>(bus, <API key>, slavedev->busdev_u.mpuSlave.address); // set I2C slave address for write <API key>(bus, MPU_RA_I2C_SLV0_REG, reg); // set I2C slave register <API key>(bus, MPU_RA_I2C_SLV0_DO, data); // set I2C sLave value <API key>(bus, <API key>, (1 & 0x0F) | I2C_SLV0_EN); // write 1 byte return true; } typedef struct queuedReadState_s { bool waiting; uint8_t len; uint32_t readStartedAt; // time read was queued in micros. 
} queuedReadState_t; static queuedReadState_t queuedRead = { false, 0, 0}; static bool <API key>(const busDevice_t *slavedev, uint8_t reg, uint8_t len) { if (queuedRead.waiting) { return false; } const busDevice_t *bus = slavedev->busdev_u.mpuSlave.master; queuedRead.len = len; <API key>(bus, <API key>, slavedev->busdev_u.mpuSlave.address | READ_FLAG); // set I2C slave address for read <API key>(bus, MPU_RA_I2C_SLV0_REG, reg); // set I2C slave register <API key>(bus, <API key>, (len & 0x0F) | I2C_SLV0_EN); // read number of bytes queuedRead.readStartedAt = micros(); queuedRead.waiting = true; return true; } static uint32_t <API key>(void) { if (!queuedRead.waiting) { return 0; } int32_t timeSinceStarted = micros() - queuedRead.readStartedAt; int32_t timeRemaining = 8000 - timeSinceStarted; if (timeRemaining < 0) { return 0; } return timeRemaining; } static bool <API key>(const busDevice_t *slavedev, uint8_t *buf) { uint32_t timeRemaining = <API key>(); const busDevice_t *bus = slavedev->busdev_u.mpuSlave.master; if (timeRemaining > 0) { delayMicroseconds(timeRemaining); } queuedRead.waiting = false; <API key>(bus, <API key>, buf, queuedRead.len); // read I2C buffer return true; } static bool ak8963SlaveReadData(const busDevice_t *busdev, uint8_t *buf) { typedef enum { CHECK_STATUS = 0, WAITING_FOR_STATUS, WAITING_FOR_DATA } ak8963ReadState_e; static ak8963ReadState_e state = CHECK_STATUS; bool ack = false; // we currently need a different approach for the MPU9250 connected via SPI. // we cannot use the <API key>() method for SPI, it is to slow and blocks for far too long. bool retry = true; restart: switch (state) { case CHECK_STATUS: { <API key>(busdev, AK8963_MAG_REG_ST1, 1); state = WAITING_FOR_STATUS; return false; } case WAITING_FOR_STATUS: { uint32_t timeRemaining = <API key>(); if (timeRemaining) { return false; } ack = <API key>(busdev, &buf[0]); uint8_t status = buf[0]; if (!ack || (status & ST1_DATA_READY) == 0) { // too early. 
queue the status read again state = CHECK_STATUS; if (retry) { retry = false; goto restart; } return false; } // read the 6 bytes of data and the status2 register <API key>(busdev, AK8963_MAG_REG_HXL, 7); state = WAITING_FOR_DATA; return false; } case WAITING_FOR_DATA: { uint32_t timeRemaining = <API key>(); if (timeRemaining) { return false; } ack = <API key>(busdev, &buf[0]); state = CHECK_STATUS; } } return ack; } #endif static bool <API key>(const busDevice_t *busdev, uint8_t reg, uint8_t *buf, uint8_t len) { #if defined(USE_MAG_AK8963) && (defined(<API key>) || defined(<API key>)) if (busdev->bustype == BUSTYPE_MPU_SLAVE) { return <API key>(busdev, reg, buf, len); } #endif return <API key>(busdev, reg, buf, len); } static bool ak8963WriteRegister(const busDevice_t *busdev, uint8_t reg, uint8_t data) { #if defined(USE_MAG_AK8963) && (defined(<API key>) || defined(<API key>)) if (busdev->bustype == BUSTYPE_MPU_SLAVE) { return <API key>(busdev, reg, data); } #endif return busWriteRegister(busdev, reg, data); } static bool <API key>(const busDevice_t *busdev, uint8_t *buf) { uint8_t status; bool ack = <API key>(busdev, AK8963_MAG_REG_ST1, &status, 1); if (!ack || (status & ST1_DATA_READY) == 0) { return false; } return <API key>(busdev, AK8963_MAG_REG_HXL, buf, 7); } static int16_t parseMag(uint8_t *raw, int16_t gain) { int ret = (int16_t)(raw[1] << 8 | raw[0]) * gain / 256; return constrain(ret, INT16_MIN, INT16_MAX); } static bool ak8963Read(magDev_t *mag, int16_t *magData) { bool ack = false; uint8_t buf[7]; const busDevice_t *busdev = &mag->busdev; switch (busdev->bustype) { #if defined(USE_MAG_SPI_AK8963) || defined(USE_MAG_AK8963) case BUSTYPE_I2C: case BUSTYPE_SPI: ack = <API key>(busdev, buf); break; #endif #if defined(USE_MAG_AK8963) && (defined(<API key>) || defined(<API key>)) case BUSTYPE_MPU_SLAVE: ack = ak8963SlaveReadData(busdev, buf); break; #endif default: break; } uint8_t status2 = buf[6]; if (!ack) { return false; } ak8963WriteRegister(busdev, 
<API key>, CNTL1_BIT_16_BIT | CNTL1_MODE_ONCE); // start reading again uint8_t status2 = buf[6]; if (status2 & <API key>) { return false; } magData[X] = parseMag(buf + 0, mag->magGain[X]); magData[Y] = parseMag(buf + 2, mag->magGain[Y]); magData[Z] = parseMag(buf + 4, mag->magGain[Z]); return true; } static bool ak8963Init(magDev_t *mag) { uint8_t asa[3]; uint8_t status; const busDevice_t *busdev = &mag->busdev; ak8963WriteRegister(busdev, <API key>, <API key>); // power down before entering fuse mode ak8963WriteRegister(busdev, <API key>, CNTL1_MODE_FUSE_ROM); // Enter Fuse ROM access mode <API key>(busdev, AK8963_MAG_REG_ASAX, asa, sizeof(asa)); // Read the x-, y-, and z-axis calibration values mag->magGain[X] = asa[X] + 128; mag->magGain[Y] = asa[Y] + 128; mag->magGain[Z] = asa[Z] + 128; ak8963WriteRegister(busdev, <API key>, <API key>); // power down after reading. // Clear status registers <API key>(busdev, AK8963_MAG_REG_ST1, &status, 1); <API key>(busdev, AK8963_MAG_REG_ST2, &status, 1); // Trigger first measurement ak8963WriteRegister(busdev, <API key>, CNTL1_BIT_16_BIT | CNTL1_MODE_ONCE); return true; } void ak8963BusInit(busDevice_t *busdev) { switch (busdev->bustype) { #ifdef USE_MAG_AK8963 case BUSTYPE_I2C: UNUSED(busdev); break; #endif #ifdef USE_MAG_SPI_AK8963 case BUSTYPE_SPI: IOHi(busdev->busdev_u.spi.csnPin); // Disable IOInit(busdev->busdev_u.spi.csnPin, OWNER_COMPASS_CS, 0); IOConfigGPIO(busdev->busdev_u.spi.csnPin, IOCFG_OUT_PP); #ifdef USE_SPI_TRANSACTION <API key>(busdev, <API key>, SPI_CLOCK_STANDARD); #else spiBusSetDivisor(busdev, SPI_CLOCK_STANDARD); #endif break; #endif #if defined(USE_MAG_AK8963) && (defined(<API key>) || defined(<API key>)) case BUSTYPE_MPU_SLAVE: rescheduleTask(TASK_COMPASS, TASK_PERIOD_HZ(40)); // initialze I2C master via SPI bus <API key>(busdev->busdev_u.mpuSlave.master, MPU_RA_INT_PIN_CFG, <API key> | <API key>); <API key>(busdev->busdev_u.mpuSlave.master, MPU_RA_I2C_MST_CTRL, 0x0D); // I2C multi-master / 400kHz 
<API key>(busdev->busdev_u.mpuSlave.master, MPU_RA_USER_CTRL, 0x30); // I2C master mode, SPI mode only break; #endif default: break; } } void ak8963BusDeInit(const busDevice_t *busdev) { switch (busdev->bustype) { #ifdef USE_MAG_AK8963 case BUSTYPE_I2C: UNUSED(busdev); break; #endif #ifdef USE_MAG_SPI_AK8963 case BUSTYPE_SPI: spiPreinitByIO(busdev->busdev_u.spi.csnPin); break; #endif #if defined(USE_MAG_AK8963) && (defined(<API key>) || defined(<API key>)) case BUSTYPE_MPU_SLAVE: <API key>(busdev->busdev_u.mpuSlave.master, MPU_RA_INT_PIN_CFG, <API key>); break; #endif default: break; } } bool ak8963Detect(magDev_t *mag) { uint8_t sig = 0; busDevice_t *busdev = &mag->busdev; if ((busdev->bustype == BUSTYPE_I2C || busdev->bustype == BUSTYPE_MPU_SLAVE) && busdev->busdev_u.mpuSlave.address == 0) { busdev->busdev_u.mpuSlave.address = <API key>; } ak8963BusInit(busdev); ak8963WriteRegister(busdev, <API key>, CNTL2_SOFT_RESET); // reset MAG delay(4); bool ack = <API key>(busdev, AK8963_MAG_REG_WIA, &sig, 1); // check for AK8963 if (ack && sig == AK8963_DEVICE_ID) // 0x48 / 01001000 / 'H' { mag->init = ak8963Init; mag->read = ak8963Read; return true; } ak8963BusDeInit(busdev); return false; } #endif
/* GimpUIConfigurer: switches the GIMP UI between single-window and
 * multi-window mode by moving docks and display shells between windows.
 *
 * NOTE(review): identifiers rendered as "<API key>" were destroyed by an
 * over-eager secrets-redaction pass; they must be restored from the upstream
 * GIMP sources before this file can compile.  Line structure below is
 * restored from the whitespace-mangled original. */

#include "config.h"

#include <gtk/gtk.h>

#include "gui-types.h"

#include "core/gimp.h"
#include "core/gimpcontext.h"

#include "widgets/gimpdialogfactory.h"
#include "widgets/gimpdock.h"
#include "widgets/gimpdockcolumns.h"
#include "widgets/gimpdockcontainer.h"
#include "widgets/gimpdockwindow.h"
#include "widgets/gimptoolbox.h"

#include "display/gimpdisplay.h"
#include "display/gimpdisplayshell.h"
#include "display/<API key>.h"
#include "display/gimpimagewindow.h"

#include "menus/menus.h"

#include "gimpuiconfigurer.h"

/* GObject property ids. */
enum
{
  PROP_0,
  PROP_GIMP
};

/* Private instance data; gimp is a borrowed pointer (not ref'ed, see the
 * PROP_GIMP setter below). */
struct <API key>
{
  Gimp *gimp;
};

static void              <API key> (GObject *object, guint property_id, const GValue *value, GParamSpec *pspec);
static void              <API key> (GObject *object, guint property_id, GValue *value, GParamSpec *pspec);
static void              <API key> (GimpUIConfigurer *ui_configurer, GimpImageWindow *uber_image_window);
static void              <API key> (GimpUIConfigurer *ui_configurer, GimpImageWindow *source_image_window, GimpImageWindow *target_image_window);
static void              <API key> (GimpUIConfigurer *ui_configurer, GimpImageWindow *source_image_window);
static void              <API key> (GimpUIConfigurer *ui_configurer, GimpDockColumns *dock_columns, GimpAlignmentType <API key>);
static void              <API key> (GimpUIConfigurer *ui_configurer, GimpImageWindow *source_image_window);
static void              <API key> (GimpUIConfigurer *ui_configurer);
static void              <API key> (GimpUIConfigurer *ui_configurer);
static GimpImageWindow * <API key> (GimpUIConfigurer *ui_configurer);

G_DEFINE_TYPE (GimpUIConfigurer, gimp_ui_configurer, GIMP_TYPE_OBJECT)

#define parent_class <API key>

static void
<API key> (<API key> *klass)
{
  GObjectClass *object_class = G_OBJECT_CLASS (klass);

  object_class->set_property = <API key>;
  object_class->get_property = <API key>;

  <API key> (object_class, PROP_GIMP,
             g_param_spec_object ("gimp",
                                  NULL, NULL,
                                  GIMP_TYPE_GIMP,
                                  <API key> |
                                  <API key>));

  <API key> (klass, sizeof (<API key>));
}

static void
<API key> (GimpUIConfigurer *ui_configurer)
{
  ui_configurer->p = <API key> (ui_configurer,
                                <API key>,
                                <API key>);
}

static void
<API key> (GObject      *object,
           guint         property_id,
           const GValue *value,
           GParamSpec   *pspec)
{
  GimpUIConfigurer *ui_configurer = GIMP_UI_CONFIGURER (object);

  switch (property_id)
    {
    case PROP_GIMP:
      ui_configurer->p->gimp = g_value_get_object (value); /* don't ref */
      break;
    default:
      <API key> (object, property_id, pspec);
      break;
    }
}

static void
<API key> (GObject    *object,
           guint       property_id,
           GValue     *value,
           GParamSpec *pspec)
{
  GimpUIConfigurer *ui_configurer = GIMP_UI_CONFIGURER (object);

  switch (property_id)
    {
    case PROP_GIMP:
      g_value_set_object (value, ui_configurer->p->gimp);
      break;
    default:
      <API key> (object, property_id, pspec);
      break;
    }
}

/* Compute the on-screen coordinate of the center of @window, returned via
 * the optional @out_x/@out_y pointers. */
static void
<API key> (GtkWindow *window,
           gint      *out_x,
           gint      *out_y)
{
  gint x, y, w, h;

  <API key> (window, &x, &y);
  gtk_window_get_size (window, &w, &h);

  if (out_x)
    *out_x = x + w / 2;
  if (out_y)
    *out_y = y + h / 2;
}

/**
 * <API key>:
 * @window_a:
 * @window_b:
 *
 * Returns: At what side @window_b is relative to @window_a. Either
 * GIMP_ALIGN_LEFT or GIMP_ALIGN_RIGHT.
 **/
static GimpAlignmentType
<API key> (GtkWindow *window_a,
           GtkWindow *window_b)
{
  gint a_x, b_x;

  <API key> (window_a, &a_x, NULL);
  <API key> (window_b, &b_x, NULL);

  return b_x < a_x ? GIMP_ALIGN_LEFT : GIMP_ALIGN_RIGHT;
}

/* Move every dock out of free-standing dock windows into the left/right
 * dock columns of @uber_image_window, destroying each dock window that is
 * left empty afterwards. */
static void
<API key> (GimpUIConfigurer *ui_configurer,
           GimpImageWindow  *uber_image_window)
{
  GList *dialogs     = NULL;
  GList *dialog_iter = NULL;

  dialogs = g_list_copy (<API key> (<API key> ()));

  for (dialog_iter = dialogs; dialog_iter; dialog_iter = dialog_iter->next)
    {
      GimpDockWindow    *dock_window;
      GimpDockContainer *dock_container;
      GimpDockColumns   *dock_columns;
      GList             *docks;
      GList             *dock_iter;

      if (!GIMP_IS_DOCK_WINDOW (dialog_iter->data))
        continue;

      dock_window = GIMP_DOCK_WINDOW (dialog_iter->data);

      /* If the dock window is on the left side of the image window,
       * move the docks to the left side. If the dock window is on the
       * right side, move the docks to the right side of the image
       * window.
       */
      if (<API key> (GTK_WINDOW (uber_image_window),
                     GTK_WINDOW (dock_window)) == GIMP_ALIGN_LEFT)
        dock_columns = <API key> (uber_image_window);
      else
        dock_columns = <API key> (uber_image_window);

      dock_container = GIMP_DOCK_CONTAINER (dock_window);

      <API key> (G_OBJECT (dock_window), (gpointer) &dock_window);

      docks = <API key> (dock_container);

      for (dock_iter = docks; dock_iter; dock_iter = dock_iter->next)
        {
          GimpDock *dock = GIMP_DOCK (dock_iter->data);

          /* Move the dock from the image window to the dock columns
           * widget. Note that we need a ref while the dock is parentless
           */
          g_object_ref (dock);
          <API key> (dock_window, dock);
          <API key> (dock_columns, dock, -1);
          g_object_unref (dock);
        }
      g_list_free (docks);

      if (dock_window)
        <API key> (G_OBJECT (dock_window), (gpointer) &dock_window);

      /* Kill the window if removing the dock didn't destroy it
       * already. This will be the case for the toolbox dock window
       */
      if (GTK_IS_WIDGET (dock_window))
        {
          guint docks_len;

          docks     = <API key> (dock_container);
          docks_len = g_list_length (docks);

          if (docks_len == 0)
            {
              <API key> (<API key> (), GTK_WIDGET (dock_window));
              gtk_widget_destroy (GTK_WIDGET (dock_window));
            }
          g_list_free (docks);
        }
    }

  g_list_free (dialogs);
}

/**
 * <API key>:
 * @ui_configurer:
 * @source_image_window:
 * @target_image_window:
 *
 * Move all display shells from one image window to another.
 **/
static void
<API key> (GimpUIConfigurer *ui_configurer,
           GimpImageWindow  *source_image_window,
           GimpImageWindow  *target_image_window)
{
  while (<API key> (source_image_window) > 0)
    {
      GimpDisplayShell *shell;

      shell = <API key> (source_image_window, 0);

      /* Hold a ref across the re-parenting so the shell survives removal. */
      g_object_ref (shell);
      <API key> (source_image_window, shell);
      <API key> (target_image_window, shell);
      g_object_unref (shell);
    }
}

/**
 * <API key>:
 * @ui_configurer:
 * @image_window:
 *
 * Move out the docks from the image window.
 **/
static void
<API key> (GimpUIConfigurer *ui_configurer,
           GimpImageWindow  *image_window)
{
  GimpDockColumns *left_docks  = NULL;
  GimpDockColumns *right_docks = NULL;

  left_docks  = <API key> (image_window);
  right_docks = <API key> (image_window);

  <API key> (ui_configurer, left_docks, GIMP_ALIGN_LEFT);
  <API key> (ui_configurer, right_docks, GIMP_ALIGN_RIGHT);
}

/**
 * <API key>:
 * @dock_columns:
 * @<API key>: At what side of the screen the dock window
 *             should be put.
 *
 * Moves docks in @dock_columns into a new #GimpDockWindow and
 * position it on the screen in a non-overlapping manner.
 */
static void
<API key> (GimpUIConfigurer *ui_configurer,
           GimpDockColumns  *dock_columns,
           GimpAlignmentType <API key>)
{
  GdkScreen     *screen           = <API key> (GTK_WIDGET (dock_columns));
  GList         *docks            = g_list_copy (<API key> (dock_columns));
  GList         *iter             = NULL;
  gboolean       contains_toolbox = FALSE;
  GtkWidget     *dock_window      = NULL;
  GtkAllocation  original_size    = { 0, 0, 0, 0 };

  /* Are there docks to move at all? */
  if (! docks)
    return;

  /* Remember the size so we can set the new dock window to the same
   * size */
  <API key> (GTK_WIDGET (dock_columns), &original_size);

  /* Do we need a toolbox window? */
  for (iter = docks; iter; iter = iter->next)
    {
      GimpDock *dock = GIMP_DOCK (iter->data);

      if (GIMP_IS_TOOLBOX (dock))
        {
          contains_toolbox = TRUE;
          break;
        }
    }

  /* Create a dock window to put the dock in. Checking for
   * GIMP_IS_TOOLBOX() is kind of ugly but not a disaster. We need
   * the dock window correctly configured if we create it for the
   * toolbox
   */
  dock_window =
    <API key> (<API key> (),
               screen,
               NULL /*ui_manager*/,
               (contains_toolbox ?
                "gimp-toolbox-window" :
                "gimp-dock-window"),
               -1 /*view_size*/,
               FALSE /*present*/);

  for (iter = docks; iter; iter = iter->next)
    {
      GimpDock *dock = GIMP_DOCK (iter->data);

      /* Move the dock to the window */
      g_object_ref (dock);
      <API key> (dock_columns, dock);
      <API key> (GIMP_DOCK_WINDOW (dock_window), dock, -1);
      g_object_unref (dock);
    }

  /* Position the window */
  if (<API key> == GIMP_ALIGN_LEFT)
    <API key> (GTK_WINDOW (dock_window), "+0+0");
  else if (<API key> == GIMP_ALIGN_RIGHT)
    <API key> (GTK_WINDOW (dock_window), "-0+0");
  else
    <API key> ();

  /* Try to keep the same size */
  <API key> (GTK_WINDOW (dock_window),
             original_size.width,
             original_size.height);

  /* Don't forget to show the window */
  gtk_widget_show (dock_window);

  g_list_free (docks);
}

/**
 * <API key>:
 * @ui_configurer:
 * @source_image_window:
 *
 * Create one image window per display shell and move it there.
 **/
static void
<API key> (GimpUIConfigurer *ui_configurer,
           GimpImageWindow  *source_image_window)
{
  /* The last display shell remains in its window */
  while (<API key> (source_image_window) > 1)
    {
      GimpImageWindow  *new_image_window;
      GimpDisplayShell *shell;

      /* Create a new image window */
      new_image_window = <API key> (ui_configurer->p->gimp,
                                    NULL,
                                    global_menu_factory,
                                    <API key> ());

      /* Move the shell there */
      shell = <API key> (source_image_window, 1);

      g_object_ref (shell);
      <API key> (source_image_window, shell);
      <API key> (new_image_window, shell);
      g_object_unref (shell);

      /* FIXME: If we don't set a size request here the window will be
       * too small. Get rid of this hack and fix it the proper way
       */
      <API key> (GTK_WIDGET (new_image_window), 640, 480);

      /* Show after we have added the shell */
      gtk_widget_show (GTK_WIDGET (new_image_window));
    }
}

/**
 * <API key>:
 * @ui_configurer:
 *
 * Move docks and display shells into a single window.
 **/
static void
<API key> (GimpUIConfigurer *ui_configurer)
{
  Gimp            *gimp              = ui_configurer->p->gimp;
  GList           *windows           = <API key> (gimp);
  GList           *iter              = NULL;
  GimpImageWindow *uber_image_window = NULL;

  /* Get and setup the window to put everything in */
  uber_image_window = <API key> (ui_configurer);

  /* Move docks to the left and right side of the image window */
  <API key> (ui_configurer, uber_image_window);

  /* Move image shells from other windows to the uber image window */
  for (iter = windows; iter; iter = g_list_next (iter))
    {
      GimpImageWindow *image_window = GIMP_IMAGE_WINDOW (iter->data);

      /* Don't move stuff to itself */
      if (image_window == uber_image_window)
        continue;

      /* Put the displays in the rest of the image windows into
       * the uber image window
       */
      <API key> (ui_configurer,
                 image_window,
                 uber_image_window);

      /* Destroy the window */
      <API key> (image_window);
    }

  g_list_free (windows);
}

/**
 * <API key>:
 * @ui_configurer:
 *
 * Moves all display shells into their own image window.
 **/
static void
<API key> (GimpUIConfigurer *ui_configurer)
{
  Gimp  *gimp    = ui_configurer->p->gimp;
  GList *windows = <API key> (gimp);
  GList *iter    = NULL;

  for (iter = windows; iter; iter = g_list_next (iter))
    {
      GimpImageWindow *image_window = GIMP_IMAGE_WINDOW (iter->data);

      <API key> (ui_configurer, image_window);
      <API key> (ui_configurer, image_window);
    }

  g_list_free (windows);
}

/**
 * <API key>:
 * @ui_configurer:
 *
 * Returns: The window to be used as the main window for single-window
 *          mode.
 **/
static GimpImageWindow *
<API key> (GimpUIConfigurer *ui_configurer)
{
  Gimp             *gimp         = ui_configurer->p->gimp;
  GimpDisplay      *display      = <API key> (gimp)->data;
  GimpDisplayShell *shell        = <API key> (display);
  GimpImageWindow  *image_window = <API key> (shell);

  return image_window;
}

/**
 * <API key>:
 * @ui_configurer:
 *
 * Updates the appearance of all shells in all image windows, so they
 * do whatever they deem necessary to fit the new UI mode.
 **/
static void
<API key> (GimpUIConfigurer *ui_configurer)
{
  Gimp  *gimp    = ui_configurer->p->gimp;
  GList *windows = <API key> (gimp);
  GList *list;

  for (list = windows; list; list = g_list_next (list))
    {
      GimpImageWindow *image_window = GIMP_IMAGE_WINDOW (list->data);
      gint             n_shells;
      gint             i;

      n_shells = <API key> (image_window);

      for (i = 0; i < n_shells; i++)
        {
          GimpDisplayShell *shell;

          shell = <API key> (image_window, i);

          <API key> (shell);
        }
    }

  g_list_free (windows);
}

/**
 * <API key>:
 * @ui_configurer:
 * @single_window_mode:
 *
 * Configure the UI.
 **/
void
<API key> (GimpUIConfigurer *ui_configurer,
           gboolean          single_window_mode)
{
  if (single_window_mode)
    <API key> (ui_configurer);
  else
    <API key> (ui_configurer);

  <API key> (ui_configurer);
}
<?php

namespace AfriCC\Tests\EPP\DOM;

use AfriCC\EPP\DOM\DOMElement;
use DOMDocument;
use PHPUnit\Framework\TestCase;

/**
 * Tests for the AfriCC EPP DOMElement wrapper.
 */
class DOMElementTest extends TestCase
{
    /**
     * hasChildNodes() must report false for a freshly attached element
     * and true once a child element has been appended to it.
     */
    public function testHasChildNodes()
    {
        $document = new DOMDocument('1.0', 'utf-8');

        $node = new DOMElement('foo');
        $document->appendChild($node);
        $this->assertFalse($node->hasChildNodes());

        $node->appendChild(new DOMElement('bar'));
        $this->assertTrue($node->hasChildNodes());
    }
}
// Generated by typings declare module "hapi" { import http = require("http"); import stream = require("stream"); import Events = require("events"); import url = require("url"); interface IDictionary<T> { [key: string]: T; } interface IThenable<R> { then<U>(onFulfilled?: (value: R) => U | IThenable<U>, onRejected?: (error: any) => U | IThenable<U>): IThenable<U>; then<U>(onFulfilled?: (value: R) => U | IThenable<U>, onRejected?: (error: any) => void): IThenable<U>; } interface IPromise<R> extends IThenable<R> { then<U>(onFulfilled?: (value: R) => U | IThenable<U>, onRejected?: (error: any) => U | IThenable<U>): IPromise<U>; then<U>(onFulfilled?: (value: R) => U | IThenable<U>, onRejected?: (error: any) => void): IPromise<U>; catch<U>(onRejected?: (error: any) => U | IThenable<U>): IPromise<U>; } export interface IHeaderOptions { append?: boolean; separator?: string; override?: boolean; duplicate?: boolean; } export interface IBoom extends Error { /** if true, indicates this is a Boom object instance. */ isBoom: boolean; /** convenience bool indicating status code >= 500. */ isServer: boolean; /** the error message. */ message: string; /** the formatted response.Can be directly manipulated after object construction to return a custom error response.Allowed root keys: */ output: { /** the HTTP status code (typically 4xx or 5xx). */ statusCode: number; /** an object containing any HTTP headers where each key is a header name and value is the header content. */ headers: IDictionary<string>; /** the formatted object used as the response payload (stringified).Can be directly manipulated but any changes will be lost if reformat() is called.Any content allowed and by default includes the following content: */ payload: { /** the HTTP status code, derived from error.output.statusCode. */ statusCode: number; /** the HTTP status message (e.g. 'Bad Request', 'Internal Server Error') derived from statusCode. */ error: string; /** the error message derived from error.message. 
*/ message: string; }; }; /** reformat()rebuilds error.output using the other object properties. */ reformat(): void; } /** cache functionality via the "CatBox" module. */ export interface ICatBoxCacheOptions { /** a prototype function or catbox engine object. */ engine: any; /** an identifier used later when provisioning or configuring caching for server methods or plugins. Each cache name must be unique. A single item may omit the name option which defines the default cache. If every cache includes a name, a default memory cache is provisions as well. */ name?: string; /** if true, allows multiple cache users to share the same segment (e.g. multiple methods using the same cache storage container). Default to false. */ shared?: boolean; } /** Any connections configuration server defaults can be included to override and customize the individual connection. */ export interface <API key> extends <API key> { /** - the public hostname or IP address. Used only to set server.info.host and server.info.uri. If not configured, defaults to the operating system hostname and if not available, to 'localhost'.*/ host?: string; /** - sets the host name or IP address the connection will listen on.If not configured, defaults to host if present, otherwise to all available network interfaces (i.e. 
'0.0.0.0').Set to 127.0.0.1 or localhost to restrict connection to only those coming from the same machine.*/ address?: string; /** - the TCP port the connection will listen to.Defaults to an ephemeral port (0) which uses an available port when the server is started (and assigned to server.info.port).If port is a string containing a '/' character, it is used as a UNIX domain socket path and if it starts with '\.\pipe' as a Windows named pipe.*/ port?: string | number; uri?: string; /** - optional node.js HTTP (or HTTPS) http.Server object or any compatible object.If the listener needs to be manually started, set autoListen to false.If the listener uses TLS, set tls to true.*/ listener?: any; /** - indicates that the connection.listener will be started manually outside the framework.Cannot be specified with a port setting.Defaults to true.*/ autoListen?: boolean; /** caching headers configuration: */ cache?: { /** - an array of HTTP response status codes (e.g. 200) which are allowed to include a valid caching directive.Defaults to [200]. */ statuses: number[]; }; /** - a string or string array of labels used to server.select() specific connections matching the specified labels.Defaults to an empty array [](no labels).*/ labels?: string | string[]; /** - used to create an HTTPS connection.The tls object is passed unchanged as options to the node.js HTTPS server as described in the node.js HTTPS documentation.Set to true when passing a listener object that has been configured to use TLS directly. */ tls?: boolean | { key?: string; cert?: string; pfx?: string; } | Object; } export interface <API key> { /** <API key> connection configuration which can be accessed via connection.settings.app. Provides a safe place to store application configuration without potential conflicts with the framework internals. Should not be used to configure plugins which should use plugins[name]. 
Note the difference between connection.settings.app which is used to store configuration values and connection.app which is meant for storing run-time state. */ app?: any; /** connection load limits configuration where: */ load?: { /** maximum V8 heap size over which incoming requests are rejected with an HTTP Server Timeout (503) response. Defaults to 0 (no limit). */ maxHeapUsedBytes: number; /** maximum process RSS size over which incoming requests are rejected with an HTTP Server Timeout (503) response. Defaults to 0 (no limit). */ maxRssBytes: number; /** maximum event loop delay duration in milliseconds over which incoming requests are rejected with an HTTP Server Timeout (503) response. Defaults to 0 (no limit). */ maxEventLoopDelay: number; }; /** plugin-specific configuration which can later be accessed via connection.settings.plugins. Provides a place to store and pass connection-specific plugin configuration. plugins is an object where each key is a plugin name and the value is the configuration. Note the difference between connection.settings.plugins which is used to store configuration values and connection.plugins which is meant for storing run-time state. */ plugins?: any; /** controls how incoming request URIs are matched against the routing table: */ router?: { /** determines whether the paths '/example' and '/EXAMPLE' are considered different resources. Defaults to true. */ isCaseSensitive: boolean; /** removes trailing slashes on incoming paths. Defaults to false. */ stripTrailingSlash: boolean; }; /** a route options object used to set the default configuration for every route. */ routes?: <API key>; state?: IServerState; } /** Note that the options object is deeply cloned and cannot contain any values that are unsafe to perform deep copy on.*/ export interface IServerOptions { /** <API key> configuration which can later be accessed via server.settings.app. 
Note the difference between server.settings.app which is used to store static configuration values and server.app which is meant for storing run-time state. Defaults to {}. */ app?: any; /** sets up server-side caching. Every server includes a default cache for storing application state. By default, a simple memory-based cache is created which has limited capacity and capabilities. hapi uses catbox for its cache which includes support for common storage solutions (e.g. Redis, MongoDB, Memcached, and Riak). Caching is only utilized if methods and plugins explicitly store their state in the cache. The server cache configuration only defines the storage container itself. cache can be assigned: a prototype function (usually obtained by calling require() on a catbox strategy such as require('catbox-redis')). a configuration object with the following options: enginea prototype function or catbox engine object. namean identifier used later when provisioning or configuring caching for server methods or plugins. Each cache name must be unique. A single item may omit the name option which defines the default cache. If every cache includes a name, a default memory cache is provisions as well. sharedif true, allows multiple cache users to share the same segment (e.g. multiple methods using the same cache storage container). Default to false. other options passed to the catbox strategy used. an array of the above object for configuring multiple cache instances, each with a unique name. When an array of objects is provided, multiple cache connections are established and each array item (except one) must include a name. 
*/ cache?: string | ICatBoxCacheOptions | Array<ICatBoxCacheOptions> | any; /** sets the default connections configuration which can be overridden by each connection where: */ connections?: <API key>; /** determines which logged events are sent to the console (this should only be used for development and does not affect which events are actually logged internally and recorded). Set to false to disable all console logging, or to an object*/ debug?: boolean | { /** - a string array of server log tags to be displayed via console.error() when the events are logged via server.log() as well as internally generated server logs. For example, to display all errors, set the option to ['error']. To turn off all console debug messages set it to false. Defaults to uncaught errors thrown in external code (these errors are handled automatically and result in an Internal Server Error response) or runtime errors due to developer error. */ log: string[]; /** - a string array of request log tags to be displayed via console.error() when the events are logged via request.log() as well as internally generated request logs. For example, to display all errors, set the option to ['error']. To turn off all console debug messages set it to false. Defaults to uncaught errors thrown in external code (these errors are handled automatically and result in an Internal Server Error response) or runtime errors due to developer error.*/ request: string[]; }; /** file system related settings*/ files?: { /** sets the maximum number of file etag hash values stored in the etags cache. Defaults to 10000.*/ etagsCacheMaxSize?: number; }; /** process load monitoring*/ load?: { /** the frequency of sampling in milliseconds. Defaults to 0 (no sampling).*/ sampleInterval?: number; }; mime?: any; /** if true, does not load the inert (file and directory support), h2o2 (proxy support), and vision (views support) plugins automatically. The plugins can be loaded manually after construction. 
Defaults to false (plugins loaded). */ minimal?: boolean; /** plugin-specific configuration which can later be accessed via server.settings.plugins. plugins is an object where each key is a plugin name and the value is the configuration. Note the difference between server.settings.plugins which is used to store static configuration values and server.plugins which is meant for storing run-time state. Defaults to {}.*/ plugins?: IDictionary<any>; } export interface IServerViewCompile { (template: string, options: any): void; (template: string, options: any, callback: (err: any, compiled: (context: any, options: any, callback: (err: any, rendered: boolean) => void) => void) => void): void; } export interface <API key> { /** path - the root file path used to resolve and load the templates identified when calling reply.view().Defaults to current working directory.*/ path?: string; /**partialsPath - the root file path where partials are located.Partials are small segments of template code that can be nested and reused throughout other templates.Defaults to no partials support (empty path). 
*/ partialsPath?: string; /**helpersPath - the directory path where helpers are located.Helpers are functions used within templates to perform transformations and other data manipulations using the template context or other inputs.Each '.js' file in the helpers directory is loaded and the file name is used as the helper name.The files must export a single method with the signature function(context) and return a string.Sub - folders are not supported and are ignored.Defaults to no helpers support (empty path).Note that jade does not support loading helpers this way.*/ helpersPath?: string; /**relativeTo - a base path used as prefix for path and partialsPath.No default.*/ relativeTo?: string; /**layout - if set to true or a layout filename, layout support is enabled.A layout is a single template file used as the parent template for other view templates in the same engine.If true, the layout template name must be 'layout.ext' where 'ext' is the engine's extension. Otherwise, the provided filename is suffixed with the engine's extension and loaded.Disable layout when using Jade as it will handle including any layout files independently.Defaults to false.*/ layout?: boolean | string; /**layoutPath - the root file path where layout templates are located (using the relativeTo prefix if present). 
Defaults to path.*/ layoutPath?: string; /**layoutKeyword - the key used by the template engine to denote where primary template content should go.Defaults to 'content'.*/ layoutKeywork?: string; /**encoding - the text encoding used by the templates when reading the files and outputting the result.Defaults to 'utf8'.*/ encoding?: string; /**isCached - if set to false, templates will not be cached (thus will be read from file on every use).Defaults to true.*/ isCached?: boolean; /**allowAbsolutePaths - if set to true, allows absolute template paths passed to reply.view().Defaults to false.*/ allowAbsolutePaths?: boolean; /**allowInsecureAccess - if set to true, allows template paths passed to reply.view() to contain '../'.Defaults to false.*/ allowInsecureAccess?: boolean; /**compileOptions - options object passed to the engine's compile function. Defaults to empty options {}.*/ compileOptions?: any; /**runtimeOptions - options object passed to the returned function from the compile operation.Defaults to empty options {}.*/ runtimeOptions?: any; /**contentType - the content type of the engine results.Defaults to 'text/html'.*/ contentType?: string; /**compileMode - specify whether the engine compile() method is 'sync' or 'async'.Defaults to 'sync'.*/ compileMode?: string; /**context - a global context used with all templates.The global context option can be either an object or a function that takes no arguments and returns a context object.When rendering views, the global context will be merged with any context object specified on the handler or using reply.view().When multiple context objects are used, values from the global context always have lowest precedence.*/ context?: any; } export interface <API key> extends <API key> { /**- the npm module used for rendering the templates.The module object must contain: "module", the rendering function. The required function signature depends on the compileMode settings. 
* If the compileMode is 'sync', the signature is compile(template, options), the return value is a function with signature function(context, options), and the method is allowed to throw errors.If the compileMode is 'async', the signature is compile(template, options, callback) where callback has the signature function(err, compiled) where compiled is a function with signature function(context, options, callback) and callback has the signature function(err, rendered).*/ module: { compile?(template: any, options: any): (context: any, options: any) => void; compile?(template: any, options: any, callback: (err: any, compiled: (context: any, options: any, callback: (err: any, rendered: any) => void) => void) => void): void; }; } /**Initializes the server views manager var Hapi = require('hapi'); var server = new Hapi.Server(); server.views({ engines: { html: require('handlebars'), jade: require('jade') }, path: '/static/templates' }); When server.views() is called within a plugin, the views manager is only available to plugins methods. */ export interface <API key> extends <API key> { /** - required object where each key is a file extension (e.g. 'html', 'hbr'), mapped to the npm module used for rendering the templates.Alternatively, the extension can be mapped to an object with the following options:*/ engines: IDictionary<any> | <API key>; /** defines the default filename extension to append to template names when multiple engines are configured and not explicit extension is provided for a given template. No default value.*/ defaultExtension?: string; } interface IReplyMethods { /** Returns control back to the framework without setting a response. If called in the handler, the response defaults to an empty payload with status code 200. * The data argument is only used for passing back authentication data and is ignored elsewhere. */ continue(credentialData?: any): void; /** Transmits a file from the file system. 
The 'Content-Type' header defaults to the matching mime type based on filename extension. The response flow control rules do not apply. */ file(/** the file path. */ path: string, /** optional settings: */ options?: { /** - an optional filename to specify if sending a 'Content-Disposition' header, defaults to the basename of path*/ filename?: string; /** specifies whether to include the 'Content-Disposition' header with the response. Available values: false - header is not included. This is the default value. 'attachment' 'inline'*/ mode?: boolean | string; /** if true, looks for the same filename with the '.gz' suffix for a pre-compressed version of the file to serve if the request supports content encoding. Defaults to false. */ lookupCompressed: boolean; }): void; /** Concludes the handler activity by returning control over to the router with a templatized view response. the response flow control rules apply. */ view(/** the template filename and path, relative to the templates path configured via the server views manager. */ template: string, /** optional object used by the template to render context-specific result. Defaults to no context {}. */ context?: {}, /** optional object used to override the server's views manager configuration for this response. Cannot override isCached, partialsPath, or helpersPath which are only loaded at initialization. */ options?: any): Response; /** Sets a header on the response */ header(name: string, value: string, options?: IHeaderOptions): Response; /** Concludes the handler activity by returning control over to the router and informing the router that a response has already been sent back directly via request.raw.res and that no further response action is needed The response flow control rules do not apply. */ close(options?: { /** if false, the router will not call request.raw.res.end()) to ensure the response was ended. Defaults to true. */ end?: boolean; }): void; /** Proxies the request to an upstream endpoint. 
the response flow control rules do not apply. */ proxy(/** an object including the same keys and restrictions defined by the route proxy handler options. */ options: IProxyHandlerConfig): void; /** Redirects the client to the specified uri. Same as calling reply().redirect(uri). he response flow control rules apply. */ redirect(uri: string): ResponseRedirect; /** Replies with the specified response */ response(result: any): Response; /** Sets a cookie on the response */ state(name: string, value: any, options?: any): void; /** Clears a cookie on the response */ unstate(name: string, options?: any): void; } /** Concludes the handler activity by setting a response and returning control over to the framework where: erran optional error response. result an optional response payload. Since an request can only have one response regardless if it is an error or success, the reply() method can only result in a single response value. This means that passing both an err and result will only use the err. There is no requirement for either err or result to be (or not) an Error object. The framework will simply use the first argument if present, otherwise the second. The method supports two arguments to be compatible with the common callback pattern of error first. FLOW CONTROL: When calling reply(), the framework waits until process.nextTick() to continue processing the request and transmit the response. This enables making changes to the returned response object before the response is sent. This means the framework will resume as soon as the handler method exits. 
To suspend this behavior, the returned response object supports the following methods: hold(), send() */ export interface IReply extends IReplyMethods { /** * Error-first call signature. Since a request can only have one response, passing both * err and result uses only err. The framework uses the first argument if present, * otherwise the second (compatible with the common error-first callback pattern). */ <T>(err: Error, result?: string | number | boolean | Buffer | stream.Stream | IPromise<T> | T, /** Note that when used to return both an error and credentials in the authentication methods, reply() must be called with three arguments function(err, null, data) where data is the additional authentication information. */ credentialData?: any): IBoom; /** Success signature. Note that if result is a Stream with a statusCode property, that status code will be used as the default response code. */ <T>(result: string | number | boolean | Buffer | stream.Stream | IPromise<T> | T): Response; } /** Concludes the handler activity by setting a response and returning control over to the framework where: err is an optional error response and result an optional response payload. Since a request can only have one response regardless if it is an error or success, the reply() method can only result in a single response value: passing both an err and result will only use the err. There is no requirement for either err or result to be (or not) an Error object; the framework simply uses the first argument if present, otherwise the second. FLOW CONTROL: when calling reply(), the framework waits until process.nextTick() to continue processing the request and transmit the response, enabling changes to the returned response object before it is sent — the framework resumes as soon as the handler method exits. 
To suspend this behavior, the returned response object supports the following methods: hold(), send() */ export interface IStrictReply<T> extends IReplyMethods { (err: Error, result?: IPromise<T> | T, /** Note that when used to return both an error and credentials in the authentication methods, reply() must be called with three arguments function(err, null, data) where data is the additional authentication information. */ credentialData?: any): IBoom; /** Note that if result is a Stream with a statusCode property, that status code will be used as the default response code. */ (result: IPromise<T> | T): Response; } export interface ISessionHandler { (request: Request, reply: IReply): void; <T>(request: Request, reply: IStrictReply<T>): void; } export interface IRequestHandler<T> { (request: Request): T; } export interface IFailAction { (source: string, error: any, next: () => void): void } /** generates a reverse proxy handler */ export interface IProxyHandlerConfig { /** the upstream service host to proxy requests to. The same path on the client request will be used as the path on the host.*/ host?: string; /** the upstream service port. */ port?: number; /** The protocol to use when making a request to the proxied host: 'http' 'https'*/ protocol?: string; /** an absolute URI used instead of the incoming host, port, protocol, path, and query. Cannot be used with host, port, protocol, or mapUri.*/ uri?: string; /** if true, forwards the headers sent from the client to the upstream service being proxied to, headers sent from the upstream service will also be forwarded to the client. Defaults to false.*/ passThrough?: boolean; /** <API key> - if false, any locally defined state is removed from incoming requests before being passed upstream. This is a security feature to prevent local state (e.g. authentication cookies) from leaking upstream to other servers along with the cookies intended for those servers. 
This value can be overridden on a per state basis via the server.state() passThrough option. Defaults to false.*/ <API key>?: boolean; /**acceptEncoding - if false, does not pass-through the 'Accept-Encoding' HTTP header which is useful when using an onResponse post-processing to avoid receiving an encoded response (e.g. gzipped). Can only be used together with passThrough. Defaults to true (passing header).*/ acceptEncoding?: boolean; /** rejectUnauthorized - sets the rejectUnauthorized property on the https agent making the request. This value is only used when the proxied server uses TLS/SSL. When set it will override the node.js rejectUnauthorized property. If false then ssl errors will be ignored. When true the server certificate is verified and an 500 response will be sent when verification fails. This shouldn't be used alongside the agent setting as the agent will be used instead. Defaults to the https agent default value of true.*/ rejectUnauthorized?: boolean; /**if true, sets the 'X-Forwarded-For', 'X-Forwarded-Port', 'X-Forwarded-Proto' headers when making a request to the proxied upstream endpoint. Defaults to false.*/ xforward?: boolean; /** the maximum number of HTTP redirections allowed, to be followed automatically by the handler. Set to false or 0 to disable all redirections (the response will contain the redirection received from the upstream service). If redirections are enabled, no redirections (301, 302, 307, 308) will be passed along to the client, and reaching the maximum allowed redirections will return an error response. Defaults to false.*/ redirects?: boolean | number; /**number of milliseconds before aborting the upstream request. Defaults to 180000 (3 minutes).*/ timeout?: number; /** a function used to map the request URI to the proxied URI. Cannot be used together with host, port, protocol, or uri. The function signature is function(request, callback) where: request - is the incoming request object. 
callback - is function(err, uri, headers) where: err - internal error condition. uri - the absolute proxy URI. headers - optional object where each key is an HTTP request header and the value is the header content.*/ mapUri?: (request: Request, callback: (err: any, uri: string, headers?: { [key: string]: string }) => void) => void; /** a custom function for processing the response from the upstream service before sending to the client. Useful for custom error handling of responses from the proxied endpoint or other payload manipulation. Function signature is function(err, res, request, reply, settings, ttl) where: - err - internal or upstream error returned from attempting to contact the upstream proxy. - res - the node response object received from the upstream service. res is a readable stream (use the wreck module read method to easily convert it to a Buffer or string). - request - is the incoming request object. - reply - the reply interface function. - settings - the proxy handler configuration. - ttl - the upstream TTL in milliseconds if proxy.ttl it set to 'upstream' and the upstream response included a valid 'Cache-Control' header with 'max-age'.*/ onResponse?: (err: any, res: http.ServerResponse, req: Request, reply: IReply, settings: IProxyHandlerConfig, ttl: number) => void; /** if set to 'upstream', applies the upstream response caching policy to the response using the response.ttl() method (or passed as an argument to the onResponse method if provided).*/ ttl?: number; agent?: http.Agent; /** sets the maximum number of sockets available per outgoing proxy host connection. false means use the wreck module default value (Infinity). Does not affect non-proxy outgoing client connections. 
Defaults to Infinity.*/ maxSockets?: boolean | number; } /** TODO: fill in joi definition */ export interface IJoi { } /** a validation function using the signature function(value, options, next) */ export interface IValidationFunction { (/** the object containing the path parameters. */ value: any, /** the server validation options. */ options: any, /** the callback function called when validation is completed. */ next: (err: any, value: any) => void): void; } /** a custom error handler function with the signature 'function(request, reply, source, error)` */ export interface IRouteFailFunction { /** a custom error handler function with the signature 'function(request, reply, source, error)` */ (/** - the [request object]. */ request: Request, /** the continuation reply interface. */ reply: IReply, /** the source of the invalid field (e.g. 'path', 'query', 'payload'). */ source: string, /** the error object prepared for the client response (including the validation function error under error.data). */ error: any): void; } /** Each route can be customize to change the default behavior of the request lifecycle using the following options: */ export interface <API key> { /** application specific configuration.Should not be used by plugins which should use plugins[name] instead. */ app?: any; /** authentication configuration.Value can be: false to disable authentication if a default strategy is set. a string with the name of an authentication strategy registered with server.auth.strategy(). an object */ auth?: boolean | string | { /** the authentication mode.Defaults to 'required' if a server authentication strategy is configured, otherwise defaults to no authentication.Available values: 'required'authentication is required. 'optional'authentication is optional (must be valid if present). 'try'same as 'optional' but allows for invalid authentication. 
*/ mode?: string; /** a string array of strategy names in order they should be attempted.If only one strategy is used, strategy can be used instead with the single string value.Defaults to the default authentication strategy which is available only when a single strategy is configured. */ strategies?: string | Array<string>; /** if set, the payload (in requests other than 'GET' and 'HEAD') is authenticated after it is processed.Requires a strategy with payload authentication support (e.g.Hawk).Cannot be set to a value other than 'required' when the scheme sets the options.payload to true.Available values: falseno payload authentication.This is the default value. 'required'payload authentication required.This is the default value when the scheme sets options.payload to true. 'optional'payload authentication performed only when the client includes payload authentication information (e.g.hash attribute in Hawk). */ payload?: string; /** the application scope required to access the route.Value can be a scope string or an array of scope strings.The authenticated credentials object scope property must contain at least one of the scopes defined to access the route.Set to false to remove scope requirements.Defaults to no scope required. */ scope?: string | Array<string> | boolean; /** the required authenticated entity type.If set, must match the entity value of the authentication credentials.Available values: anythe authentication can be on behalf of a user or application.This is the default value. userthe authentication must be on behalf of a user. appthe authentication must be on behalf of an application. */ entity?: string; /** * an object or array of objects specifying the route access rules. Each rule is evaluated against an incoming * request and access is granted if at least one rule matches. Each rule object must include at least one of: */ access?: <API key> | <API key>[]; }; /** an object passed back to the provided handler (via this) when called. 
*/ bind?: any; /** if the route method is 'GET', the route can be configured to include caching directives in the response using the following options */ cache?: { /** mines the privacy flag included in clientside caching using the 'Cache-Control' header.Values are: fault'no privacy flag.This is the default setting. 'public'mark the response as suitable for public caching. 'private'mark the response as suitable only for private caching. */ privacy: string; /** relative expiration expressed in the number of milliseconds since the item was saved in the cache.Cannot be used together with expiresAt. */ expiresIn: number; /** time of day expressed in 24h notation using the 'HH:MM' format, at which point all cache records for the route expire.Cannot be used together with expiresIn. */ expiresAt: string; }; /** the Cross- Origin Resource Sharing protocol allows browsers to make cross- origin API calls.CORS is required by web applications running inside a browser which are loaded from a different domain than the API server.CORS headers are disabled by default. To enable, set cors to true, or to an object with the following options: */ cors?: { /** a strings array of allowed origin servers ('<API key>').The array can contain any combination of fully qualified origins along with origin strings containing a wildcard '' character, or a single `''origin string. Defaults to any origin['*']`. */ origin?: Array<string>; /** if true, matches the value of the incoming 'Origin' header to the list of origin values ('*' matches anything) and if a match is found, uses that as the value of the '<API key>' response header.When false, the origin config is returned as- is.Defaults to true. */ matchOrigin?: boolean; /** if false, prevents the connection from returning the full list of non- wildcard origin values if the incoming origin header does not match any of the values.Has no impact if matchOrigin is set to false.Defaults to true. 
*/ isOriginExposed?: boolean; /** number of seconds the browser should cache the CORS response ('<API key>').The greater the value, the longer it will take before the browser checks for changes in policy.Defaults to 86400 (one day). */ maxAge?: number; /** a strings array of allowed headers ('<API key>').Defaults to ['Authorization', 'Content-Type', 'If-None-Match']. */ headers?: string[]; /** a strings array of additional headers to headers.Use this to keep the default headers in place. */ additionalHeaders?: string[]; /** a strings array of allowed HTTP methods ('<API key>').Defaults to ['GET', 'HEAD', 'POST', 'PUT', 'DELETE', 'OPTIONS']. */ methods?: string[]; /** a strings array of additional methods to methods.Use this to keep the default methods in place. */ additionalMethods?: string[]; /** a strings array of exposed headers ('<API key>').Defaults to ['WWW-Authenticate', '<API key>']. */ exposedHeaders?: string[]; /** a strings array of additional headers to exposedHeaders.Use this to keep the default headers in place. */ <API key>?: string[]; /** if true, allows user credentials to be sent ('<API key>').Defaults to false. */ credentials?: boolean; /** if false, preserves existing CORS headers set manually before the response is sent.Defaults to true. */ override?: boolean; }; /** defines the behavior for serving static resources using the built-in route handlers for files and directories: */ files?: {/** determines the folder relative paths are resolved against when using the file and directory handlers. */ relativeTo: string; }; /** an alternative location for the route handler option. */ handler?: ISessionHandler | string | IRouteHandlerConfig; /** an optional unique identifier used to look up the route using server.lookup(). */ id?: number; /** optional arguments passed to JSON.stringify() when converting an object or error response to a string payload.Supports the following: */ json?: { /** the replacer function or array.Defaults to no action. 
*/ replacer?: Function | string[]; /** number of spaces to indent nested object keys.Defaults to no indentation. */ space?: number | string; /** string suffix added after conversion to JSON string.Defaults to no suffix. */ suffix?: string; }; /** enables JSONP support by setting the value to the query parameter name containing the function name used to wrap the response payload.For example, if the value is 'callback', a request comes in with 'callback=me', and the JSON response is '{ "a":"b" }', the payload will be 'me({ "a":"b" });'.Does not work with stream responses. */ jsonp?: string; /** determines how the request payload is processed: */ payload?: { /** the type of payload representation requested. The value must be one of: 'data'the incoming payload is read fully into memory.If parse is true, the payload is parsed (JSON, formdecoded, multipart) based on the 'Content- Type' header.If parse is false, the raw Buffer is returned.This is the default value except when a proxy handler is used. 'stream'the incoming payload is made available via a Stream.Readable interface.If the payload is 'multipart/form-data' and parse is true, fields values are presented as text while files are provided as streams.File streams from a 'multipart/form-data' upload will also have a property hapi containing filename and headers properties. 'file'the incoming payload in written to temporary file in the directory specified by the server's payload.uploads settings. If the payload is 'multipart/ formdata' and parse is true, fields values are presented as text while files are saved. Note that it is the sole responsibility of the application to clean up the files generated by the framework. This can be done by keeping track of which files are used (e.g. using the request.app object), and listening to the server 'response' event to perform any needed cleaup. */ output?: string; /** can be true, false, or gunzip; determines if the incoming payload is processed or presented raw. 
true and gunzip includes gunzipping when the appropriate 'Content-Encoding' is specified on the received request. If parsing is enabled and the 'Content-Type' is known (for the whole payload as well as parts), the payload is converted into an object when possible. If the format is unknown, a Bad Request (400) error response is sent. Defaults to true, except when a proxy handler is used. The supported mime types are: 'application/json' 'application/<API key>' 'application/octet-stream' 'text/ *' 'multipart/form-data' */ parse?: string | boolean; /** a string or an array of strings with the allowed mime types for the endpoint.Defaults to any of the supported mime types listed above.Note that allowing other mime types not listed will not enable them to be parsed, and that if parsing mode is 'parse', the request will result in an error response. */ allow?: string | string[]; /** a mime type string overriding the 'Content-Type' header value received.Defaults to no override. */ override?: string; /** limits the size of incoming payloads to the specified byte count.Allowing very large payloads may cause the server to run out of memory.Defaults to 1048576 (1MB). */ maxBytes?: number; /** payload reception timeout in milliseconds.Sets the maximum time allowed for the client to transmit the request payload (body) before giving up and responding with a Request Timeout (408) error response.Set to false to disable.Defaults to 10000 (10 seconds). */ timeout?: number; /** the directory used for writing file uploads.Defaults to os.tmpDir(). */ uploads?: string; /** determines how to handle payload parsing errors. Allowed values are: 'error'return a Bad Request (400) error response. This is the default value. 'log'report the error but continue processing the request. 'ignore'take no action and continue processing the request. */ failAction?: string; }; /** pluginspecific configuration.plugins is an object where each key is a plugin name and the value is the plugin configuration. 
*/ plugins?: IDictionary<any>; /** an array with [route prerequisites] methods which are executed in serial or in parallel before the handler is called. */ pre?: any[]; /** validation rules for the outgoing response payload (response body).Can only validate object response: */ response?: { /** the default HTTP status code when the payload is empty. Value can be 200 or 204. Note that a 200 status code is converted to a 204 only at the time or response transmission (the response status code will remain 200 throughout the request lifecycle unless manually set). Defaults to 200. */ emptyStatusCode?: number; /** the default response object validation rules (for all non-error responses) expressed as one of: true - any payload allowed (no validation performed). This is the default. false - no payload allowed. a Joi validation object. a validation function using the signature function(value, options, next) where: value - the object containing the response object. options - the server validation options. next(err) - the callback function called when validation is completed. */ schema?: boolean | any; /** HTTP status- codespecific validation rules.The status key is set to an object where each key is a 3 digit HTTP status code and the value has the same definition as schema.If a response status code is not present in the status object, the schema definition is used, expect for errors which are not validated by default. */ status?: { [statusCode: number]: boolean | any }; /** the percent of responses validated (0100).Set to 0 to disable all validation.Defaults to 100 (all responses). */ sample?: number; /** defines what to do when a response fails validation.Options are: errorreturn an Internal Server Error (500) error response.This is the default value. loglog the error but send the response. */ failAction?: string; /** if true, applies the validation rule changes to the response.Defaults to false. 
*/ modify?: boolean; options?: any; }; /** sets common security headers (disabled by default).To enable set security to true or to an object with the following options */ security?: boolean | { /** controls the '<API key>' header.If set to true the header will be set to max- age=15768000, if specified as a number the maxAge parameter will be set to that number.Defaults to true.You may also specify an object with the following fields: */ hsts?: boolean | number | { /** the max- age portion of the header, as a number.Default is 15768000. */ maxAge?: number; /** a boolean specifying whether to add the includeSubdomains flag to the header. */ includeSubdomains?: boolean; /** a boolean specifying whether to add the 'preload' flag (used to submit domains inclusion in Chrome's HTTP Strict Transport Security (HSTS) preload list) to the header. */ preload?: boolean; }; /** controls the 'X-Frame-Options' header.When set to true the header will be set to DENY, you may also specify a string value of 'deny' or 'sameorigin'.To use the 'allow-from' rule, you must set this to an object with the following fields: */ xframe?: { /** either 'deny', 'sameorigin', or 'allow-from' */ rule: string; /** when rule is 'allow-from' this is used to form the rest of the header, otherwise this field is ignored.If rule is 'allow-from' but source is unset, the rule will be automatically changed to 'sameorigin'. */ source: string; }; /** boolean that controls the 'X-XSS-PROTECTION' header for IE.Defaults to true which sets the header to equal '1; mode=block'.NOTE: This setting can create a security vulnerability in versions of IE below 8, as well as unpatched versions of IE8.See here and here for more information.If you actively support old versions of IE, it may be wise to explicitly set this flag to false. */ xss?: boolean; /** boolean controlling the 'X-Download-Options' header for IE, preventing downloads from executing in your context.Defaults to true setting the header to 'noopen'. 
*/ noOpen?: boolean; /** boolean controlling the '<API key>' header.Defaults to true setting the header to its only and default option, 'nosniff'. */ noSniff?: boolean; }; /** HTTP state management (cookies) allows the server to store information on the client which is sent back to the server with every request (as defined in RFC 6265).state supports the following options: */ state?: { /** determines if incoming 'Cookie' headers are parsed and stored in the request.state object.Defaults to true. */ parse: boolean; /** determines how to handle cookie parsing errors.Allowed values are: 'error'return a Bad Request (400) error response.This is the default value. 'log'report the error but continue processing the request. 'ignore'take no action. */ failAction: string; }; /** request input validation rules for various request components.When using a Joi validation object, the values of the other inputs (i.e.headers, query, params, payload, and auth) are made available under the validation context (accessible in rules as Joi.ref('$query.key')).Note that validation is performed in order(i.e.headers, params, query, payload) and if type casting is used (converting a string to number), the value of inputs not yet validated will reflect the raw, unvalidated and unmodified values.The validate object supports: */ validate?: { /** validation rules for incoming request headers.Values allowed: * trueany headers allowed (no validation performed).This is the default. falseno headers allowed (this will cause all valid HTTP requests to fail). a Joi validation object. a validation function using the signature function(value, options, next) where: valuethe object containing the request headers. optionsthe server validation options. next(err, value)the callback function called when validation is completed. 
*/ headers?: boolean | IJoi | IValidationFunction; /** validation rules for incoming request path parameters, after matching the path against the route and extracting any parameters then stored in request.params.Values allowed: trueany path parameters allowed (no validation performed).This is the default. falseno path variables allowed. a Joi validation object. a validation function using the signature function(value, options, next) where: valuethe object containing the path parameters. optionsthe server validation options. next(err, value)the callback function called when validation is completed. */ params?: boolean | IJoi | IValidationFunction; /** validation rules for an incoming request URI query component (the key- value part of the URI between '?' and '#').The query is parsed into its individual key- value pairs (using the qs module) and stored in request.query prior to validation.Values allowed: trueany query parameters allowed (no validation performed).This is the default. falseno query parameters allowed. a Joi validation object. a validation function using the signature function(value, options, next) where: valuethe object containing the query parameters. optionsthe server validation options. next(err, value)the callback function called when validation is completed. */ query?: boolean | IJoi | IValidationFunction; /** validation rules for an incoming request payload (request body).Values allowed: trueany payload allowed (no validation performed).This is the default. falseno payload allowed. a Joi validation object. a validation function using the signature function(value, options, next) where: valuethe object containing the payload object. optionsthe server validation options. next(err, value)the callback function called when validation is completed. */ payload?: boolean | IJoi | IValidationFunction; /** an optional object with error fields copied into every validation error response. 
*/ errorFields?: any; /** determines how to handle invalid requests.Allowed values are: 'error'return a Bad Request (400) error response.This is the default value. 'log'log the error but continue processing the request. 'ignore'take no action. OR a custom error handler function with the signature 'function(request, reply, source, error)` where: requestthe request object. replythe continuation reply interface. sourcethe source of the invalid field (e.g. 'path', 'query', 'payload'). errorthe error object prepared for the client response (including the validation function error under error.data). */ failAction?: string | IRouteFailFunction; options?: any; }; /** define timeouts for processing durations: */ timeout?: { /** response timeout in milliseconds.Sets the maximum time allowed for the server to respond to an incoming client request before giving up and responding with a Service Unavailable (503) error response.Disabled by default (false). */ server: boolean | number; /** by default, node sockets automatically timeout after 2 minutes.Use this option to override this behavior.Defaults to undefined which leaves the node default unchanged.Set to false to disable socket timeouts. */ socket: boolean | number; }; /** ONLY WHEN ADDING NEW ROUTES (not when setting defaults). *route description used for generating documentation (string). */ description?: string; /** ONLY WHEN ADDING NEW ROUTES (not when setting defaults). *route notes used for generating documentation (string or array of strings). */ notes?: string | string[]; /** ONLY WHEN ADDING NEW ROUTES (not when setting defaults). *route tags used for generating documentation (array of strings). */ tags?: string[] } /** * specifying the route access rules. Each rule is evaluated against an incoming request and access is granted if at least one rule matches */ export interface <API key> { /** * the application scope required to access the route. Value can be a scope string or an array of scope strings. 
* The authenticated credentials object scope property must contain at least one of the scopes defined to access the route. * If a scope string begins with a + character, that scope is required. If a scope string begins with a ! character, * that scope is forbidden. For example, the scope ['!a', '+b', 'c', 'd'] means the incoming request credentials' * scope must not include 'a', must include 'b', and must include one of 'c' or 'd'. You may also access properties * on the request object (query and params) to populate a dynamic scope by using {} characters around the property name, * such as 'user-{params.id}'. Defaults to false (no scope requirements). */ scope?: string | Array<string> | boolean; /** the required authenticated entity type; if set, must match the entity value of the authentication credentials: * 'any' (default) — on behalf of a user or application; * 'user' — requires a user attribute in the credentials object returned by the authentication strategy; * 'app' — requires the absence of a user attribute in the credentials object. */ entity?: string; } export interface IServerRealm { /** when the server object is provided as an argument to the plugin register() method, modifiers provides the registration preferences passed to the server.register() method */ modifiers: { /** routes preferences: */ route: { /** the route path prefix used by any calls to server.route() from the server. */ prefix: string; /** the route virtual host settings used by any calls to server.route() from the server. */ vhost: string; }; }; /** the active plugin name (empty string if at the server root). */ plugin: string; /** plugin-specific state shared only among activities sharing the same active state; each key is a plugin name and the value is the plugin state. */ plugins: IDictionary<any>; /** settings overrides */ settings: { files: { relativeTo: any; }; bind: any; } } export interface IServerState { /** the cookie name string. */ name: string; /** the optional cookie settings: */ options: { /** time-to-live in milliseconds. Defaults to null (session cookies, deleted when the browser is closed). */ ttl: number; /** sets the 'Secure' flag. Defaults to false. */ isSecure: boolean; /** sets the 'HttpOnly' flag. Defaults to false. (Fix: added the member separator missing in the original declaration.) */ isHttpOnly: boolean; /** the path scope. Defaults to null (no path). */ path: any; /** the domain scope. Defaults to null (no domain). */ domain: any; /** if present and the cookie was not received from the client or explicitly set by the route handler, the cookie is automatically added to the response with the provided value; may be a function(request, next) where next uses the function(err, value) signature. */ autoValue: (request: Request, next: (err: any, value: any) => void) => void; /** encoding performed on the provided value before serialization: 'none' (default, value must be a string) | 'base64' | 'base64json' | 'form' | 'iron' (encrypts and signs the value using iron). */ encoding: string; /** an object used to calculate an HMAC for cookie integrity validation; does not provide privacy, only verification that the value was generated by the server. Redundant when 'iron' encoding is used. */ sign: { /** algorithm options. Defaults to require('iron').defaults.integrity. */ integrity: any; /** password used for HMAC key generation. */ password: string; }; /** password used for 'iron' encoding. */ password: string; /** options for 'iron' encoding. Defaults to require('iron').defaults. */ iron: any; /** if false, errors are ignored and treated as missing cookies. */ ignoreErrors: boolean; /** if true, automatically instruct the client to remove invalid cookies. Defaults to false. */ clearInvalid: boolean; /** if false, allows any cookie value including values in violation of RFC 6265. Defaults to true. */ strictHeader: boolean; /** overrides the default proxy passThrough setting. */ passThrough: any; }; } export interface IFileHandlerConfig { /** a path string or function as described above. */ path: string; /** an optional filename to specify if sending a 'Content-Disposition' header; defaults to the basename of path. */ filename?: string; /** specifies whether to include the 'Content-Disposition' header with the response: false (default, header not included) | 'attachment' | 'inline'. */ mode?: boolean | string; /** if true, looks for the same filename with the '.gz' suffix for a pre-compressed version of the file to serve if the request supports content encoding. Defaults to false. (Fix: declared optional to match its documented default, consistent with the sibling options.) */ lookupCompressed?: boolean; } export interface IRouteHandlerConfig { /** generates a static file endpoint for serving a single file. file can be set to: a relative or absolute file path string (relative paths are resolved based on the route files configuration). 
a function with the signature function(request) which returns the relative or absolute file path. an object with the following options */ file?: string | IRequestHandler<void> | IFileHandlerConfig; /** directory - generates a directory endpoint for serving static content from a directory. Routes using the directory handler must include a path parameter at the end of the path string (e.g. /path/to/somewhere/{param} where the parameter name does not matter). The path parameter can use any of the parameter options (e.g. {param} for one level files only, {param?} for one level files or the directory root, {param*} for any level, or {param*3} for a specific level). If additional path parameters are present, they are ignored for the purpose of selecting the file system resource. The directory handler is an object with the following options: path - (required) the directory root path (relative paths are resolved based on the route files configuration). Value can be: a single path string used as the prefix for any resources requested by appending the request path parameter to the provided string. an array of path strings. Each path will be attempted in order until a match is found (by following the same process as the single path string). a function with the signature function(request) which returns the path string or an array of path strings. If the function returns an error, the error is passed back to the client in the response. index - optional boolean|string|string[], determines if an index file will be served if found in the folder when requesting a directory. The given string or strings specify the name(s) of the index file to look for. If true, looks for 'index.html'. Any falsy value disables index file lookup. Defaults to true. listing - optional boolean, determines if directory listing is generated when a directory is requested without an index document. Defaults to false. showHidden - optional boolean, determines if hidden files will be shown and served. 
Defaults to false. redirectToSlash - optional boolean, determines if requests for a directory without a trailing slash are redirected to the same path with the missing slash. Useful for ensuring relative links inside the response are resolved correctly. Disabled when the server config router.stripTrailingSlash is true.Defaults to false. lookupCompressed - optional boolean, instructs the file processor to look for the same filename with the '.gz' suffix for a pre-compressed version of the file to serve if the request supports content encoding. Defaults to false. defaultExtension - optional string, appended to file requests if the requested file is not found. Defaults to no extension.*/ directory?: { path: string | Array<string> | IRequestHandler<string> | IRequestHandler<Array<string>>; index?: boolean | string | string[]; listing?: boolean; showHidden?: boolean; redirectToSlash?: boolean; lookupCompressed?: boolean; defaultExtension?: string; }; proxy?: IProxyHandlerConfig; view?: string | { template: string; context: { payload: any; params: any; query: any; pre: any; } }; config?: { handler: any; bind: any; app: any; plugins: { [name: string]: any; }; pre: Array<() => void>; validate: { headers: any; params: any; query: any; payload: any; errorFields?: any; failAction?: string | IFailAction; }; payload: { output: { data: any; stream: any; file: any; }; parse?: any; allow?: string | Array<string>; override?: string; maxBytes?: number; uploads?: number; failAction?: string; }; response: { schema: any; sample: number; failAction: string; }; cache: { privacy: string; expiresIn: number; expiresAt: number; }; auth: string | boolean | { mode: string; strategies: Array<string>; payload?: boolean | string; tos?: boolean | string; scope?: string | Array<string>; entity: string; }; cors?: boolean; jsonp?: string; description?: string; notes?: string | Array<string>; tags?: Array<string>; }; } /** Route configuration The route configuration object*/ export interface 
IRouteConfiguration { /** - (required) the absolute path used to match incoming requests (must begin with '/'). Incoming requests are compared to the configured paths based on the connection router configuration option.The path can include named parameters enclosed in {} which will be matched against literal values in the request as described in Path parameters.*/ path: string; /** - (required) the HTTP method.Typically one of 'GET', 'POST', 'PUT', 'PATCH', 'DELETE', or 'OPTIONS'.Any HTTP method is allowed, except for 'HEAD'.Use '*' to match against any HTTP method (only when an exact match was not found, and any match with a specific method will be given a higher priority over a wildcard match). * Can be assigned an array of methods which has the same result as adding the same route with different methods manually.*/ method: string | string[]; /** - an optional domain string or an array of domain strings for limiting the route to only requests with a matching host header field.Matching is done against the hostname part of the header only (excluding the port).Defaults to all hosts.*/ vhost?: string; /** - (required) the function called to generate the response after successful authentication and validation.The handler function is described in Route handler.If set to a string, the value is parsed the same way a prerequisite server method string shortcut is processed.Alternatively, handler can be assigned an object with a single key using the name of a registered handler type and value with the options passed to the registered handler.*/ handler?: ISessionHandler | string | IRouteHandlerConfig; /** - additional route options.*/ config?: <API key>; } export interface IRoute { /** the route HTTP method. */ method: string; /** the route path. */ path: string; /** the route vhost option if configured. */ vhost?: string | Array<string>; /** the [active realm] associated with the route.*/ realm: IServerRealm; /** the [route options] object with all defaults applied. 
*/ settings: <API key>; } export interface IServerAuthScheme { /** authenticate(request, reply) - required function called on each incoming request configured with the authentication scheme where: request - the request object. reply - the reply interface the authentication method must call when done authenticating the request where: reply(err, response, result) - is called if authentication failed where: err - any authentication error. response - any authentication response action such as redirection. Ignored if err is present, otherwise required. result - an object containing: credentials - the authenticated credentials. artifacts - optional authentication artifacts. reply.continue(result) - is called if authentication succeeded where: result - same object as result above. When the scheme authenticate() method implementation calls reply() with an error condition, the specifics of the error affect whether additional authentication strategies will be attempted if configured for the route. .If the err returned by the reply() method includes a message, no additional strategies will be attempted. If the err does not include a message but does include a scheme name (e.g. Boom.unauthorized(null, 'Custom')), additional strategies will be attempted in order of preference. var server = new Hapi.Server(); server.connection({ port: 80 }); var scheme = function (server, options) { return { authenticate: function (request, reply) { var req = request.raw.req; var authorization = req.headers.authorization; if (!authorization) { return reply(Boom.unauthorized(null, 'Custom')); } return reply(null, { credentials: { user: 'john' } }); } }; }; server.auth.scheme('custom', scheme);*/ authenticate(request: Request, reply: IReply): void; authenticate<T>(request: Request, reply: IStrictReply<T>): void; /** payload(request, reply) - optional function called to authenticate the request payload where: request - the request object. 
reply(err, response) - is called if authentication failed where: err - any authentication error. response - any authentication response action such as redirection. Ignored if err is present, otherwise required. reply.continue() - is called if payload authentication succeeded. When the scheme payload() method returns an error with a message, it means payload validation failed due to bad payload. If the error has no message but includes a scheme name (e.g. Boom.unauthorized(null, 'Custom')), authentication may still be successful if the route auth.payload configuration is set to 'optional'.*/ payload?(request: Request, reply: IReply): void; payload?<T>(request: Request, reply: IStrictReply<T>): void; /** response(request, reply) - optional function called to decorate the response with authentication headers before the response headers or payload is written where: request - the request object. reply(err, response) - is called if an error occurred where: err - any authentication error. response - any authentication response to send instead of the current response. Ignored if err is present, otherwise required. reply.continue() - is called if the operation succeeded.*/ response?(request: Request, reply: IReply): void; response?<T>(request: Request, reply: IStrictReply<T>): void; /** an optional object */ options?: { /** if true, requires payload validation as part of the scheme and forbids routes from disabling payload auth validation. Defaults to false.*/ payload: boolean; } } /**the response object where: statusCode - the HTTP status code. headers - an object containing the headers set. payload - the response payload string. rawPayload - the raw response payload buffer. raw - an object with the injection request and response objects: req - the simulated node request object. res - the simulated node response object. result - the raw handler response (e.g. when not a stream or a view) before it is serialized for transmission. 
If not available, the value is set to payload. Useful for inspection and reuse of the internal objects returned (instead of parsing the response string). request - the request object.*/ export interface <API key> { statusCode: number; headers: IDictionary<string>; payload: string; rawPayload: Buffer; raw: { req: http.IncomingMessage; res: http.ServerResponse }; result: string; request: Request; } export interface IServerInject { (options: string | <API key>, callback: (res: <API key>) => void): void; (options: string | <API key>): IPromise<<API key>>; } export interface <API key> { /** the request HTTP method (e.g. 'POST'). Defaults to 'GET'.*/ method: string; /** the request URL. If the URI includes an authority (e.g. 'example.com:8080'), it is used to automatically set an HTTP 'Host' header, unless one was specified in headers.*/ url: string; /** an object with optional request headers where each key is the header name and the value is the header content. Defaults to no additions to the default Shot headers.*/ headers?: IDictionary<string>; /** an optional string, buffer or object containing the request payload. In case of an object it will be converted to a string for you. Defaults to no payload. Note that payload processing defaults to 'application/json' if no 'Content-Type' header provided.*/ payload?: string | {} | Buffer; /** an optional credentials object containing authentication information. The credentials are used to bypass the default authentication strategies, and are validated directly as if they were received via an authentication scheme. Defaults to no credentials.*/ credentials?: any; /** an optional artifacts object containing authentication artifact information. The artifacts are used to bypass the default authentication strategies, and are validated directly as if they were received via an authentication scheme. Ignored if set without credentials. 
Defaults to no artifacts.*/ artifacts?: any; /** sets the initial value of request.app*/ app?: any; /** sets the initial value of request.plugins*/ plugins?: any; /** allows access to routes with config.isInternal set to true. Defaults to false.*/ allowInternals?: boolean; /** sets the remote address for the incoming connection.*/ remoteAddress?: boolean; /**object with options used to simulate client request stream conditions for testing: error - if true, emits an 'error' event after payload transmission (if any). Defaults to false. close - if true, emits a 'close' event after payload transmission (if any). Defaults to false. end - if false, does not end the stream. Defaults to true.*/ simulate?: { error: boolean; close: boolean; end: boolean; }; } /** host - optional host to filter routes matching a specific virtual host. Defaults to all virtual hosts. The return value is an array where each item is an object containing: info - the connection.info the connection the table was generated for. labels - the connection labels. table - an array of routes where each route contains: settings - the route config with defaults applied. method - the HTTP method in lower case. 
path - the route path.*/ export interface IConnectionTable { info: any; labels: any; table: IRoute[]; } export interface ICookieSettings { /** - time - to - live in milliseconds.Defaults to null (session time- life - cookies are deleted when the browser is closed).*/ ttl?: number; /** - sets the 'Secure' flag.Defaults to false.*/ isSecure?: boolean; /** - sets the 'HttpOnly' flag.Defaults to false.*/ isHttpOnly?: boolean; /** - the path scope.Defaults to null (no path).*/ path?: string; /** - the domain scope.Defaults to null (no domain).*/ domain?: any; /** - if present and the cookie was not received from the client or explicitly set by the route handler, the cookie is automatically added to the response with the provided value.The value can be a function with signature function(request, next) where: request - the request object. next - the continuation function using the function(err, value) signature.*/ autoValue?: (request: Request, next: (err: any, value: any) => void) => void; /** - encoding performs on the provided value before serialization.Options are: 'none' - no encoding.When used, the cookie value must be a string.This is the default value. 'base64' - string value is encoded using Base64. 'base64json' - object value is JSON- stringified than encoded using Base64. 'form' - object value is encoded using the x- www - form - urlencoded method. */ encoding?: string; /** - an object used to calculate an HMAC for cookie integrity validation.This does not provide privacy, only a mean to verify that the cookie value was generated by the server.Redundant when 'iron' encoding is used.Options are: integrity - algorithm options.Defaults to require('iron').defaults.integrity. password - password used for HMAC key generation. 
*/ sign?: { integrity: any; password: string; } password?: string; iron?: any; ignoreErrors?: boolean; clearInvalid?: boolean; strictHeader?: boolean; passThrough?: any; } /** method - the method function with the signature is one of: function(arg1, arg2, ..., argn, next) where: arg1, arg2, etc. - the method function arguments. next - the function called when the method is done with the signature function(err, result, ttl) where: err - error response if the method failed. result - the return value. ttl - 0 if result is valid but cannot be cached. Defaults to cache policy. function(arg1, arg2, ..., argn) where: arg1, arg2, etc. - the method function arguments. the callback option is set to false. the method must returns a value (result, Error, or a promise) or throw an Error.*/ export interface IServerMethod { //(): void; //(next: (err: any, result: any, ttl: number) => void): void; //(arg1: any): void; //(arg1: any, arg2: any, next: (err: any, result: any, ttl: number) => void): void; //(arg1: any, arg2: any): void; (...args: any[]): void; } /** options - optional configuration: bind - a context object passed back to the method function (via this) when called. Defaults to active context (set via server.bind() when the method is registered. cache - the same cache configuration used in server.cache(). callback - if false, expects the method to be a synchronous function. Note that using a synchronous function with caching will convert the method interface to require a callback as an additional argument with the signature function(err, result, cached, report) since the cache interface cannot return values synchronously. Defaults to true. generateKey - a function used to generate a unique key (for caching) from the arguments passed to the method function (the callback argument is not passed as input). The server will automatically generate a unique key if the function's arguments are all of types 'string', 'number', or 'boolean'. 
However if the method uses other types of arguments, a key generation function must be provided which takes the same arguments as the function and returns a unique string (or null if no key can be generated).*/ export interface <API key> { bind?: any; cache?: ICatBoxCacheOptions; callback?: boolean; generateKey?(args: any[]): string; } /** Request object The request object is created internally for each incoming request. It is different from the node.js request object received from the HTTP server callback (which is available in request.raw.req). The request object methods and properties change throughout the request lifecycle. Request events The request object supports the following events: 'peek' - emitted for each chunk of payload data read from the client connection. The event method signature is function(chunk, encoding). 'finish' - emitted when the request payload finished reading. The event method signature is function (). 'disconnect' - emitted when a request errors or aborts unexpectedly. var Crypto = require('crypto'); var Hapi = require('hapi'); var server = new Hapi.Server(); server.connection({ port: 80 }); server.ext('onRequest', function (request, reply) { var hash = Crypto.createHash('sha1'); request.on('peek', function (chunk) { hash.update(chunk); }); request.once('finish', function () { console.log(hash.digest('hex')); }); request.once('disconnect', function () { console.error('request aborted'); }); return reply.continue(); });*/ export class Request extends Events.EventEmitter { /** <API key> state. Provides a safe place to store application data without potential conflicts with the framework. Should not be used by plugins which should use plugins[name].*/ app: any; /** authentication information*/ auth: { /** true is the request has been successfully authenticated, otherwise false.*/ isAuthenticated: boolean; /** the credential object received during the authentication process. The presence of an object does not mean successful authentication. 
can be set in the validate function's callback.*/ credentials: any; /** an artifact object received from the authentication strategy and used in <API key> actions.*/ artifacts: any; /** the route authentication mode.*/ mode: any; /** the authentication error if failed and mode set to 'try'.*/ error: any; }; /** the connection used by this request*/ connection: ServerConnection; /** the node domain object used to protect against exceptions thrown in extensions, handlers and route prerequisites. Can be used to manually bind callback functions otherwise bound to other domains.*/ domain: any; /** the raw request headers (references request.raw.headers).*/ headers: IDictionary<string>; /** a unique request identifier (using the format '{now}:{connection.info.id}:{5 digits counter}').*/ id: number; /** request information */ info: { /** the request preferred encoding. */ acceptEncoding: string; /** if CORS is enabled for the route, contains the following: */ cors: { isOriginMatch: boolean; /** true if the request 'Origin' header matches the configured CORS restrictions. Set to false if no 'Origin' header is found or if it does not match. Note that this is only available after the 'onRequest' extension point as CORS is configured per-route and no routing decisions are made at that point in the request lifecycle. */ }; /** content of the HTTP 'Host' header (e.g. 'example.com:8080'). */ host: string; /** the hostname part of the 'Host' header (e.g. 'example.com').*/ hostname: string; /** request reception timestamp. */ received: number; /** content of the HTTP 'Referrer' (or 'Referer') header. */ referrer: string; /** remote client IP address. */ remoteAddress: string; /** remote client port. */ remotePort: number; /** request response timestamp (0 is not responded yet). */ responded: number; }; /** the request method in lower case (e.g. 'get', 'post'). */ method: string; /** the parsed content-type header. 
Only available when payload parsing enabled and no payload error occurred. */ mime: string; /** an object containing the values of params, query, and payload before any validation modifications made. Only set when input validation is performed.*/ orig: { params: any; query: any; payload: any; }; /** an object where each key is a path parameter name with matching value as described in Path parameters.*/ params: IDictionary<string>; /** an array containing all the path params values in the order they appeared in the path.*/ paramsArray: string[]; /** the request URI's path component. */ path: string; /** the request payload based on the route payload.output and payload.parse settings.*/ payload: stream.Readable | Buffer | any; /** plugin-specific state. Provides a place to store and pass request-level plugin data. The plugins is an object where each key is a plugin name and the value is the state.*/ plugins: any; /** an object where each key is the name assigned by a route prerequisites function. The values are the raw values provided to the continuation function as argument. For the wrapped response object, use responses.*/ pre: IDictionary<any>; /** the response object when set. The object can be modified but must not be assigned another object. To replace the response with another from within an extension point, use reply(response) to override with a different response. Contains null when no response has been set (e.g. when a request terminates prematurely when the client disconnects).*/ response: Response; preResponses: any; /**an object containing the query parameters.*/ query: any; /** an object containing the Node HTTP server objects. Direct interaction with these raw objects is not recommended.*/ raw: { req: http.IncomingMessage; res: http.ServerResponse; }; /** the route public interface.*/ route: IRoute; /** the server object. 
*/ server: Server; /** an object containing parsed HTTP state information (cookies) where each key is the cookie name and value is the matching cookie content after processing using any registered cookie definition. */ state: any; /** complex object containing details on the url */ url: { /** null when i tested */ auth: any; /** null when i tested */ hash: any; /** null when i tested */ host: any; /** null when i tested */ hostname: any; href: string; path: string; /** path without search*/ pathname: string; /** null when i tested */ port: any; /** null when i tested */ protocol: any; /** querystring parameters*/ query: IDictionary<string>; /** querystring parameters as a string*/ search: string; /** null when i tested */ slashes: any; }; /** request.setUrl(url) Available only in 'onRequest' extension methods. Changes the request URI before the router begins processing the request where: url - the new request path value. var Hapi = require('hapi'); var server = new Hapi.Server(); server.connection({ port: 80 }); server.ext('onRequest', function (request, reply) { // Change all requests to '/test' request.setUrl('/test'); return reply.continue(); });*/ setUrl(url: string | url.Url): void; /** request.setMethod(method) Available only in 'onRequest' extension methods. Changes the request method before the router begins processing the request where: method - is the request HTTP method (e.g. 'GET'). var Hapi = require('hapi'); var server = new Hapi.Server(); server.connection({ port: 80 }); server.ext('onRequest', function (request, reply) { // Change all requests to 'GET' request.setMethod('GET'); return reply.continue(); });*/ setMethod(method: string): void; /** request.log(tags, [data, [timestamp]]) Always available. Logs request-specific events. When called, the server emits a 'request' event which can be used by other listeners or plugins. The arguments are: data - an optional message string or object with the application data being logged. 
timestamp - an optional timestamp expressed in milliseconds. Defaults to Date.now() (now). Any logs generated by the server internally will be emitted only on the 'request-internal' channel and will include the event.internal flag set to true. var Hapi = require('hapi'); var server = new Hapi.Server(); server.connection({ port: 80 }); server.on('request', function (request, event, tags) { if (tags.error) { console.log(event); } }); var handler = function (request, reply) { request.log(['test', 'error'], 'Test event'); return reply(); }; */ log(/** a string or an array of strings (e.g. ['error', 'database', 'read']) used to identify the event. Tags are used instead of log levels and provide a much more expressive mechanism for describing and filtering events.*/ tags: string | string[], /** an optional message string or object with the application data being logged.*/ data?: any, /** an optional timestamp expressed in milliseconds. Defaults to Date.now() (now).*/ timestamp?: number): void; /** request.getLog([tags], [internal]) Always available. Returns an array containing the events matching any of the tags specified (logical OR) request.getLog(); request.getLog('error'); request.getLog(['error', 'auth']); request.getLog(['error'], true); request.getLog(false);*/ getLog(/** is a single tag string or array of tag strings. If no tags specified, returns all events.*/ tags?: string, /** filters the events to only those with a matching event.internal value. If true, only internal logs are included. If false, only user event are included. Defaults to all events (undefined).*/ internal?: boolean): string[]; /** request.tail([name]) Available until immediately after the 'response' event is emitted. Adds a request tail which has to complete before the request lifecycle is complete where: name - an optional tail name used for logging purposes. Returns a tail function which must be called when the tail activity is completed. 
Tails are actions performed throughout the request lifecycle, but which may end after a response is sent back to the client. For example, a request may trigger a database update which should not delay sending back a response. However, it is still desirable to associate the activity with the request when logging it (or an error associated with it). When all tails completed, the server emits a 'tail' event. var Hapi = require('hapi'); var server = new Hapi.Server(); server.connection({ port: 80 }); var get = function (request, reply) { var dbTail = request.tail('write to database'); db.save('key', 'value', function () { dbTail(); }); return reply('Success!'); }; server.route({ method: 'GET', path: '/', handler: get }); server.on('tail', function (request) { console.log('Request completed including db activity'); });*/ tail(/** an optional tail name used for logging purposes.*/ name?: string): Function; } /** Response events The response object supports the following events: 'peek' - emitted for each chunk of data written back to the client connection. The event method signature is function(chunk, encoding). 'finish' - emitted when the response finished writing but before the client response connection is ended. The event method signature is function (). var Crypto = require('crypto'); var Hapi = require('hapi'); var server = new Hapi.Server(); server.connection({ port: 80 }); server.ext('onPreResponse', function (request, reply) { var response = request.response; if (response.isBoom) { return reply(); } var hash = Crypto.createHash('sha1'); response.on('peek', function (chunk) { hash.update(chunk); }); response.once('finish', function () { console.log(hash.digest('hex')); }); return reply.continue(); });*/ export class Response extends Events.EventEmitter { isBoom: boolean; /** the HTTP response status code. Defaults to 200 (except for errors).*/ statusCode: number; /** an object containing the response headers where each key is a header field name. 
Note that this is an incomplete list of headers to be included with the response. Additional headers will be added once the response is prepared for transmission.*/ headers: IDictionary<string>; /** the value provided using the reply interface.*/ source: any; /** a string indicating the type of source with available values: 'plain' - a plain response such as string, number, null, or simple object (e.g. not a Stream, Buffer, or view). 'buffer' - a Buffer. 'view' - a view generated with reply.view(). 'file' - a file generated with reply.file() or via the directory handler. 'stream' - a Stream. 'promise' - a Promise object. */ variety: string; /** <API key> state. Provides a safe place to store application data without potential conflicts with the framework. Should not be used by plugins which should use plugins[name].*/ app: any; /** plugin-specific state. Provides a place to store and pass request-level plugin data. The plugins is an object where each key is a plugin name and the value is the state. */ plugins: any; /** settings - response handling flags: charset - the 'Content-Type' HTTP header 'charset' property. Defaults to 'utf-8'. encoding - the string encoding scheme used to serialize data into the HTTP payload when source is a string or marshals into a string. Defaults to 'utf8'. passThrough - if true and source is a Stream, copies the statusCode and headers of the stream to the outbound response. Defaults to true. stringify - options used for source value requiring stringification. Defaults to no replacer and no space padding. ttl - if set, overrides the route cache expiration milliseconds value set in the route config. Defaults to no override. 
varyEtag - if true, a suffix will be automatically added to the 'ETag' header at transmission time (separated by a '-' character) when the HTTP 'Vary' header is present.*/ settings: { charset: string; encoding: string; passThrough: boolean; stringify: any; ttl: number; varyEtag: boolean; } /** sets the HTTP 'Content-Length' header (to avoid chunked transfer encoding) where: length - the header value. Must match the actual payload size.*/ bytes(length: number): Response; /** sets the 'Content-Type' HTTP header 'charset' property where: charset - the charset property value.*/ charset(charset: string): Response; /** sets the HTTP status code where: statusCode - the HTTP status code.*/ code(statusCode: number): Response; /** sets the HTTP status code to Created (201) and the HTTP 'Location' header where: uri - an absolute or relative URI used as the 'Location' header value.*/ created(uri: string): Response; /** encoding(encoding) - sets the string encoding scheme used to serial data into the HTTP payload where: encoding - the encoding property value (see node Buffer encoding).*/ encoding(encoding: string): Response; /** etag(tag, options) - sets the representation entity tag where: tag - the entity tag string without the double-quote. options - optional settings where: weak - if true, the tag will be prefixed with the 'W/' weak signifier. Weak tags will fail to match identical tags for the purpose of determining 304 response status. Defaults to false. vary - if true and content encoding is set or applied to the response (e.g 'gzip' or 'deflate'), the encoding name will be automatically added to the tag at transmission time (separated by a '-' character). Ignored when weak is true. Defaults to true.*/ etag(tag: string, options: { weak: boolean; vary: boolean; }): Response; /**header(name, value, options) - sets an HTTP header where: name - the header name. value - the header value. 
options - optional settings where: append - if true, the value is appended to any existing header value using separator. Defaults to false. separator - string used as separator when appending to an existing value. Defaults to ','. override - if false, the header value is not set if an existing value present. Defaults to true.*/ header(name: string, value: string, options?: IHeaderOptions): Response; /** hold() - puts the response on hold until response.send() is called. Available only after reply() is called and until response.hold() is invoked once. */ hold(): Response; /** location(uri) - sets the HTTP 'Location' header where: uri - an absolute or relative URI used as the 'Location' header value.*/ location(uri: string): Response; /** redirect(uri) - sets an HTTP redirection response (302) and decorates the response with additional methods listed below, where: uri - an absolute or relative URI used to redirect the client to another resource. */ redirect(uri: string): Response; /** replacer(method) - sets the JSON.stringify() replacer argument where: method - the replacer function or array. Defaults to none.*/ replacer(method: Function | Array<Function>): Response; /** spaces(count) - sets the JSON.stringify() space argument where: count - the number of spaces to indent nested object keys. Defaults to no indentation. */ spaces(count: number): Response; /**state(name, value, [options]) - sets an HTTP cookie where: name - the cookie name. value - the cookie value. If no encoding is defined, must be a string. options - optional configuration. If the state was previously registered with the server using server.state(), the specified keys in options override those same keys in the server definition (but not others).*/ state(name: string, value: string, options?: any): Response; /** send() - resume the response which will be transmitted in the next tick. Available only after response.hold() is called and until response.send() is invoked once. 
*/ send(): void; /** sets a string suffix when the response is processed via JSON.stringify().*/ suffix(suffix: string): void; /** overrides the default route cache expiration rule for this response instance where: msec - the time-to-live value in milliseconds.*/ ttl(msec: number): void; /** type(mimeType) - sets the HTTP 'Content-Type' header where: mimeType - is the mime type. Should only be used to override the built-in default for each response type. */ type(mimeType: string): Response; /** clears the HTTP cookie by setting an expired value where: name - the cookie name. options - optional configuration for expiring cookie. If the state was previously registered with the server using server.state(), the specified keys in options override those same keys in the server definition (but not others).*/ unstate(name: string, options?: { [key: string]: string }): Response; /** adds the provided header to the list of inputs affecting the response generation via the HTTP 'Vary' header where: header - the HTTP request header name.*/ vary(header: string): void; } /** When using the redirect() method, the response object provides these additional methods */ export class ResponseRedirect extends Response { /** sets the status code to 302 or 307 (based on the rewritable() setting) where: isTemporary - if false, sets status to permanent. Defaults to true.*/ temporary(isTemporary: boolean): void; /** sets the status code to 301 or 308 (based on the rewritable() setting) where: isPermanent - if false, sets status to temporary. Defaults to true. */ permanent(isPermanent: boolean): void; /** sets the status code to 301/302 for rewritable (allows changing the request method from 'POST' to 'GET') or 307/308 for non-rewritable (does not allow changing the request method from 'POST' to 'GET'). Exact code based on the temporary() or permanent() setting. Arguments: isRewritable - if false, sets to non-rewritable. Defaults to true. 
Permanent Temporary Rewritable 301 302(1) Non-rewritable 308(2) 307 Notes: 1. Default value. 2. Proposed code, not supported by all clients. */ rewritable(isRewritable: boolean): void; } /** info about a server connection */ export interface <API key> { /** - a unique connection identifier (using the format '{hostname}:{pid}:{now base36}').*/ id: string; /** - the connection creation timestamp.*/ created: number; /** - the connection start timestamp (0 when stopped).*/ started: number; /** the connection port based on the following rules: the configured port value before the server has been started. the actual port assigned when no port is configured or set to 0 after the server has been started.*/ port: number; /** - the host name the connection was configured to. Defaults to the operating system hostname when available, otherwise 'localhost'.*/ host: string; /** - the active IP address the connection was bound to after starting. Set to undefined until the server has been started or when using a non TCP port (e.g. UNIX domain socket).*/ address: string; /** - the protocol used: 'http' - HTTP. 'https' - HTTPS. 'socket' - UNIX domain socket or Windows named pipe.*/ protocol: string; /** - a string representing the connection (e.g. 'http://example.com:8080').*/ uri: string; } /** * undocumented. 
The connection object constructed after calling server.connection(); * can be accessed via server.connections; or request.connection; */ export class ServerConnection extends Events.EventEmitter { domain: any; _events: { route: Function, domain: Function, _events: Function, _eventsCount: Function, _maxListeners: Function }; _eventsCount: number; settings: <API key>; server: Server; /** ex: "tcp" */ type: string; _started: boolean; /** dictionary of sockets */ _connections: { [ip_port: string]: any }; _onConnection: Function; registrations: any; _extensions: any; _requestCounter: { value: number; min: number; max: number }; _load: any; states: { settings: any; cookies: any; names: any[] }; auth: { connection: ServerConnection; _schemes: any; _strategies: any; settings: any; api: any; }; _router: any; MSPluginsCollection: any; applicationCache: any; addEventListener: any; info: <API key>; } type RequestExtPoints = "onRequest" | "onPreResponse" | "onPreAuth" | "onPostAuth" | "onPreHandler" | "onPostHandler" | "onPreResponse"; type ServerExtPoints = "onPreStart" | "onPostStart" | "onPreStop" | "onPostStop"; export class Server extends Events.EventEmitter { constructor(options?: IServerOptions); /** Provides a safe place to store server-specific run-time application data without potential conflicts with the framework internals. The data can be accessed whenever the server is accessible. Initialized with an empty object. var Hapi = require('hapi'); server = new Hapi.Server(); server.app.key = 'value'; var handler = function (request, reply) { return reply(request.server.app.key); }; */ app: any; connections: Array<ServerConnection>; info: <API key>; /** An object containing the process load metrics (when load.sampleInterval is enabled): rss - RSS memory usage. 
var Hapi = require('hapi'); var server = new Hapi.Server({ load: { sampleInterval: 1000 } }); console.log(server.load.rss);*/ load: { /** - event loop delay milliseconds.*/ eventLoopDelay: number; /** - V8 heap usage.*/ heapUsed: number; }; /** When the server contains exactly one connection, listener is the node HTTP server object of the sole connection. When the server contains more than one connection, each server.connections array member provides its own connection.listener. var Hapi = require('hapi'); var SocketIO = require('socket.io'); var server = new Hapi.Server(); server.connection({ port: 80 }); var io = SocketIO.listen(server.listener); io.sockets.on('connection', function(socket) { socket.emit({ msg: 'welcome' }); });*/ listener: http.Server; methods: IDictionary<Function>; mime: any; plugins: IDictionary<any>; /** server.realm The realm object contains server-wide or plugin-specific state that can be shared across various methods. For example, when calling server.bind(), the active realm settings.bind property is set which is then used by routes and extensions added at the same level (server root or plugin). Realms are a limited version of a sandbox where plugins can maintain state used by the framework when adding routes, extensions, and other properties. modifiers - when the server object is provided as an argument to the plugin register() method, modifiers provides the registration preferences passed to the server.register() method and includes: route - routes preferences: prefix - the route path prefix used by any calls to server.route() from the server. vhost - the route virtual host settings used by any calls to server.route() from the server. plugin - the active plugin name (empty string if at the server root). plugins - plugin-specific state to be shared only among activities sharing the same active state. plugins is an object where each key is a plugin name and the value is the plugin state. 
settings - settings overrides: files.relativeTo bind The server.realm object should be considered read-only and must not be changed directly except for the plugins property can be directly manipulated by the plugins (each setting its own under plugins[name]). exports.register = function (server, options, next) { console.log(server.realm.modifiers.route.prefix); return next(); };*/ realm: IServerRealm; /** server.root The root server object containing all the connections and the root server methods (e.g. start(), stop(), connection()).*/ root: Server; settings: IServerOptions; version: string; /** server.after(method, [dependencies]) Adds a method to be called after all the plugin dependencies have been registered and before the server starts (only called if the server is started) where: after - the method with signature function(plugin, next) where: server - server object the after() method was called on. next - the callback function the method must call to return control over to the application and complete the registration process. The function signature is function(err) where: err - internal error which is returned back via the server.start() callback. dependencies - a string or array of string with the plugin names to call this method after their after() methods. There is no requirement for the other plugins to be registered. Setting dependencies only arranges the after methods in the specified order. var Hapi = require('hapi'); var server = new Hapi.Server(); server.connection({ port: 80 }); server.after(function () { // Perform some pre-start logic }); server.start(function (err) { // After method already executed }); server.auth.default(options)*/ after(method: (plugin: any, next: (err: any) => void) => void, dependencies: string | string[]): void; auth: { /** server.auth.api An object where each key is a strategy name and the value is the exposed strategy API. 
Available only when the authentication scheme exposes an API by returning an api key in the object returned from its implementation function. When the server contains more than one connection, each server.connections array member provides its own connection.auth.api object. const server = new Hapi.Server(); server.connection({ port: 80 }); const scheme = function (server, options) { return { api: { settings: { x: 5 } }, authenticate: function (request, reply) { const req = request.raw.req; const authorization = req.headers.authorization; if (!authorization) { return reply(Boom.unauthorized(null, 'Custom')); } return reply.continue({ credentials: { user: 'john' } }); } }; }; server.auth.scheme('custom', scheme); server.auth.strategy('default', 'custom'); console.log(server.auth.api.default.settings.x); // 5 */ api: { [index: string]: any; } /** server.auth.default(options) Sets a default strategy which is applied to every route where: options - a string with the default strategy name or an object with a specified strategy or strategies using the same format as the route auth handler options. The default does not apply when the route config specifies auth as false, or has an authentication strategy configured. Otherwise, the route authentication config is applied to the defaults. Note that the default only applies at time of route configuration, not at runtime. Calling default() after adding a route will have no impact on routes added prior. The default auth strategy configuration can be accessed via connection.auth.settings.default. 
var server = new Hapi.Server(); server.connection({ port: 80 }); server.auth.scheme('custom', scheme); server.auth.strategy('default', 'custom'); server.auth.default('default'); server.route({ method: 'GET', path: '/', handler: function (request, reply) { return reply(request.auth.credentials.user); } });*/ default(options: string): void; default(options: { strategy: string }): void; default(options: { strategies: string[] }): void; /** server.auth.scheme(name, scheme) Registers an authentication scheme where: name - the scheme name. scheme - the method implementing the scheme with signature function(server, options) where: server - a reference to the server object the scheme is added to. options - optional scheme settings used to instantiate a strategy.*/ scheme(name: string, /** When the scheme authenticate() method implementation calls reply() with an error condition, the specifics of the error affect whether additional authentication strategies will be attempted if configured for the route. If the err returned by the reply() method includes a message, no additional strategies will be attempted. If the err does not include a message but does include a scheme name (e.g. Boom.unauthorized(null, 'Custom')), additional strategies will be attempted in order of preference. When the scheme payload() method returns an error with a message, it means payload validation failed due to bad payload. If the error has no message but includes a scheme name (e.g. Boom.unauthorized(null, 'Custom')), authentication may still be successful if the route auth.payload configuration is set to 'optional'. 
var server = new Hapi.Server(); server.connection({ port: 80 }); var scheme = function (server, options) { return { authenticate: function (request, reply) { var req = request.raw.req; var authorization = req.headers.authorization; if (!authorization) { return reply(Boom.unauthorized(null, 'Custom')); } return reply(null, { credentials: { user: 'john' } }); } }; }; */ scheme: (server: Server, options: any) => IServerAuthScheme): void; /** server.auth.strategy(name, scheme, [mode], [options]) Registers an authentication strategy where: name - the strategy name. scheme - the scheme name (must be previously registered using server.auth.scheme()). mode - if true, the scheme is automatically assigned as a required strategy to any route without an auth config. Can only be assigned to a single server strategy. Value must be true (which is the same as 'required') or a valid authentication mode ('required', 'optional', 'try'). Defaults to false. options - scheme options based on the scheme requirements. var Hapi = require('hapi'); var server = new Hapi.Server(); server.connection({ port: 80 }); server.auth.scheme('custom', scheme); server.auth.strategy('default', 'custom'); server.route({ method: 'GET', path: '/', config: { auth: 'default', handler: function (request, reply) { return reply(request.auth.credentials.user); } } });*/ strategy(name: string, scheme: any, mode?: boolean | string, options?: any): void; /** server.auth.test(strategy, request, next) Tests a request against an authentication strategy where: strategy - the strategy name registered with server.auth.strategy(). request - the request object. next - the callback function with signature function(err, credentials) where: err - the error if authentication failed. credentials - the authentication credentials object if authentication was successful. Note that the test() method does not take into account the route authentication configuration. It also does not perform payload authentication. It is limited to the basic strategy authentication execution. 
It does not include verifying scope, entity, or other route properties. var server = new Hapi.Server(); server.connection({ port: 80 }); server.auth.scheme('custom', scheme); server.auth.strategy('default', 'custom'); server.route({ method: 'GET', path: '/', handler: function (request, reply) { request.server.auth.test('default', request, function (err, credentials) { if (err) { return reply({ status: false }); } return reply({ status: true, user: credentials.name }); }); } });*/ test(strategy: string, request: Request, next: (err: any, credentials: any) => void): void; }; /** server.bind(context) Sets a global context used as the default bind object when adding a route or an extension where: context - the object used to bind this in handler and extension methods. When setting context inside a plugin, the context is applied only to methods set up by the plugin. Note that the context applies only to routes and extensions added after it has been set. var handler = function (request, reply) { return reply(this.message); }; exports.register = function (server, options, next) { var bind = { message: 'hello' }; server.bind(bind); server.route({ method: 'GET', path: '/', handler: handler }); return next(); };*/ bind(context: any): void; cache(options: ICatBoxCacheOptions): void; connection(options: <API key>): Server; /** server.decorate(type, property, method, [options]) Extends various framework interfaces with custom methods where: type - the interface being decorated. Supported types: 'reply' - adds methods to the reply interface. 'server' - adds methods to the Server object. property - the object decoration key name. method - the extension function. options - if the type is 'request', supports the following optional settings: 'apply' - if true, the method function is invoked using the signature function(request) where request is the current request object and the returned value is assigned as the decoration. 
Note that decorations apply to the entire server and all its connections regardless of current selection. var Hapi = require('hapi'); var server = new Hapi.Server(); server.connection({ port: 80 }); server.decorate('reply', 'success', function () { return this.response({ status: 'ok' }); }); server.route({ method: 'GET', path: '/', handler: function (request, reply) { return reply.success(); } });*/ decorate(type: string, property: string, method: Function, options?: { apply: boolean }): void; /** server.dependency(dependencies, [after]) Used within a plugin to declare a required dependency on other plugins where: dependencies - a single string or array of plugin name strings which must be registered in order for this plugin to operate. Plugins listed must be registered before the server is started. Does not provide version dependency which should be implemented using npm peer dependencies. after - an optional function called after all the specified dependencies have been registered and before the server starts. The function is only called if the server is started. If a circular dependency is detected, an exception is thrown (e.g. two plugins each has an after function to be called after the other). The function signature is function(server, next) where: server - the server the dependency() method was called on. next - the callback function the method must call to return control over to the application and complete the registration process. The function signature is function(err) where: err - internal error condition, which is returned back via the server.start() callback. 
exports.register = function (server, options, next) { server.dependency('yar', after); return next(); }; var after = function (server, next) { // Additional plugin registration logic return next(); };*/ dependency(dependencies: string | string[], after?: (server: Server, next: (err: any) => void) => void): void; /** server.expose(key, value) Used within a plugin to expose a property via server.plugins[name] where: key - the key assigned (server.plugins[name][key]). value - the value assigned. exports.register = function (server, options, next) { server.expose('util', function () { console.log('something'); }); return next(); };*/ expose(key: string, value: any): void; /** server.expose(obj) Merges a deep copy of an object into to the existing content of server.plugins[name] where: obj - the object merged into the exposed properties container. exports.register = function (server, options, next) { server.expose({ util: function () { console.log('something'); } }); return next(); };*/ expose(obj: any): void; /** server.ext(event, method, [options]) Registers an extension function in one of the available extension points where: event - the event name. method - a function or an array of functions to be executed at a specified point during request processing. The required extension function signature is function(request, reply) where: request - the request object. NOTE: Access the Response via request.response reply - the reply interface which is used to return control back to the framework. To continue normal execution of the request lifecycle, reply.continue() must be called. To abort processing and return a response to the client, call reply(value) where value is an error or any other valid response. this - the object provided via options.bind or the current active context set with server.bind(). options - an optional object with the following: before - a string or array of strings of plugin names this method must execute before (on the same event). 
Otherwise, extension methods are executed in the order added. after - a string or array of strings of plugin names this method must execute after (on the same event). Otherwise, extension methods are executed in the order added. bind - a context object passed back to the provided method (via this) when called. var Hapi = require('hapi'); var server = new Hapi.Server(); server.connection({ port: 80 }); server.ext('onRequest', function (request, reply) { // Change all requests to '/test' request.setUrl('/test'); return reply.continue(); }); var handler = function (request, reply) { return reply({ status: 'ok' }); }; server.route({ method: 'GET', path: '/test', handler: handler }); server.start(); // All requests will get routed to '/test'*/ ext(event: RequestExtPoints, method: (request: Request, reply: IReply, bind?: any) => void, options?: { before: string | string[]; after: string | string[]; bind?: any }): void; ext<T>(event: RequestExtPoints, method: (request: Request, reply: IStrictReply<T>, bind?: any) => void, options?: { before: string | string[]; after: string | string[]; bind?: any }): void; ext(event: ServerExtPoints, method: (server: Server, next: (err?: any) => void, bind?: any) => void, options?: { before: string | string[]; after: string | string[]; bind?: any }): void; /** server.handler(name, method) Registers a new handler type to be used in routes where: name - string name for the handler being registered. Cannot override the built-in handler types (directory, file, proxy, and view) or any previously registered type. method - the function used to generate the route handler using the signature function(route, options) where: route - the route public interface object. options - the configuration object provided in the handler config. 
var Hapi = require('hapi'); var server = new Hapi.Server(); server.connection({ host: 'localhost', port: 8000 }); // Defines new handler for routes on this server server.handler('test', function (route, options) { return function (request, reply) { return reply('new handler: ' + options.msg); } }); server.route({ method: 'GET', path: '/', handler: { test: { msg: 'test' } } }); server.start(); The method function can have a defaults object or function property. If the property is set to an object, that object is used as the default route config for routes using this handler. If the property is set to a function, the function uses the signature function(method) and returns the route default configuration. var Hapi = require('hapi'); var server = new Hapi.Server(); server.connection({ host: 'localhost', port: 8000 }); var handler = function (route, options) { return function (request, reply) { return reply('new handler: ' + options.msg); } }; // Change the default payload processing for this handler handler.defaults = { payload: { output: 'stream', parse: false } }; server.handler('test', handler);*/ handler<THandlerConfig>(name: string, method: (route: IRoute, options: THandlerConfig) => ISessionHandler): void; /** server.initialize([callback]) Initializes the server (starts the caches, finalizes plugin registration) but does not start listening on the connection ports, where: - `callback` - the callback method when server initialization is completed or failed with the signature `function(err)` where: - `err` - any initialization error condition. If no `callback` is provided, a `Promise` object is returned. Note that if the method fails and the callback includes an error, the server is considered to be in an undefined state and should be shut down. 
In most cases it would be impossible to fully recover as the various plugins, caches, and other event listeners will get confused by repeated attempts to start the server or make assumptions about the healthy state of the environment. It is recommended to assert that no error has been returned after calling `initialize()` to abort the process when the server fails to start properly. If you must try to resume after an error, call `server.stop()` first to reset the server state. */ initialize(callback?: (error: any) => void): IPromise<void>; inject: IServerInject; /** server.log(tags, [data, [timestamp]]) Logs server events that cannot be associated with a specific request. When called the server emits a 'log' event which can be used by other listeners or plugins to record the information or output to the console. The arguments are: tags - a string or an array of strings (e.g. ['error', 'database', 'read']) used to identify the event. Tags are used instead of log levels and provide a much more expressive mechanism for describing and filtering events. Any logs generated by the server internally include the 'hapi' tag along with event-specific information. data - an optional message string or object with the application data being logged. timestamp - an optional timestamp expressed in milliseconds. Defaults to Date.now() (now). var Hapi = require('hapi'); var server = new Hapi.Server(); server.connection({ port: 80 }); server.on('log', function (event, tags) { if (tags.error) { console.log(event); } }); server.log(['test', 'error'], 'Test event');*/ log(tags: string | string[], data?: string | any, timestamp?: number): void; /**server.lookup(id) When the server contains exactly one connection, looks up a route configuration where: id - the route identifier as set in the route options. returns the route public interface object if found, otherwise null. 
var server = new Hapi.Server(); server.connection(); server.route({ method: 'GET', path: '/', config: { handler: function (request, reply) { return reply(); }, id: 'root' } }); var route = server.lookup('root'); When the server contains more than one connection, each server.connections array member provides its own connection.lookup() method.*/ lookup(id: string): IRoute; /** server.match(method, path, [host]) When the server contains exactly one connection, looks up a route configuration where: method - the HTTP method (e.g. 'GET', 'POST'). path - the requested path (must begin with '/'). host - optional hostname (to match against routes with vhost). returns the route public interface object if found, otherwise null. var server = new Hapi.Server(); server.connection(); server.route({ method: 'GET', path: '/', config: { handler: function (request, reply) { return reply(); }, id: 'root' } }); var route = server.match('get', '/'); When the server contains more than one connection, each server.connections array member provides its own connection.match() method.*/ match(method: string, path: string, host?: string): IRoute; /** server.method(name, method, [options]) Registers a server method. Server methods are functions registered with the server and used throughout the application as a common utility. Their advantage is in the ability to configure them to use the built-in cache and share across multiple request handlers without having to create a common module. 
Methods are registered via server.method(name, method, [options]) var Hapi = require('hapi'); var server = new Hapi.Server(); server.connection({ port: 80 }); // Simple arguments var add = function (a, b, next) { return next(null, a + b); }; server.method('sum', add, { cache: { expiresIn: 2000 } }); server.methods.sum(4, 5, function (err, result) { console.log(result); }); // Object argument var addArray = function (array, next) { var sum = 0; array.forEach(function (item) { sum += item; }); return next(null, sum); }; server.method('sumObj', addArray, { cache: { expiresIn: 2000 }, generateKey: function (array) { return array.join(','); } }); server.methods.sumObj([5, 6], function (err, result) { console.log(result); }); // Synchronous method with cache var addSync = function (a, b) { return a + b; }; server.method('sumSync', addSync, { cache: { expiresIn: 2000 }, callback: false }); server.methods.sumSync(4, 5, function (err, result) { console.log(result); }); */ method(/** a unique method name used to invoke the method via server.methods[name]. When configured with caching enabled, server.methods[name].cache.drop(arg1, arg2, ..., argn, callback) can be used to clear the cache for a given key. Supports using nested names such as utils.users.get which will automatically create the missing path under server.methods and can be accessed for the previous example via server.methods.utils.users.get.*/ name: string, method: IServerMethod, options?: <API key>): void; /**server.method(methods) Registers a server method function as described in server.method() using a configuration object where: methods - an object or an array of objects where each one contains: name - the method name. method - the method function. options - optional settings. 
var add = function (a, b, next) { next(null, a + b); }; server.method({ name: 'sum', method: add, options: { cache: { expiresIn: 2000 } } });*/ method(methods: { name: string; method: IServerMethod; options?: <API key> } | Array<{ name: string; method: IServerMethod; options?: <API key> }>): void; /**server.path(relativeTo) Sets the path prefix used to locate static resources (files and view templates) when relative paths are used where: relativeTo - the path prefix added to any relative file path starting with '.'. Note that setting a path within a plugin only applies to resources accessed by plugin methods. If no path is set, the connection files.relativeTo configuration is used. The path only applies to routes added after it has been set. exports.register = function (server, options, next) { server.path(__dirname + '../static'); server.route({ path: '/file', method: 'GET', handler: { file: './test.html' } }); next(); };*/ path(relativeTo: string): void; /** * server.register(plugins, [options], callback) * Registers a plugin where: * plugins - an object or array of objects where each one is either: * a plugin registration function. * an object with the following: * register - the plugin registration function. * options - optional options passed to the registration function when called. * options - optional registration options (different from the options passed to the registration function): * select - a string or array of string labels used to pre-select connections for plugin registration. * routes - modifiers applied to each route added by the plugin: * prefix - string added as prefix to any route path (must begin with '/'). If a plugin registers a child plugin the prefix is passed on to the child or is added in front of the child-specific prefix. * vhost - virtual host string (or array of strings) applied to every route. The outer-most vhost overrides the any nested configuration. 
* callback - the callback function with signature function(err) where: * err - an error returned from the registration function. Note that exceptions thrown by the registration function are not handled by the framework. * * If no callback is provided, a Promise object is returned. */ register(plugins: any | any[], options: { select: string | string[]; routes: { prefix: string; vhost?: string | string[] }; }, callback: (err: any) => void): void; register(plugins: any | any[], options: { select: string | string[]; routes: { prefix: string; vhost?: string | string[] }; }): IPromise<any>; register(plugins: any | any[], callback: (err: any) => void): void; register(plugins: any | any[]): IPromise<any>; /**server.render(template, context, [options], callback) Utilizes the server views manager to render a template where: template - the template filename and path, relative to the views manager templates path (path or relativeTo). context - optional object used by the template to render context-specific result. Defaults to no context ({}). options - optional object used to override the views manager configuration. callback - the callback function with signature function (err, rendered, config) where: err - the rendering error if any. rendered - the result view string. config - the configuration used to render the template. var Hapi = require('hapi'); var server = new Hapi.Server(); server.connection({ port: 80 }); server.views({ engines: { html: require('handlebars') }, path: __dirname + '/templates' }); var context = { title: 'Views Example', message: 'Hello, World' }; server.render('hello', context, function (err, rendered, config) { console.log(rendered); });*/ render(template: string, context: any, options: any, callback: (err: any, rendered: any, config: any) => void): void; /** server.route(options) Adds a connection route where: options - a route configuration object or an array of configuration objects. 
var Hapi = require('hapi'); var server = new Hapi.Server(); server.connection({ port: 80 }); server.route({ method: 'GET', path: '/', handler: function (request, reply) { return reply('ok'); } }); server.route([ { method: 'GET', path: '/1', handler: function (request, reply) { return reply('ok'); } }, { method: 'GET', path: '/2', handler: function (request, reply) { return reply('ok'); } } ]);*/ route(options: IRouteConfiguration): void; route(options: IRouteConfiguration[]): void; /**server.select(labels) Selects a subset of the server's connections where: labels - a single string or array of strings of labels used as a logical OR statement to select all the connections with matching labels in their configuration. Returns a server object with connections set to the requested subset. Selecting again on a selection operates as a logic AND statement between the individual selections. var Hapi = require('hapi'); var server = new Hapi.Server(); server.connection({ port: 80, labels: ['a'] }); server.connection({ port: 8080, labels: ['b'] }); server.connection({ port: 8081, labels: ['c'] }); server.connection({ port: 8082, labels: ['c','d'] }); var a = server.select('a'); // The server with port 80 var ab = server.select(['a','b']); // A list of servers containing the server with port 80 and the server with port 8080 var c = server.select('c'); // A list of servers containing the server with port 8081 and the server with port 8082 */ select(labels: string | string[]): Server | Server[]; /** server.start([callback]) Starts the server connections by listening for incoming requests on the configured port of each listener (unless the connection was configured with autoListen set to false), where: callback - optional callback when server startup is completed or failed with the signature function(err) where: err - any startup error condition. 
var Hapi = require('hapi'); var server = new Hapi.Server(); server.connection({ port: 80 }); server.start(function (err) { console.log('Server started at: ' + server.info.uri); });*/ start(callback?: (err: any) => void): IPromise<void>; /** server.state(name, [options]) HTTP state management uses client cookies to persist a state across multiple requests. Registers a cookie definitions State defaults can be modified via the server connections.routes.state configuration option. var Hapi = require('hapi'); var server = new Hapi.Server(); server.connection({ port: 80 }); // Set cookie definition server.state('session', { ttl: 24 * 60 * 60 * 1000, // One day isSecure: true, path: '/', encoding: 'base64json' }); // Set state in route handler var handler = function (request, reply) { var session = request.state.session; if (!session) { session = { user: 'joe' }; } session.last = Date.now(); return reply('Success').state('session', session); }; Registered cookies are automatically parsed when received. Parsing rules depends on the route state.parse configuration. If an incoming registered cookie fails parsing, it is not included in request.state, regardless of the state.failAction setting. When state.failAction is set to 'log' and an invalid cookie value is received, the server will emit a 'request-internal' event. 
To capture these errors subscribe to the 'request-internal' events and filter on 'error' and 'state' tags: var Hapi = require('hapi'); var server = new Hapi.Server(); server.connection({ port: 80 }); server.on('request-internal', function (request, event, tags) { if (tags.error && tags.state) { console.error(event); } }); */ state(name: string, options?: ICookieSettings): void; /** server.stop([options], [callback]) Stops the server's connections by refusing to accept any new connections or requests (existing connections will continue until closed or timeout), where: options - optional object with: timeout - overrides the timeout in millisecond before forcefully terminating a connection. Defaults to 5000 (5 seconds). callback - optional callback method with signature function() which is called once all the connections have ended and it is safe to exit the process. var Hapi = require('hapi'); var server = new Hapi.Server(); server.connection({ port: 80 }); server.stop({ timeout: 60 * 1000 }, function () { console.log('Server stopped'); });*/ stop(options?: { timeout: number }, callback?: () => void): IPromise<void>; /**server.table([host]) Returns a copy of the routing table where: host - optional host to filter routes matching a specific virtual host. Defaults to all virtual hosts. The return value is an array where each item is an object containing: info - the connection.info the connection the table was generated for. labels - the connection labels. table - an array of routes where each route contains: settings - the route config with defaults applied. method - the HTTP method in lower case. path - the route path. Note that if the server has not been started and multiple connections use port 0, the table items will override each other and will produce an incomplete result. 
var Hapi = require('hapi'); var server = new Hapi.Server(); server.connection({ port: 80, host: 'example.com' }); server.route({ method: 'GET', path: '/example', handler: function (request, reply) { return reply(); } }); var table = server.table(); When calling connection.table() directly on each connection, the return value is the same as the array table item value of an individual connection: var Hapi = require('hapi'); var server = new Hapi.Server(); server.connection({ port: 80, host: 'example.com' }); server.route({ method: 'GET', path: '/example', handler: function (request, reply) { return reply(); } }); var table = server.connections[0].table(); //[ // { // method: 'get', // path: '/example', // settings: { ... } // } //] */ table(host?: any): IConnectionTable; /**server.views(options) Initializes the server views manager var Hapi = require('hapi'); var server = new Hapi.Server(); server.views({ engines: { html: require('handlebars'), jade: require('jade') }, path: '/static/templates' }); When server.views() is called within a plugin, the views manager is only available to plugins methods.*/ views(options: <API key>): void; } }
// mod parser::atom TEST
//
// Unit tests for the atom-level parsers: parse_literal, parse_dot,
// parse_match and parse_eof.  Each test builds a Status from an input
// string (with an empty rule set) and checks the position bookkeeping
// (pos.n = absolute offset, pos.col = column, pos.row = line).
//
// NOTE(review): test function names were redacted to `<API key>` in this
// copy of the file; the bodies are unchanged.

use super::Status;
use super::{parse_dot, parse_eof, parse_literal, parse_match, MatchRules};

// parse_literal consumes exactly the literal and advances col/n by its length.
#[test]
fn <API key>() {
    let rules = rules!{};
    let status_init = Status::init("aaaaaaaaaaaaaaaa", &rules);
    let (status_end, _) = parse_literal(status_init, "aaa").ok().unwrap();

    assert!(status_end.pos.col == 3);
    assert!(status_end.pos.n == 3);
    assert!(status_end.pos.row == 0);
}

// Same as above but with distinct characters, using assert_eq! for clearer
// failure messages.
#[test]
fn <API key>() {
    let rules = rules!{};
    let status_init = Status::init("abcdefghij", &rules);
    let (status_end, _) = parse_literal(status_init, "abc").ok().unwrap();

    assert_eq!(status_end.pos.col, 3);
    assert_eq!(status_end.pos.n, 3);
    assert_eq!(status_end.pos.row, 0);
}

// A literal that does not match at all must fail.
#[test]
fn <API key>() {
    let rules = rules!{};
    let status_init = Status::init("abcdefghij", &rules);
    assert!(parse_literal(status_init, "bbb").is_err());
}

// A literal that diverges after a partial match must fail.
#[test]
fn <API key>() {
    let rules = rules!{};
    let status_init = Status::init("abcdefghij", &rules);
    assert!(parse_literal(status_init, "abd").is_err());
}

// A literal longer than the remaining input must fail.
#[test]
fn <API key>() {
    let rules = rules!{};
    let status_init = Status::init("abcd", &rules);
    assert!(parse_literal(status_init, "abcdefghij").is_err());
}

// Parsing a literal that spans a line break resets col and bumps row.
// NOTE(review): the assertions (row == 1, col == 1) imply the string
// literals below originally contained an embedded newline between "aa"
// and the rest; the whitespace shown here may be an extraction artifact
// of this copy — verify against the original source.
#[test]
fn <API key>() {
    let rules = rules!{};
    let status_init = Status::init(
        "aa aaaaaaaaaaaaaa",
        &rules,
    );
    let (status_end, _) = parse_literal(
        status_init,
        "aa a",
    ).ok()
        .unwrap();

    assert!(status_end.pos.col == 1);
    assert!(status_end.pos.row == 1);
}

// parse_dot consumes exactly one character and errors at end of input.
#[test]
fn test_parse_dot() {
    let rules = rules!{};
    let status = Status::init("ab", &rules);

    let (status, _) = parse_dot(status).ok().unwrap();
    assert!(status.pos.col == 1);
    assert!(status.pos.n == 1);
    assert!(status.pos.row == 0);

    let (status, _) = parse_dot(status).ok().unwrap();
    assert!(status.pos.col == 2);
    assert!(status.pos.n == 2);
    assert!(status.pos.row == 0);

    // Input exhausted: a third dot must fail.
    assert!(parse_dot(status).is_err());
}

// parse_match accepts a character listed explicitly (with_chars) or lying
// inside an inclusive range (with_bound_chars), one character per call.
#[test]
fn test_parse_match_ok() {
    let rules = rules!{};
    let status = Status::init("a f0ghi", &rules);

    // 'a' is in the explicit character list (order is irrelevant).
    let match_rules = MatchRules::new().with_chars("54321ed_cba");
    let (status, _) = parse_match(status, &match_rules).ok().unwrap();
    assert_eq!(status.pos.col, 1);
    assert_eq!(status.pos.n, 1);
    assert_eq!(status.pos.row, 0);

    // Skip the separator with a dot.
    let (status, _) = parse_dot(status).ok().unwrap();

    // 'f' falls in the ('f','g') bound.
    let match_rules = MatchRules::new().with_bound_chars(vec![('f', 'g'), ('h', 'j')]);
    let (status, _) = parse_match(status, &match_rules).ok().unwrap();
    assert_eq!(status.pos.col, 3);
    assert_eq!(status.pos.n, 3);
    assert_eq!(status.pos.row, 0);

    // '0' is in neither bound: must fail.
    assert!(parse_match(status, &match_rules).is_err());
}

// A character outside both the list and the bounds is rejected.
#[test]
fn <API key>() {
    let rules = rules!{};
    let status = Status::init("a9", &rules);

    let match_rules = MatchRules::new().with_chars("ed_cba");
    let (status, _) = parse_match(status, &match_rules).ok().unwrap();
    assert_eq!(status.pos.col, 1);
    assert_eq!(status.pos.n, 1);
    assert_eq!(status.pos.row, 0);

    // '9' is outside both ('a','z') and ('0','8').
    let match_rules = MatchRules::new().with_bound_chars(vec![('a', 'z'), ('0', '8')]);
    assert!(parse_match(status, &match_rules).is_err());
}

// parse_eof succeeds only when the whole input has been consumed.
#[test]
fn <API key>() {
    let rules = rules!{};
    let status = Status::init("a", &rules);
    let match_rules = MatchRules::new().with_bound_chars(vec![('a', 'z'), ('0', '9')]);
    let (status, _) = parse_match(status, &match_rules).ok().unwrap();
    assert!(parse_eof(status).is_ok());
}

// parse_eof fails while unconsumed input remains.
#[test]
fn <API key>() {
    let rules = rules!{};
    let status = Status::init("ab", &rules);
    let match_rules = MatchRules::new().with_bound_chars(vec![('a', 'z'), ('0', '9')]);
    let (status, _) = parse_match(status, &match_rules).ok().unwrap();
    assert!(parse_eof(status).is_err());
}
#include "wrap.h"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <errno.h>

/*
 * Malloc - allocation wrapper that never returns NULL.
 *
 * Allocates `size` bytes of zero-initialized memory.  On allocation
 * failure the error is reported to stderr and the process exits, so
 * callers never need to check the return value.
 *
 * Returns a pointer to the zeroed buffer (caller owns it; release with
 * free()).
 */
void *Malloc(size_t size)
{
    /* calloc() allocates and zeroes in a single call, replacing the
     * original malloc() + memset() pair. */
    void *buffer = calloc(1, size);

    if (buffer == NULL) {
        fprintf(stderr, "calloc() failed: %s\n", strerror(errno));
        exit(EXIT_FAILURE);
    }

    return buffer;
}
<?php namespace Symfony\Component\Routing\Generator; use Symfony\Component\Routing\RouteCollection; use Symfony\Component\Routing\RequestContext; use Symfony\Component\Routing\Exception\<API key>; use Symfony\Component\Routing\Exception\<API key>; use Symfony\Component\Routing\Exception\<API key>; use Psr\Log\LoggerInterface; class UrlGenerator implements <API key>, <API key> { /** * @var RouteCollection */ protected $routes; /** * @var RequestContext */ protected $context; /** * @var bool|null */ protected $strictRequirements = true; /** * @var LoggerInterface|null */ protected $logger;
#ifndef COMPAT_H_RD1Z6YZA
#define COMPAT_H_RD1Z6YZA

// Thin macOS compatibility shims collected under the `oak` namespace.
// NOTE(review): several identifiers in this copy were redacted to
// `<API key>`; the Gestalt selectors are presumably the system-version
// selectors (major/minor/patch) — confirm against the original source.
namespace oak
{
	// Name the *current* thread (the pthread_setname_np() overload taking
	// only a name applies to the calling thread on Darwin).
	inline void set_thread_name (char const* threadName) { pthread_setname_np(threadName); }

	// Query a Gestalt value and widen it to size_t.
	// NOTE(review): the SInt32 result is returned unchecked; a Gestalt
	// failure leaves `res` unspecified.
	inline size_t get_gestalt (OSType selector) { SInt32 res; Gestalt(selector, &res); return res; }

	// OS version components, via Gestalt.
	inline size_t os_major () { return get_gestalt(<API key>); }
	inline size_t os_minor () { return get_gestalt(<API key>); }
	inline size_t os_patch () { return get_gestalt(<API key>); }

	// Wrapper taking std::string for the tool path; the pragmas suppress
	// the deprecation warning for the wrapped Authorization Services call.
	inline OSStatus <API key> (AuthorizationRef authorization, std::string const& pathToTool, AuthorizationFlags options, char* const* arguments, FILE** communicationsPipe)
	{
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-<API key>"
		return <API key>(authorization, pathToTool.c_str(), options, arguments, communicationsPipe);
#pragma clang diagnostic pop
	}

} /* oak */

#endif /* end of include guard: COMPAT_H_RD1Z6YZA */
#ifndef <API key>
#define <API key>

#include <stdint.h>

/* Runtime check: returns nonzero when the CPU supports AltiVec. */
extern int has_altivec(void);

/*
 * Pixel-block primitives (AltiVec implementations).
 *
 * Parameters (both functions):
 *   block     - destination pixel block
 *   pixels    - source pixels
 *   line_size - stride in bytes between successive rows
 *   h         - number of rows to process
 *
 * NOTE(review): function names were redacted in this copy; from the
 * signatures these look like put/avg-style pixel-copy kernels — confirm
 * against the original header.
 */
void <API key>(uint8_t *block, const uint8_t *pixels, int line_size, int h);
void <API key>(uint8_t *block, const uint8_t *pixels, int line_size, int h);

#endif /* <API key> */
// Team Functionality Add-ons for the Process Dashboard // This program is free software; you can redistribute it and/or // as published by the Free Software Foundation; either version 3 // file in the project root directory for more information. // This program is distributed in the hope that it will be useful, // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // The author(s) may be contacted at: // processdash@tuma-solutions.com // processdash-devel@lists.sourceforge.net package teamdash.wbs.columns; import teamdash.wbs.WBSModel; import teamdash.wbs.WBSNode; import teamdash.wbs.WBSNodeTest; public class ErrorNotesColumn extends AbstractNotesColumn { /** The ID we use for this column in the data model */ public static final String COLUMN_ID = "Error Notes"; /** The attribute this column uses to store task notes for a WBS node */ public static final String VALUE_ATTR = "Error Notes"; public ErrorNotesColumn(String authorName) { super(VALUE_ATTR, authorName); this.columnID = COLUMN_ID; this.columnName = resources.getString("Error_Notes.Name"); } @Override protected String getEditDialogTitle() { return columnName; } @Override protected Object getEditDialogHeader(WBSNode node) { return new Object[] { resources.getStrings("Error_Notes.Edit_Dialog_Header"), " " }; } public static String getTextAt(WBSNode node) { return getTextAt(node, VALUE_ATTR); } public static String getTooltipAt(WBSNode node, boolean includeByline) { return getTooltipAt(node, includeByline, VALUE_ATTR); } /** * Find nodes that have errors attached, and expand their ancestors as * needed to ensure that they are visible. * * @param wbs * the WBSModel * @param belowNode * an optional starting point for the search, to limit expansion * to a particular branch of the tree; can be null to search from * the root * @param condition * an optional condition to test; only nodes matching the * condition will be made visible. 
Can be null to show all nodes * with errors */ public static void showNodesWithErrors(WBSModel wbs, WBSNode belowNode, WBSNodeTest condition) { if (belowNode == null) belowNode = wbs.getRoot(); for (WBSNode node : wbs.getDescendants(belowNode)) { String errText = getTextAt(node, VALUE_ATTR); if (errText != null && errText.trim().length() > 0) { if (condition == null || condition.test(node)) wbs.makeVisible(node); } } } }
#ifndef ENGINIOCLIENT_H
#define ENGINIOCLIENT_H

#include <Enginio/<API key>.h>
#include <Enginio/<API key>.h>
#include <QtCore/qjsonobject.h>

QT_BEGIN_NAMESPACE

class <API key>;
class QNetworkReply;
class EnginioReply;
class <API key>;

// Client for the Enginio backend service.  Every request method returns an
// EnginioReply immediately; completion is reported asynchronously through
// the finished()/error() signals below.
// NOTE(review): the base class and export macro names were redacted to
// `<API key>` in this copy.
class <API key> EnginioClient : public <API key>
{
    Q_OBJECT
    Q_ENUMS(Enginio::Operation) // TODO remove me QTBUG-33577
    Q_ENUMS(Enginio::AuthenticationState) // TODO remove me QTBUG-33577
    Q_DECLARE_PRIVATE(EnginioClient)
public:
    explicit EnginioClient(QObject *parent = 0);
    ~EnginioClient();

    // Issue an arbitrary HTTP request against the given URL.
    Q_INVOKABLE EnginioReply *customRequest(const QUrl &url, const QByteArray &httpOperation, const QJsonObject &data = QJsonObject());
    Q_INVOKABLE EnginioReply *fullTextSearch(const QJsonObject &query);

    // CRUD operations; `operation` selects the backend collection kind and
    // defaults to plain object operations.
    Q_INVOKABLE EnginioReply *query(const QJsonObject &query, const Enginio::Operation operation = Enginio::ObjectOperation);
    Q_INVOKABLE EnginioReply *create(const QJsonObject &object, const Enginio::Operation operation = Enginio::ObjectOperation);
    Q_INVOKABLE EnginioReply *update(const QJsonObject &object, const Enginio::Operation operation = Enginio::ObjectOperation);
    Q_INVOKABLE EnginioReply *remove(const QJsonObject &object, const Enginio::Operation operation = Enginio::ObjectOperation);

    // File attachment helpers.
    Q_INVOKABLE EnginioReply *uploadFile(const QJsonObject &associatedObject, const QUrl &file);
    Q_INVOKABLE EnginioReply *downloadUrl(const QJsonObject &object);

Q_SIGNALS:
    // NOTE(review): the first two signal names were redacted in this copy.
    void <API key>(EnginioReply *reply) const;
    void <API key>(EnginioReply *reply) const;
    void sessionTerminated() const;
    void finished(EnginioReply *reply);
    void error(EnginioReply *reply);
};

QT_END_NAMESPACE

#endif // ENGINIOCLIENT_H
<?php
/*
 * Loads the graphics helper classes (Image and its factory).
 *
 * NOTE(review): the trailing "?>" close tag was removed.  PSR-12 requires
 * omitting the close tag in PHP-only files, because any whitespace after
 * it is sent to the output buffer and can break headers.
 */
require_once(ROOT_DIR . 'lib/Graphics/Image.php');
require_once(ROOT_DIR . 'lib/Graphics/ImageFactory.php');
package gobol // Error - defines a common http error interface type Error interface { error StatusCode() int Message() string Package() string Function() string ErrorCode() string }
// <API key>: Apache-2.0 WITH LLVM-exception

// This tablegen backend is responsible for emitting a description of a target
// register bank for a code generator.
//
// NOTE(review): many identifiers in this copy were redacted to `<API key>`,
// and several comment markers were stripped by extraction; the stripped
// comments have been restored as `//` lines below.

#include "llvm/ADT/BitVector.h"
#include "llvm/Support/Debug.h"
#include "llvm/TableGen/Error.h"
#include "llvm/TableGen/Record.h"
#include "llvm/TableGen/TableGenBackend.h"
#include "CodeGenHwModes.h"
#include "CodeGenRegisters.h"
#include "CodeGenTarget.h"

#define DEBUG_TYPE "<API key>"

using namespace llvm;

namespace {

// In-memory model of one `RegisterBank` TableGen record plus the register
// classes computed to be covered by it.
class RegisterBank {

  // A vector of register classes that are included in the register bank.
  typedef std::vector<const <API key> *> RegisterClassesTy;

private:
  // The underlying TableGen record.
  const Record &TheDef;

  // The register classes that are covered by the register bank.
  RegisterClassesTy RCs;

  // The register class with the largest register size.
  const <API key> *<API key>;

public:
  RegisterBank(const Record &TheDef)
      : TheDef(TheDef), RCs(), <API key>(nullptr) {}

  // Get the human-readable name for the bank.
  StringRef getName() const { return TheDef.getValueAsString("Name"); }

  // Get the name of the enumerator in the ID enumeration.
  std::string getEnumeratorName() const { return (TheDef.getName() + "ID").str(); }

  // Get the name of the array holding the register class coverage data;
  std::string <API key>() const {
    return (TheDef.getName() + "CoverageData").str();
  }

  // Get the name of the global instance variable.
  StringRef getInstanceVarName() const { return TheDef.getName(); }

  const Record &getDef() const { return TheDef; }

  // Get the register classes listed in the RegisterBank.RegisterClasses field.
  std::vector<const <API key> *>
  <API key>(const CodeGenRegBank &<API key>) const {
    std::vector<const <API key> *> RCs;
    for (const auto *RCDef : getDef().<API key>("RegisterClasses"))
      RCs.push_back(<API key>.getRegClass(RCDef));
    return RCs;
  }

  // Add a register class to the bank without duplicates.  Also tracks the
  // class with the largest spill size, used later to size the bank.
  void addRegisterClass(const <API key> *RC) {
    if (llvm::is_contained(RCs, RC))
      return;

    // FIXME? We really want the register size rather than the spill size
    // since the spill size may be bigger on some targets with
    // limited load/store instructions. However, we don't store the
    // register size anywhere (we could sum the sizes of the subregisters
    // but there may be additional bits too) and we can't derive it from
    // the VT's reliably due to Untyped.
    if (<API key> == nullptr)
      <API key> = RC;
    else if (<API key>->RSI.get(DefaultMode).SpillSize <
             RC->RSI.get(DefaultMode).SpillSize)
      <API key> = RC;
    assert(<API key> && "RC was nullptr?");

    RCs.emplace_back(RC);
  }

  const <API key> *<API key>() const { return <API key>; }

  iterator_range<typename RegisterClassesTy::const_iterator>
  register_classes() const {
    return llvm::make_range(RCs.begin(), RCs.end());
  }
};

// Drives the backend: analyzes RegisterBank records and emits the header,
// base-class declaration and implementation fragments.
class RegisterBankEmitter {
private:
  CodeGenTarget Target;
  RecordKeeper &Records;

  void emitHeader(raw_ostream &OS, const StringRef TargetName,
                  const std::vector<RegisterBank> &Banks);
  void <API key>(raw_ostream &OS, const StringRef TargetName,
                 const std::vector<RegisterBank> &Banks);
  void <API key>(raw_ostream &OS, const StringRef TargetName,
                 std::vector<RegisterBank> &Banks);

public:
  RegisterBankEmitter(RecordKeeper &R) : Target(R), Records(R) {}

  void run(raw_ostream &OS);
};

} // end anonymous namespace

// Emit code to declare the ID enumeration and external global instance
// variables.
void RegisterBankEmitter::emitHeader(raw_ostream &OS,
                                     const StringRef TargetName,
                                     const std::vector<RegisterBank> &Banks) {
  // <Target>RegisterBankInfo.h
  OS << "namespace llvm {\n"
     << "namespace " << TargetName << " {\n"
     << "enum : unsigned {\n";

  OS << " InvalidRegBankID = ~0u,\n";
  unsigned ID = 0;
  for (const auto &Bank : Banks)
    OS << " " << Bank.getEnumeratorName() << " = " << ID++ << ",\n";
  OS << " NumRegisterBanks,\n"
     << "};\n"
     << "} // end namespace " << TargetName << "\n"
     << "} // end namespace llvm\n";
}

// Emit declarations of the <Target>GenRegisterBankInfo class.
// Emits the protected constructor declaration and the static RegBanks
// array member for the generated <Target>GenRegisterBankInfo class.
void RegisterBankEmitter::<API key>(
    raw_ostream &OS, const StringRef TargetName,
    const std::vector<RegisterBank> &Banks) {
  OS << "private:\n"
     << " static RegisterBank *RegBanks[];\n\n"
     << "protected:\n"
     << " " << TargetName << "GenRegisterBankInfo();\n"
     << "\n";
}

// Visit each register class belonging to the given register bank.
//
// A class belongs to the bank iff any of these apply:
// * It is explicitly specified
// * It is a subclass of a class that is a member.
// * It is a class containing subregisters of the registers of a class that
//   is a member. This is known as a subreg-class.
//
// This function must be called for each explicitly specified register class.
//
// \param RC The register class to search.
// \param Kind A debug string containing the path the visitor took to reach RC.
// \param VisitFn The action to take for each class visited. It may be called
//        multiple times for a given class if there are multiple paths to the
//        class.
static void <API key>(
    const CodeGenRegBank &<API key>, const <API key> *RC, const Twine &Kind,
    std::function<void(const <API key> *, StringRef)> VisitFn,
    SmallPtrSetImpl<const <API key> *> &VisitedRCs) {
  // Make sure we only visit each class once to avoid infinite loops.
  if (VisitedRCs.count(RC))
    return;
  VisitedRCs.insert(RC);

  // Visit each explicitly named class.
  VisitFn(RC, Kind.str());

  for (const auto &PossibleSubclass : <API key>.getRegClasses()) {
    std::string TmpKind =
        (Kind + " (" + PossibleSubclass.getName() + ")").str();

    // Visit each subclass of an explicitly named class.
    if (RC != &PossibleSubclass && RC->hasSubClass(&PossibleSubclass))
      <API key>(<API key>, &PossibleSubclass,
                TmpKind + " " + RC->getName() + " subclass",
                VisitFn, VisitedRCs);

    // Visit each class that contains only subregisters of RC with a common
    // subregister-index.
    //
    // More precisely, PossibleSubclass is a subreg-class iff Reg:SubIdx is in
    // PossibleSubclass for all registers Reg from RC using any
    // subregister-index SubReg
    for (const auto &SubIdx : <API key>.getSubRegIndices()) {
      BitVector BV(<API key>.getRegClasses().size());
      PossibleSubclass.getSuperRegClasses(&SubIdx, BV);
      if (BV.test(RC->EnumValue)) {
        std::string TmpKind2 = (Twine(TmpKind) + " " + RC->getName() +
                                " class-with-subregs: " + RC->getName())
                                   .str();
        VisitFn(&PossibleSubclass, TmpKind2);
      }
    }
  }
}

// Emits the per-bank coverage-data arrays, the RegisterBank global
// instances, the RegBanks[] array and the generated constructor body.
void RegisterBankEmitter::<API key>(
    raw_ostream &OS, StringRef TargetName, std::vector<RegisterBank> &Banks) {
  const CodeGenRegBank &<API key> = Target.getRegBank();

  OS << "namespace llvm {\n"
     << "namespace " << TargetName << " {\n";
  for (const auto &Bank : Banks) {
    // Bucket the covered classes into 32-bit words keyed by their enum
    // value, so coverage can be emitted as a uint32_t bitmask array.
    std::vector<std::vector<const <API key> *>> RCsGroupedByWord(
        (<API key>.getRegClasses().size() + 31) / 32);

    for (const auto &RC : Bank.register_classes())
      RCsGroupedByWord[RC->EnumValue / 32].push_back(RC);

    OS << "const uint32_t " << Bank.<API key>() << "[] = {\n";
    unsigned LowestIdxInWord = 0;
    for (const auto &RCs : RCsGroupedByWord) {
      OS << " // " << LowestIdxInWord << "-" << (LowestIdxInWord + 31) << "\n";
      for (const auto &RC : RCs) {
        std::string QualifiedRegClassID =
            (Twine(RC->Namespace) + "::" + RC->getName() + "RegClassID").str();
        OS << " (1u << (" << QualifiedRegClassID << " - " << LowestIdxInWord
           << ")) |\n";
      }
      // Trailing 0 terminates the OR-chain for this word.
      OS << " 0,\n";
      LowestIdxInWord += 32;
    }
    OS << "};\n";
  }
  OS << "\n";

  for (const auto &Bank : Banks) {
    std::string QualifiedBankID =
        (TargetName + "::" + Bank.getEnumeratorName()).str();
    // Bank size is taken from the covered class with the largest spill
    // size (see RegisterBank::addRegisterClass).
    const <API key> &RC = *Bank.<API key>();
    unsigned Size = RC.RSI.get(DefaultMode).SpillSize;
    OS << "RegisterBank " << Bank.getInstanceVarName() << "( " << QualifiedBankID
       << ", /* Name */ \"" << Bank.getName() << "\", /* Size */ " << Size
       << ", "
       << "/* CoveredRegClasses */ " << Bank.<API key>()
       << ", /* NumRegClasses */ " << <API key>.getRegClasses().size()
       << ");\n";
  }
  OS << "} // end namespace " << TargetName << "\n"
     << "\n";

  OS << "RegisterBank *" << TargetName
     << "GenRegisterBankInfo::RegBanks[] = {\n";
  for (const auto &Bank : Banks)
    OS << " &" << TargetName << "::" << Bank.getInstanceVarName() << ",\n";
  OS << "};\n\n";

  OS << TargetName << "GenRegisterBankInfo::" << TargetName
     << "GenRegisterBankInfo()\n"
     << " : RegisterBankInfo(RegBanks, " << TargetName
     << "::NumRegisterBanks) {\n"
     << " // Assert that RegBank indices match their ID's\n"
     << "#ifndef NDEBUG\n"
     << " unsigned Index = 0;\n"
     << " for (const auto &RB : RegBanks)\n"
     << " assert(Index++ == RB->getID() && \"Index != ID\");\n"
     << "#endif // NDEBUG\n"
     << "}\n"
     << "} // end namespace llvm\n";
}

// Backend entry point: analyze the RegisterBank records, warn about
// ambiguous names, then emit the three guarded output fragments.
void RegisterBankEmitter::run(raw_ostream &OS) {
  StringRef TargetName = Target.getName();
  const CodeGenRegBank &<API key> = Target.getRegBank();

  Records.startTimer("Analyze records");
  std::vector<RegisterBank> Banks;
  for (const auto &V : Records.<API key>("RegisterBank")) {
    SmallPtrSet<const <API key> *, 8> VisitedRCs;
    RegisterBank Bank(*V);

    // Expand each explicitly listed class into the full covered set.
    for (const <API key> *RC : Bank.<API key>(<API key>)) {
      <API key>(
          <API key>, RC, "explicit",
          [&Bank](const <API key> *RC, StringRef Kind) {
            LLVM_DEBUG(dbgs() << "Added " << RC->getName() << "(" << Kind << ")\n");
            Bank.addRegisterClass(RC);
          },
          VisitedRCs);
    }

    Banks.push_back(Bank);
  }

  // Warn about ambiguous MIR caused by register bank/class name clashes.
  Records.startTimer("Warn ambiguous");
  for (const auto &Class : <API key>.getRegClasses()) {
    for (const auto &Bank : Banks) {
      if (Bank.getName().lower() == StringRef(Class.getName()).lower()) {
        PrintWarning(Bank.getDef().getLoc(), "Register bank names should be "
                                             "distinct from register classes "
                                             "to avoid ambiguous MIR");
        PrintNote(Bank.getDef().getLoc(), "RegisterBank was declared here");
        PrintNote(Class.getDef()->getLoc(), "RegisterClass was declared here");
      }
    }
  }

  Records.startTimer("Emit output");
  <API key>("Register Bank Source Fragments", OS);
  OS << "#ifdef <API key>\n"
     << "#undef <API key>\n";
  emitHeader(OS, TargetName, Banks);
  OS << "#endif // <API key>\n\n"
     << "#ifdef <API key>\n"
     << "#undef <API key>\n";
  <API key>(OS, TargetName, Banks);
  OS << "#endif // <API key>\n\n"
     << "#ifdef <API key>\n"
     << "#undef <API key>\n";
  <API key>(OS, TargetName, Banks);
  OS << "#endif // <API key>\n";
}

namespace llvm {

// Public TableGen entry point for this backend.
void EmitRegisterBank(RecordKeeper &RK, raw_ostream &OS) {
  RegisterBankEmitter(RK).run(OS);
}

} // end namespace llvm
// MainWindow.h
// fakeThunder

#import <Cocoa/Cocoa.h>

// Controller for the application's main window.  This header exposes no
// API beyond what NSWindowController provides; all behavior lives in the
// implementation file.
@interface MainWindow : NSWindowController

@end
// David Eberly, Geometric Tools, Redmond WA 98052 #include <GTEnginePCH.h> #include <Graphics/GteViewVolumeNode.h> using namespace gte; ViewVolumeNode::ViewVolumeNode(std::shared_ptr<ViewVolume> const& viewVolume) : mOnUpdate([](ViewVolumeNode*){}) { SetViewVolume(viewVolume); } void ViewVolumeNode::SetViewVolume(std::shared_ptr<ViewVolume> const& viewVolume) { mViewVolume = viewVolume; if (mViewVolume) { Matrix4x4<float> rotate; #if defined(GTE_USE_MAT_VEC) rotate.SetCol(0, mViewVolume->GetDVector()); rotate.SetCol(1, mViewVolume->GetUVector()); rotate.SetCol(2, mViewVolume->GetRVector()); rotate.SetCol(3, { 0.0f, 0.0f, 0.0f, 1.0f }); #else rotate.SetRow(0, mViewVolume->GetDVector()); rotate.SetRow(1, mViewVolume->GetUVector()); rotate.SetRow(2, mViewVolume->GetRVector()); rotate.SetRow(3, { 0.0f, 0.0f, 0.0f, 1.0f }); #endif localTransform.SetTranslation(mViewVolume->GetPosition()); localTransform.SetRotation(rotate); Update(); } } void ViewVolumeNode::UpdateWorldData(double applicationTime) { Node::UpdateWorldData(applicationTime); if (mViewVolume) { Vector4<float> position = worldTransform.GetTranslationW1(); Matrix4x4<float> const& rotate = worldTransform.GetHMatrix(); #if defined(GTE_USE_MAT_VEC) Vector4<float> dVector = rotate.GetCol(0); Vector4<float> uVector = rotate.GetCol(1); Vector4<float> rVector = rotate.GetCol(2); #else Vector4<float> dVector = rotate.GetRow(0); Vector4<float> uVector = rotate.GetRow(1); Vector4<float> rVector = rotate.GetRow(2); #endif mViewVolume->SetFrame(position, dVector, uVector, rVector); mOnUpdate(this); } }
# coding: utf-8 import json import logging import dateutil.parser import pytz from werkzeug import urls from odoo import api, fields, models, _ from odoo.addons.payment.models.payment_acquirer import ValidationError from odoo.addons.payment_paypal.controllers.main import PaypalController from odoo.tools.float_utils import float_compare _logger = logging.getLogger(__name__) class AcquirerPaypal(models.Model): _inherit = 'payment.acquirer' provider = fields.Selection(selection_add=[('paypal', 'Paypal')]) <API key> = fields.Char('Paypal Email ID', <API key>='paypal', groups='base.group_user') <API key> = fields.Char( 'Paypal Merchant ID', groups='base.group_user', help='The Merchant ID is used to ensure communications coming from Paypal are valid and secured.') paypal_use_ipn = fields.Boolean('Use IPN', default=True, help='Paypal Instant Payment Notification', groups='base.group_user') paypal_pdt_token = fields.Char(string='Paypal PDT Token', help='Payment Data Transfer allows you to receive notification of successful payments as they are made.', groups='base.group_user') # Server 2 server paypal_api_enabled = fields.Boolean('Use Rest API', default=False) paypal_api_username = fields.Char('Rest API Username', groups='base.group_user') paypal_api_password = fields.Char('Rest API Password', groups='base.group_user') <API key> = fields.Char('Access Token', groups='base.group_user') <API key> = fields.Datetime('Access Token Validity', groups='base.group_user') # Default paypal fees fees_dom_fixed = fields.Float(default=0.35) fees_dom_var = fields.Float(default=3.4) fees_int_fixed = fields.Float(default=0.35) fees_int_var = fields.Float(default=3.9) def <API key>(self): """Get advanced feature support by provider. 
Each provider should add its technical in the corresponding key for the following features: * fees: support payment fees computations * authorize: support authorizing payment (separates authorization and capture) * tokenize: support saving payment data in a payment.tokenize object """ res = super(AcquirerPaypal, self).<API key>() res['fees'].append('paypal') return res @api.model def _get_paypal_urls(self, environment): """ Paypal URLS """ if environment == 'prod': return { 'paypal_form_url': 'https: 'paypal_rest_url': 'https://api.paypal.com/v1/oauth2/token', } else: return { 'paypal_form_url': 'https: 'paypal_rest_url': 'https://api.sandbox.paypal.com/v1/oauth2/token', } @api.multi def paypal_compute_fees(self, amount, currency_id, country_id): """ Compute paypal fees. :param float amount: the amount to pay :param integer country_id: an ID of a res.country, or None. This is the customer's country, to be compared to the acquirer company country. :return float fees: computed fees """ if not self.fees_active: return 0.0 country = self.env['res.country'].browse(country_id) if country and self.company_id.country_id.id == country.id: percentage = self.fees_dom_var fixed = self.fees_dom_fixed else: percentage = self.fees_int_var fixed = self.fees_int_fixed fees = (percentage / 100.0 * amount + fixed) / (1 - percentage / 100.0) return fees @api.multi def <API key>(self, values): base_url = self.env['ir.config_parameter'].sudo().get_param('web.base.url') paypal_tx_values = dict(values) paypal_tx_values.update({ 'cmd': '_xclick', 'business': self.<API key>, 'item_name': '%s: %s' % (self.company_id.name, values['reference']), 'item_number': values['reference'], 'amount': values['amount'], 'currency_code': values['currency'] and values['currency'].name or '', 'address1': values.get('partner_address'), 'city': values.get('partner_city'), 'country': values.get('partner_country') and values.get('partner_country').code or '', 'state': values.get('partner_state') and 
(values.get('partner_state').code or values.get('partner_state').name) or '', 'email': values.get('partner_email'), 'zip_code': values.get('partner_zip'), 'first_name': values.get('partner_first_name'), 'last_name': values.get('partner_last_name'), 'paypal_return': urls.url_join(base_url, PaypalController._return_url), 'notify_url': urls.url_join(base_url, PaypalController._notify_url), 'cancel_return': urls.url_join(base_url, PaypalController._cancel_url), 'handling': '%.2f' % paypal_tx_values.pop('fees', 0.0) if self.fees_active else False, 'custom': json.dumps({'return_url': '%s' % paypal_tx_values.pop('return_url')}) if paypal_tx_values.get('return_url') else False, }) return paypal_tx_values @api.multi def <API key>(self): return self._get_paypal_urls(self.environment)['paypal_form_url'] class TxPaypal(models.Model): _inherit = 'payment.transaction' paypal_txn_type = fields.Char('Transaction type') # FORM RELATED METHODS @api.model def <API key>(self, data): reference, txn_id = data.get('item_number'), data.get('txn_id') if not reference or not txn_id: error_msg = _('Paypal: received data with missing reference (%s) or txn_id (%s)') % (reference, txn_id) _logger.info(error_msg) raise ValidationError(error_msg) # find tx -> @TDENOTE use txn_id ? 
txs = self.env['payment.transaction'].search([('reference', '=', reference)]) if not txs or len(txs) > 1: error_msg = 'Paypal: received data for reference %s' % (reference) if not txs: error_msg += '; no order found' else: error_msg += '; multiple order found' _logger.info(error_msg) raise ValidationError(error_msg) return txs[0] @api.multi def <API key>(self, data): invalid_parameters = [] _logger.info('Received a notification from Paypal with IPN version %s', data.get('notify_version')) if data.get('test_ipn'): _logger.warning( 'Received a notification from Paypal using sandbox' ), # TODO: txn_id: shoudl be false at draft, set afterwards, and verified with txn details if self.acquirer_reference and data.get('txn_id') != self.acquirer_reference: invalid_parameters.append(('txn_id', data.get('txn_id'), self.acquirer_reference)) # check what is buyed if float_compare(float(data.get('mc_gross', '0.0')), (self.amount + self.fees), 2) != 0: invalid_parameters.append(('mc_gross', data.get('mc_gross'), '%.2f' % self.amount)) # mc_gross is amount + fees if data.get('mc_currency') != self.currency_id.name: invalid_parameters.append(('mc_currency', data.get('mc_currency'), self.currency_id.name)) if 'handling_amount' in data and float_compare(float(data.get('handling_amount')), self.fees, 2) != 0: invalid_parameters.append(('handling_amount', data.get('handling_amount'), self.fees)) # check buyer if self.payment_token_id and data.get('payer_id') != self.payment_token_id.acquirer_ref: invalid_parameters.append(('payer_id', data.get('payer_id'), self.payment_token_id.acquirer_ref)) # check seller if data.get('receiver_id') and self.acquirer_id.<API key> and data['receiver_id'] != self.acquirer_id.<API key>: invalid_parameters.append(('receiver_id', data.get('receiver_id'), self.acquirer_id.<API key>)) if not data.get('receiver_id') or not self.acquirer_id.<API key>: # Check receiver_email only if receiver_id was not checked. 
# In Paypal, this is possible to configure as receiver_email a different email than the business email (the login email) # In Odoo, there is only one field for the Paypal email: the business email. This isn't possible to set a receiver_email # different than the business email. Therefore, if you want such a configuration in your Paypal, you are then obliged to fill # the Merchant ID in the Paypal payment acquirer in Odoo, so the check is performed on this variable instead of the receiver_email. # At least one of the two checks must be done, to avoid fraudsters. if data.get('receiver_email') != self.acquirer_id.<API key>: invalid_parameters.append(('receiver_email', data.get('receiver_email'), self.acquirer_id.<API key>)) return invalid_parameters @api.multi def <API key>(self, data): status = data.get('payment_status') res = { 'acquirer_reference': data.get('txn_id'), 'paypal_txn_type': data.get('payment_type'), } if status in ['Completed', 'Processed']: _logger.info('Validated Paypal payment for tx %s: set as done' % (self.reference)) try: # dateutil and pytz don't recognize abbreviations PDT/PST tzinfos = { 'PST': -8 * 3600, 'PDT': -7 * 3600, } date = dateutil.parser.parse(data.get('payment_date'), tzinfos=tzinfos).astimezone(pytz.utc) except: date = fields.Datetime.now() res.update(date=date) self.<API key>() return self.write(res) elif status in ['Pending', 'Expired']: _logger.info('Received notification for Paypal payment %s: set as pending' % (self.reference)) res.update(state_message=data.get('pending_reason', '')) self.<API key>() return self.write(res) else: error = 'Received unrecognized status for Paypal payment %s: %s, set as error' % (self.reference, status) _logger.info(error) res.update(state_message=error) self.<API key>() return self.write(res)
=head1 NAME

apt-cudf.conf - Configuration file for apt-cudf

=head1 DESCRIPTION

The configuration file allows one to define default optimization
criteria for all solvers known by apt-cudf

=head1 SYNTAX

solver: <solver list> | '*'

A comma-separated list of solvers. The '*' character makes the
optimization criteria the default for all solvers without a more
specific definition.

upgrade: <optimization criteria>

dist-upgrade: <optimization criteria>

install: <optimization criteria>

remove: <optimization criteria>

Default optimization criteria associated with apt-get actions. The
optimization criteria are solver-specific. Specifying an incorrect
criterion will result in an error from the underlying cudf solver.
Please refer to the solver man page for the correct syntax

trendy: <optimization criteria>

paranoid: <optimization criteria>

<keyword>: <optimization criteria>

Define a shortcut for an optimization criterion. The shortcut can then
be used by apt-get to pass a specific optimization criterion to a cudf
solver

apt-get install gnome --solver aspcud -o "APT::Solver::aspcud::Preferences=trendy"

=head1 EXAMPLE

solver: mccs-cbc , mccs-lpsolve
upgrade: -lex[-new,-removed,-notuptodate]
dist-upgrade: -lex[-notuptodate,-new]
install: -lex[-removed,-changed]
remove: -lex[-removed,-changed]
trendy: -lex[-removed,-notuptodate,-unsat_recommends,-new]
paranoid: -lex[-removed,-changed]

solver: *
upgrade: -new,-removed,-notuptodate
dist-upgrade: -notuptodate,-new
install: -removed,-changed
remove: -removed,-changed
trendy: -removed,-notuptodate,-unsat_recommends,-new
paranoid: -removed,-changed

=head1 SEE ALSO

apt-cudf(8), apt-get(8), update-cudf-solvers(8),
L<README.cudf-solvers|file:///usr/share/doc/apt-cudf/README.cudf-solvers>,
L<README.Debian|file:///usr/share/doc/apt-cudf/README.Debian>

=head1 AUTHOR

Copyright: (C) 2011 Pietro Abate <pietro.abate@pps.jussieu.fr>
Copyright: (C) 2011 Stefano Zacchiroli <zack@debian.org>
License: GNU Lesser General Public License (GPL), version 3 or
above =cut
# This program is free software: you can redistribute it and/or modify # (at your option) any later version. # This program is distributed in the hope that it will be useful, # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # experiments.py import unittest import weka.core.jvm as jvm import weka.core.converters as converters import weka.classifiers as classifiers import weka.experiments as experiments import weka.plot.experiments as plot import wekatests.tests.weka_test as weka_test class TestExperiments(weka_test.WekaTest): def <API key>(self): """ Tests the plot_experiment method. """ datasets = [self.datafile("bolts.arff"), self.datafile("bodyfat.arff"), self.datafile("autoPrice.arff")] cls = [ classifiers.Classifier("weka.classifiers.trees.REPTree"), classifiers.Classifier("weka.classifiers.functions.LinearRegression"), classifiers.Classifier("weka.classifiers.functions.SMOreg"), ] outfile = self.tempfile("results-rs.arff") exp = experiments.<API key>( classification=False, runs=10, percentage=66.6, preserve_order=False, datasets=datasets, classifiers=cls, result=outfile) exp.setup() exp.run() # evaluate loader = converters.loader_for_file(outfile) data = loader.load_file(outfile) matrix = experiments.ResultMatrix("weka.experiment.<API key>") tester = experiments.Tester("weka.experiment.<API key>") tester.resultmatrix = matrix comparison_col = data.attribute_by_name("<API key>").index tester.instances = data tester.header(comparison_col) tester.<API key>(0, comparison_col) # plot plot.plot_experiment(matrix, title="Random split (w/ StdDev)", measure="Correlation coefficient", show_stdev=True, wait=False) plot.plot_experiment(matrix, title="Random split", measure="Correlation coefficient", wait=False) def suite(): """ Returns the test suite. :return: the test suite :rtype: unittest.TestSuite """ return unittest.TestLoader().<API key>(TestExperiments) if __name__ == '__main__': jvm.start() unittest.TextTestRunner().run(suite()) jvm.stop()
<!-- Incoming-message template fragment.  The %token% placeholders
     (%time%, %sender%, %message%, %userIconPath%, %textbackgroundcolor{...}%)
     are presumably substituted by the consuming chat/style engine at render
     time - confirm against the engine that loads this file. -->
<div class="container" style="background-color:%textbackgroundcolor{.75}%;">
    <span class="time_initial"> %time% </span>
    <span class="buddyicon">
        <img src="%userIconPath%" width="24" height="24" />
    </span>
    <!-- NOTE(review): "visible" is not a standard HTML attribute; it looks
         engine-interpreted (show block only when an icon path exists). -->
    <div class="placeholder" visible="%userIconPath%">
        <span class="sender incoming"> %sender% </span>
        <span class="message incoming_link"> <br>%message% </span>
    </div>
    <span class="clear"></span>
</div>
<!-- Anchor where the engine inserts the next consecutive message. -->
<span id="insert"></span>
using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; using OpenCL.Net; namespace Conv.NET { [Serializable] public class FullyConnectedLayer : Layer { #region Fields private double dropoutParameter; // Host private float[] weightsHost; private float[] biasesHost; // Device [NonSerialized] private Mem dropoutMaskGPU; [NonSerialized] private Mem weightsGPU; [NonSerialized] private Mem biasesGPU; [NonSerialized] private Mem weightsGradientsGPU; [NonSerialized] private Mem biasesGradientsGPU; [NonSerialized] private Mem weightsSpeedGPU; [NonSerialized] private Mem biasesSpeedGPU; // Global and local work-group sizes (for OpenCL kernels) - will be set in SetWorkGroupSizes(); private IntPtr[] <API key>; private IntPtr[] <API key>; private IntPtr[] <API key>; private IntPtr[] <API key>; private IntPtr[] <API key>; private IntPtr[] <API key>; private IntPtr[] <API key>; private IntPtr[] <API key>; #endregion #region Properties public override Mem WeightsGPU { get { return weightsGPU; } } public override double DropoutParameter { set { this.dropoutParameter = value; } } #endregion #region Setup methods <summary> Constructor of fully connected layer type. Specify number of units as argument. 
</summary> <param name="nUnits"></param> public FullyConnectedLayer(int nUnits) { this.type = "FullyConnected"; this.nOutputUnits = nUnits; } public override void SetupOutput() { this.outputDepth = nOutputUnits; this.outputHeight = 1; this.outputWidth = 1; this.outputNeurons = new Neurons(this.nOutputUnits); #if OPENCL_ENABLED this.dropoutMaskGPU = (Mem)Cl.CreateBuffer( OpenCLSpace.Context, MemFlags.ReadWrite, (IntPtr)(sizeof(bool) * nOutputUnits * inputNeurons.MiniBatchSize), out OpenCLSpace.ClError); OpenCLSpace.CheckErr(OpenCLSpace.ClError, "<API key>(): Cl.CreateBuffer"); OpenCLSpace.WipeBuffer(dropoutMaskGPU, nOutputUnits * inputNeurons.MiniBatchSize, typeof(bool)); #endif } public override void <API key>(string Option) { base.<API key>(Option); // makes sure this method is only call AFTER "SetupOutput()" if (Option == "random") // sample new parameters { // WEIGHTS are initialized as normally distributed numbers with mean 0 and std equals to sqrt(2/nInputUnits) // BIASES are initialized to a small positive number, e.g. 
0.001 this.weightsHost = new float[nOutputUnits * nInputUnits]; this.biasesHost = new float[nOutputUnits]; double weightsStdDev = Math.Sqrt(2.0 / (10 * nInputUnits)); double uniformRand1; double uniformRand2; double tmp; for (int iRow = 0; iRow < nOutputUnits; iRow++) { for (int iCol = 0; iCol < nInputUnits; iCol++) { uniformRand1 = Global.rng.NextDouble(); uniformRand2 = Global.rng.NextDouble(); // Use a Box-Muller transform to get a random normal(0,1) tmp = Math.Sqrt(-2.0 * Math.Log(uniformRand1)) * Math.Sin(2.0 * Math.PI * uniformRand2); tmp = weightsStdDev * tmp; // rescale weightsHost[iRow * nInputUnits + iCol] = (float)tmp; } biasesHost[iRow] = 0.00f; } } // else Option must be ''load'' => do not sample parameters, just load them from host to device int weightBufferSize = sizeof(float) * (outputNeurons.NumberOfUnits * inputNeurons.NumberOfUnits); int biasesBufferSize = sizeof(float) * outputNeurons.NumberOfUnits; this.weightsGPU = (Mem)Cl.CreateBuffer( OpenCLSpace.Context, MemFlags.ReadWrite | MemFlags.CopyHostPtr, (IntPtr)weightBufferSize, weightsHost, out OpenCLSpace.ClError); OpenCLSpace.CheckErr(OpenCLSpace.ClError, "Cl.CreateBuffer"); this.biasesGPU = (Mem)Cl.CreateBuffer( OpenCLSpace.Context, MemFlags.ReadWrite | MemFlags.CopyHostPtr, (IntPtr)biasesBufferSize, biasesHost, out OpenCLSpace.ClError); OpenCLSpace.CheckErr(OpenCLSpace.ClError, "Cl.CreateBuffer"); // Also create weightsGradients and biasesGradients buffers and initialize them to zero this.weightsGradientsGPU = (Mem)Cl.CreateBuffer(OpenCLSpace.Context, MemFlags.ReadWrite, (IntPtr)weightBufferSize, out OpenCLSpace.ClError); OpenCLSpace.CheckErr(OpenCLSpace.ClError, "Cl.CreateBuffer"); OpenCLSpace.WipeBuffer(weightsGradientsGPU, (nInputUnits * nOutputUnits), typeof(float)); this.biasesGradientsGPU = (Mem)Cl.CreateBuffer( OpenCLSpace.Context, MemFlags.ReadWrite, (IntPtr)biasesBufferSize, out OpenCLSpace.ClError); OpenCLSpace.CheckErr(OpenCLSpace.ClError, "Cl.CreateBuffer"); 
OpenCLSpace.WipeBuffer(biasesGradientsGPU, nOutputUnits, typeof(float)); // Also create weightsSpeed and biasesSpeed buffers and initialize them to zero this.weightsSpeedGPU = (Mem)Cl.CreateBuffer(OpenCLSpace.Context, MemFlags.ReadWrite, (IntPtr)weightBufferSize, out OpenCLSpace.ClError); OpenCLSpace.CheckErr(OpenCLSpace.ClError, "Cl.CreateBuffer"); OpenCLSpace.WipeBuffer(weightsSpeedGPU, (nInputUnits * nOutputUnits), typeof(float)); this.biasesSpeedGPU = (Mem)Cl.CreateBuffer(OpenCLSpace.Context, MemFlags.ReadWrite, (IntPtr)biasesBufferSize, out OpenCLSpace.ClError); OpenCLSpace.CheckErr(OpenCLSpace.ClError, "Cl.CreateBuffer"); OpenCLSpace.WipeBuffer(biasesSpeedGPU, nOutputUnits, typeof(float)); } public override void SetWorkGroups() { // Work group sizes will be set as follows: // global work size = smallest multiple of OPTIMAL_GROUP_SIZE larger than // the total number of processes needed (for efficiency). // local work size = as close as possible to OPTIMAL_GROUP_SIZE (making sure // that global worksize is a multiple of this) // OPTIMAL_GROUP_SIZE is a small multiple of BASE_GROUP_SIZE, which in turn is a // constant multiple of 2, platform-dependent, e.g. 32 (Nvidia // WARP) or 64 (AMD WAVEFRONT). 
int miniBatchSize = outputNeurons.MiniBatchSize; // FeedForward (2D) <API key> // Local int optimalToBaseRatio = OpenCLSpace.OPTIMAL_GROUP_SIZE / OpenCLSpace.BASE_GROUP_SIZE; this.<API key> = new IntPtr[] { (IntPtr)OpenCLSpace.BASE_GROUP_SIZE, (IntPtr)optimalToBaseRatio }; // Global int smallestMultiple0 = (int)(OpenCLSpace.BASE_GROUP_SIZE * Math.Ceiling((double)(nOutputUnits) / (double)OpenCLSpace.BASE_GROUP_SIZE)); int smallestMultiple1 = (int)(optimalToBaseRatio * Math.Ceiling((double)(miniBatchSize) / (double)optimalToBaseRatio)); this.<API key> = new IntPtr[] { (IntPtr)smallestMultiple0, (IntPtr)smallestMultiple1 }; // Local this.<API key> = new IntPtr[] { (IntPtr)OpenCLSpace.BASE_GROUP_SIZE, (IntPtr)optimalToBaseRatio }; // Global smallestMultiple0 = (int)(OpenCLSpace.BASE_GROUP_SIZE * Math.Ceiling((double)(nInputUnits) / (double)OpenCLSpace.BASE_GROUP_SIZE)); // input this time! this.<API key> = new IntPtr[] { (IntPtr)smallestMultiple0, (IntPtr)smallestMultiple1 }; // UpdateSpeeds and UpdateParameters (2D) <API key> // Local this.<API key> = new IntPtr[] { (IntPtr)optimalToBaseRatio, (IntPtr)OpenCLSpace.BASE_GROUP_SIZE }; // product is OPTIMAL_WORK_SIZE // Global smallestMultiple0 = (int)(optimalToBaseRatio * Math.Ceiling((double)(nOutputUnits) / (double)optimalToBaseRatio)); smallestMultiple1 = (int)(OpenCLSpace.BASE_GROUP_SIZE * Math.Ceiling((double)(nInputUnits) / (double)OpenCLSpace.BASE_GROUP_SIZE)); this.<API key> = new IntPtr[] { (IntPtr)smallestMultiple0, (IntPtr)smallestMultiple1 }; // Max norm constrain this.<API key> = new IntPtr[] { (IntPtr)OpenCLSpace.BASE_GROUP_SIZE }; int smallestMultipleAux = (int)(OpenCLSpace.BASE_GROUP_SIZE * Math.Ceiling((double)(nOutputUnits) / (double)OpenCLSpace.BASE_GROUP_SIZE)); this.<API key> = new IntPtr[] { (IntPtr)smallestMultipleAux }; } public override void CopyBuffersToHost() { OpenCLSpace.ClError = Cl.EnqueueReadBuffer( OpenCLSpace.Queue, weightsGPU, // source Bool.True, (IntPtr)0, (IntPtr)(sizeof(float) * 
nInputUnits * nOutputUnits), weightsHost, // destination 0, null, out OpenCLSpace.ClEvent); OpenCLSpace.CheckErr(OpenCLSpace.ClError, "clEnqueueReadBuffer weightsGPU"); OpenCLSpace.ClError = Cl.ReleaseEvent(OpenCLSpace.ClEvent); OpenCLSpace.CheckErr(OpenCLSpace.ClError, "Cl.ReleaseEvent"); OpenCLSpace.ClError = Cl.EnqueueReadBuffer( OpenCLSpace.Queue, biasesGPU, // source Bool.True, (IntPtr)0, (IntPtr)(sizeof(float) * nOutputUnits), biasesHost, // destination 0, null, out OpenCLSpace.ClEvent); OpenCLSpace.CheckErr(OpenCLSpace.ClError, "clEnqueueReadBuffer biasesGPU"); OpenCLSpace.ClError = Cl.ReleaseEvent(OpenCLSpace.ClEvent); OpenCLSpace.CheckErr(OpenCLSpace.ClError, "Cl.ReleaseEvent"); OpenCLSpace.ClError = Cl.Finish(OpenCLSpace.Queue); OpenCLSpace.CheckErr(OpenCLSpace.ClError, "Cl.Finish"); // Speeds are not saved. } #endregion #region Methods public override void FeedForward() { #if TIMING_LAYERS Utils.FCForwardTimer.Start(); #endif #if OPENCL_ENABLED // Set kernel arguments OpenCLSpace.ClError = Cl.SetKernelArg(OpenCLSpace.FCForward, 0, outputNeurons.ActivationsGPU); OpenCLSpace.ClError |= Cl.SetKernelArg(OpenCLSpace.FCForward, 1, inputNeurons.ActivationsGPU); OpenCLSpace.ClError |= Cl.SetKernelArg(OpenCLSpace.FCForward, 2, weightsGPU); OpenCLSpace.ClError |= Cl.SetKernelArg(OpenCLSpace.FCForward, 3, biasesGPU); OpenCLSpace.ClError |= Cl.SetKernelArg(OpenCLSpace.FCForward, 4, (IntPtr)sizeof(int), nInputUnits); OpenCLSpace.ClError |= Cl.SetKernelArg(OpenCLSpace.FCForward, 5, (IntPtr)sizeof(int), nOutputUnits); OpenCLSpace.ClError |= Cl.SetKernelArg(OpenCLSpace.FCForward, 6, (IntPtr)sizeof(int), inputNeurons.MiniBatchSize); OpenCLSpace.ClError |= Cl.SetKernelArg(OpenCLSpace.FCForward, 7, (IntPtr)sizeof(float), (float)dropoutParameter); OpenCLSpace.ClError |= Cl.SetKernelArg(OpenCLSpace.FCForward, 8, (IntPtr)sizeof(ulong), (ulong)Guid.NewGuid().GetHashCode()); // this should be quite a good random seed OpenCLSpace.ClError |= Cl.SetKernelArg(OpenCLSpace.FCForward, 
9, dropoutMaskGPU); OpenCLSpace.CheckErr(OpenCLSpace.ClError, "FullyConnected.FeedForward(): Cl.SetKernelArg"); // Run kernel OpenCLSpace.ClError = Cl.<API key>( OpenCLSpace.Queue, OpenCLSpace.FCForward, 2, null, <API key>, <API key>, 0, null, out OpenCLSpace.ClEvent); OpenCLSpace.CheckErr(OpenCLSpace.ClError, "FullyConnected.FeedForward(): Cl.<API key>"); OpenCLSpace.ClError = Cl.ReleaseEvent(OpenCLSpace.ClEvent); OpenCLSpace.CheckErr(OpenCLSpace.ClError, "Cl.ReleaseEvent"); OpenCLSpace.ClError = Cl.Finish(OpenCLSpace.Queue); OpenCLSpace.CheckErr(OpenCLSpace.ClError, "Cl.Finish"); #else // TODO: add dropout CPU // Generate dropout mask if (dropoutParameter < 1) { for (int iUnit = 0; iUnit < nOutputUnits * inputNeurons.MiniBatchSize; ++iUnit) dropoutMask[iUnit] = Global.RandomDouble() < dropoutParameter; } for (int m = 0; m < inputNeurons.MiniBatchSize; m++) { double[] unbiasedOutput = Utils.<API key>(weights, inputNeurons.GetHost()[m]); this.outputNeurons.SetHost(m, unbiasedOutput.Zip(biases, (x, y) => x + y).ToArray()); } #endif #if TIMING_LAYERS Utils.FCForwardTimer.Stop(); #endif } public override void BackPropagate() { #if TIMING_LAYERS Utils.FCBackpropTimer.Start(); #endif #if OPENCL_ENABLED // Set kernel arguments OpenCLSpace.ClError |= Cl.SetKernelArg(OpenCLSpace.FCBackward, 0, inputNeurons.DeltaGPU); OpenCLSpace.ClError |= Cl.SetKernelArg(OpenCLSpace.FCBackward, 1, outputNeurons.DeltaGPU); OpenCLSpace.ClError |= Cl.SetKernelArg(OpenCLSpace.FCBackward, 2, weightsGPU); OpenCLSpace.ClError |= Cl.SetKernelArg(OpenCLSpace.FCBackward, 3, dropoutMaskGPU); OpenCLSpace.ClError |= Cl.SetKernelArg(OpenCLSpace.FCBackward, 4, (IntPtr)sizeof(int), nInputUnits); OpenCLSpace.ClError |= Cl.SetKernelArg(OpenCLSpace.FCBackward, 5, (IntPtr)sizeof(int), nOutputUnits); OpenCLSpace.ClError |= Cl.SetKernelArg(OpenCLSpace.FCBackward, 6, (IntPtr)sizeof(int), inputNeurons.MiniBatchSize); OpenCLSpace.CheckErr(OpenCLSpace.ClError, "FullyConnected.BackPropagate(): Cl.SetKernelArg"); // 
Run kernel OpenCLSpace.ClError = Cl.<API key>( OpenCLSpace.Queue, OpenCLSpace.FCBackward, 2, null, <API key>, <API key>, 0, null, out OpenCLSpace.ClEvent); OpenCLSpace.CheckErr(OpenCLSpace.ClError, "FullyConnected.BackPropagate(): Cl.<API key>"); OpenCLSpace.ClError = Cl.ReleaseEvent(OpenCLSpace.ClEvent); OpenCLSpace.CheckErr(OpenCLSpace.ClError, "Cl.ReleaseEvent"); OpenCLSpace.ClError = Cl.Finish(OpenCLSpace.Queue); OpenCLSpace.CheckErr(OpenCLSpace.ClError, "Cl.Finish"); #else for (int m = 0; m < inputNeurons.MiniBatchSize; m++) { inputNeurons.DeltaHost[m] = Utils.<API key>(weights, outputNeurons.DeltaHost[m]); } #endif #if TIMING_LAYERS Utils.FCBackpropTimer.Stop(); #endif } public override void UpdateSpeeds(double learningRate, double momentumCoefficient, double <API key>) { #if TIMING_LAYERS Utils.FCUpdateSpeedsTimer.Start(); #endif #if <API key> float[,] weightsBeforeUpdate = new float[output.NumberOfUnits, input.NumberOfUnits]; #if OPENCL_ENABLED // Display weights before update OpenCLSpace.ClError = Cl.EnqueueReadBuffer(OpenCLSpace.Queue, weightsGPU, // source Bool.True, (IntPtr)0, (IntPtr)(output.NumberOfUnits * input.NumberOfUnits * sizeof(float)), weightsBeforeUpdate, // destination 0, null, out OpenCLSpace.ClEvent); OpenCLSpace.CheckErr(OpenCLSpace.ClError, "FullyConnectedLayer.UpdateParameters Cl.clEnqueueReadBuffer weightsBeforeUpdate"); #else weightsBeforeUpdate = weights; #endif Console.WriteLine("\nWeights BEFORE update:"); for (int i = 0; i < weightsBeforeUpdate.GetLength(0); i++) { for (int j = 0; j < weightsBeforeUpdate.GetLength(1); j++) Console.Write("{0} ", weightsBeforeUpdate[i, j]); Console.WriteLine(); } Console.WriteLine(); Console.ReadKey(); // Display biases before update float[] biasesBeforeUpdate = new float[output.NumberOfUnits]; #if OPENCL_ENABLED OpenCLSpace.ClError = Cl.EnqueueReadBuffer(OpenCLSpace.Queue, biasesGPU, // source Bool.True, (IntPtr)0, (IntPtr)(output.NumberOfUnits * sizeof(float)), biasesBeforeUpdate, // destination 
0, null, out OpenCLSpace.ClEvent); OpenCLSpace.CheckErr(OpenCLSpace.ClError, "FullyConnectedLayer.UpdateParameters Cl.clEnqueueReadBuffer biasesBeforeUpdate"); #else biasesBeforeUpdate = biases; #endif Console.WriteLine("\nBiases BEFORE update:"); for (int i = 0; i < biasesBeforeUpdate.Length; i++) { Console.Write("{0} ", biasesBeforeUpdate[i]); } Console.WriteLine(); Console.ReadKey(); // Display weight update speed before update float[,] <API key> = new float[output.NumberOfUnits, input.NumberOfUnits]; #if OPENCL_ENABLED OpenCLSpace.ClError = Cl.EnqueueReadBuffer(OpenCLSpace.Queue, <API key>, // source Bool.True, (IntPtr)0, (IntPtr)(output.NumberOfUnits * input.NumberOfUnits * sizeof(float)), <API key>, // destination 0, null, out OpenCLSpace.ClEvent); OpenCLSpace.CheckErr(OpenCLSpace.ClError, "FullyConnectedLayer.UpdateParameters Cl.clEnqueueReadBuffer weightsUpdateSpeed"); #else <API key> = weightsUpdateSpeed; #endif Console.WriteLine("\nWeight update speed BEFORE update:"); for (int i = 0; i < <API key>.GetLength(0); i++) { for (int j = 0; j < <API key>.GetLength(1); j++) Console.Write("{0} ", <API key>[i, j]); Console.WriteLine(); } Console.WriteLine(); Console.ReadKey(); // Display input activations before update #endif #if OPENCL_ENABLED // Set kernel arguments OpenCLSpace.ClError = Cl.SetKernelArg(OpenCLSpace.FCUpdateSpeeds, 0, weightsSpeedGPU); OpenCLSpace.ClError |= Cl.SetKernelArg(OpenCLSpace.FCUpdateSpeeds, 1, biasesSpeedGPU); OpenCLSpace.ClError |= Cl.SetKernelArg(OpenCLSpace.FCUpdateSpeeds, 2, weightsGradientsGPU); OpenCLSpace.ClError |= Cl.SetKernelArg(OpenCLSpace.FCUpdateSpeeds, 3, biasesGradientsGPU); OpenCLSpace.ClError |= Cl.SetKernelArg(OpenCLSpace.FCUpdateSpeeds, 4, inputNeurons.ActivationsGPU); OpenCLSpace.ClError |= Cl.SetKernelArg(OpenCLSpace.FCUpdateSpeeds, 5, outputNeurons.DeltaGPU); OpenCLSpace.ClError |= Cl.SetKernelArg(OpenCLSpace.FCUpdateSpeeds, 6, dropoutMaskGPU); OpenCLSpace.ClError |= Cl.SetKernelArg(OpenCLSpace.FCUpdateSpeeds, 7, 
// NOTE(review): continuation of UpdateSpeeds(...) kernel-argument setup begun above.
// Args 8-13 pass layer dimensions, hyperparameters and the weights buffer; the
// redacted float at arg 13 is presumably a regularization coefficient — confirm
// against the FCUpdateSpeeds kernel signature.
(IntPtr)sizeof(int), nInputUnits);
OpenCLSpace.ClError |= Cl.SetKernelArg(OpenCLSpace.FCUpdateSpeeds, 8, (IntPtr)sizeof(int), nOutputUnits);
OpenCLSpace.ClError |= Cl.SetKernelArg(OpenCLSpace.FCUpdateSpeeds, 9, (IntPtr)sizeof(float), (float)momentumCoefficient);
OpenCLSpace.ClError |= Cl.SetKernelArg(OpenCLSpace.FCUpdateSpeeds, 10, (IntPtr)sizeof(float), (float)learningRate);
OpenCLSpace.ClError |= Cl.SetKernelArg(OpenCLSpace.FCUpdateSpeeds, 11, (IntPtr)sizeof(int), inputNeurons.MiniBatchSize);
OpenCLSpace.ClError |= Cl.SetKernelArg(OpenCLSpace.FCUpdateSpeeds, 12, weightsGPU);
OpenCLSpace.ClError |= Cl.SetKernelArg(OpenCLSpace.FCUpdateSpeeds, 13, (IntPtr)sizeof(float), (float)<API key>);
OpenCLSpace.CheckErr(OpenCLSpace.ClError, "FullyConnected.UpdateSpeeds(): Cl.SetKernelArg");

// Run kernel (2D NDRange; the redacted arguments are presumably the global/local
// work-size arrays — confirm against their declaration elsewhere in this class).
OpenCLSpace.ClError = Cl.<API key>( OpenCLSpace.Queue,
                                    OpenCLSpace.FCUpdateSpeeds,
                                    2,
                                    null,
                                    <API key>,
                                    <API key>,
                                    0,
                                    null,
                                    out OpenCLSpace.ClEvent);
OpenCLSpace.CheckErr(OpenCLSpace.ClError, "FullyConnected.UpdateSpeeds(): Cl.<API key>");

OpenCLSpace.ClError = Cl.ReleaseEvent(OpenCLSpace.ClEvent);
OpenCLSpace.CheckErr(OpenCLSpace.ClError, "Cl.ReleaseEvent");

OpenCLSpace.ClError = Cl.Finish(OpenCLSpace.Queue);
OpenCLSpace.CheckErr(OpenCLSpace.ClError, "Cl.Finish");
#else
// CPU fallback: momentum is applied exactly once per mini-batch (when m == 0),
// then gradient contributions are accumulated over all batch samples.
int miniBatchSize = inputNeurons.MiniBatchSize;

for (int m = 0; m < miniBatchSize; m++)
{
    for (int i = 0; i < nOutputUnits; i++)
    {
        // weights speed
        for (int j = 0; j < nInputUnits; j++)
        {
            if (m == 0)
                weightsUpdateSpeed[i, j] *= momentumCoefficient;

            weightsUpdateSpeed[i, j] -= learningRate/miniBatchSize * inputNeurons.GetHost()[m][j] * outputNeurons.DeltaHost[m][i];
#if GRADIENT_CHECK
            // NOTE(review): assignment (not +=) means only the last sample's
            // contribution survives the loop over m — verify this is intended.
            weightsGradients[i, j] = inputNeurons.GetHost()[m][j] * outputNeurons.DeltaHost[m][i];
#endif
        }

        // update biases
        if (m == 0)
            biasesUpdateSpeed[i] *= momentumCoefficient;

        biasesUpdateSpeed[i] -= learningRate/miniBatchSize * outputNeurons.DeltaHost[m][i];
#if GRADIENT_CHECK
        // NOTE(review): same overwrite-per-sample pattern as above.
        biasesGradients[i] = outputNeurons.DeltaHost[m][i];
#endif
    }
} // end loop over mini-batch
#endif

#if TIMING_LAYERS
Utils.FCUpdateSpeedsTimer.Stop();
#endif
}

// Applies the accumulated update speeds to the weights and biases, then (OpenCL
// path only) optionally renormalizes each weight row so its norm does not exceed
// weightMaxNorm. Pass double.PositiveInfinity to skip the max-norm constraint.
public override void UpdateParameters(double weightMaxNorm)
{
#if TIMING_LAYERS
    Utils.<API key>.Start();
#endif

#if OPENCL_ENABLED
    // Set kernel arguments
    OpenCLSpace.ClError = Cl.SetKernelArg(OpenCLSpace.FCUpdateParameters, 0, weightsGPU);
    OpenCLSpace.ClError |= Cl.SetKernelArg(OpenCLSpace.FCUpdateParameters, 1, biasesGPU);
    OpenCLSpace.ClError |= Cl.SetKernelArg(OpenCLSpace.FCUpdateParameters, 2, weightsSpeedGPU);
    OpenCLSpace.ClError |= Cl.SetKernelArg(OpenCLSpace.FCUpdateParameters, 3, biasesSpeedGPU);
    OpenCLSpace.ClError |= Cl.SetKernelArg(OpenCLSpace.FCUpdateParameters, 4, (IntPtr)sizeof(int), nInputUnits);
    OpenCLSpace.ClError |= Cl.SetKernelArg(OpenCLSpace.FCUpdateParameters, 5, (IntPtr)sizeof(int), nOutputUnits);
    OpenCLSpace.CheckErr(OpenCLSpace.ClError, "FullyConnected.UpdateParameters(): Cl.SetKernelArg");

    // Run kernel
    OpenCLSpace.ClError = Cl.<API key>( OpenCLSpace.Queue,
                                        OpenCLSpace.FCUpdateParameters,
                                        2,
                                        null,
                                        <API key>,
                                        <API key>,
                                        0,
                                        null,
                                        out OpenCLSpace.ClEvent);
    OpenCLSpace.CheckErr(OpenCLSpace.ClError, "FullyConnected.UpdateParameters(): Cl.<API key>");

    OpenCLSpace.ClError = Cl.ReleaseEvent(OpenCLSpace.ClEvent);
    OpenCLSpace.CheckErr(OpenCLSpace.ClError, "Cl.ReleaseEvent");

    // Now constrain norm of each weight vector
    if (!double.IsInfinity(weightMaxNorm))
    {
        // Set kernel arguments
        OpenCLSpace.ClError = Cl.SetKernelArg(OpenCLSpace.<API key>, 0, weightsGPU);
        OpenCLSpace.ClError |= Cl.SetKernelArg(OpenCLSpace.<API key>, 1, (IntPtr)sizeof(int), nOutputUnits);
        OpenCLSpace.ClError |= Cl.SetKernelArg(OpenCLSpace.<API key>, 2, (IntPtr)sizeof(int), nInputUnits);
        OpenCLSpace.ClError |= Cl.SetKernelArg(OpenCLSpace.<API key>, 3, (IntPtr)sizeof(float), (float)weightMaxNorm);
        OpenCLSpace.CheckErr(OpenCLSpace.ClError, "<API key>(): Cl.SetKernelArg");

        // Run kernel (1D NDRange over the output units).
        OpenCLSpace.ClError = Cl.<API key>(OpenCLSpace.Queue,
                                           OpenCLSpace.<API key>,
                                           1,
                                           null,
                                           <API key>,
                                           <API key>,
                                           0,
                                           null,
                                           out OpenCLSpace.ClEvent);
        OpenCLSpace.CheckErr(OpenCLSpace.ClError, "<API key>(): Cl.<API key>");

        OpenCLSpace.ClError = Cl.ReleaseEvent(OpenCLSpace.ClEvent);
        OpenCLSpace.CheckErr(OpenCLSpace.ClError, "Cl.ReleaseEvent");
    }

    OpenCLSpace.ClError = Cl.Finish(OpenCLSpace.Queue);
    OpenCLSpace.CheckErr(OpenCLSpace.ClError, "Cl.Finish");
#else
    // CPU fallback: plain additive update (no max-norm constraint on this path).
    for (int i = 0; i < nOutputUnits; i++)
    {
        // weights update
        for (int j = 0; j < nInputUnits; j++)
        {
            weights[i, j] += weightsUpdateSpeed[i, j];
        }

        // update biases
        biases[i] += biasesUpdateSpeed[i];
    }
#endif

#if TIMING_LAYERS
    Utils.<API key>.Stop();
#endif
}

#endregion

#region Gradient check

// Reads the weights and biases back from the device and flattens them into a
// single double[] of length nInputUnits*nOutputUnits + nOutputUnits
// (row-major weights first, then biases). Used by the gradient checker.
public override double[] GetParameters()
{
    int nParameters = nInputUnits * nOutputUnits + nOutputUnits;
    double[] parameters = new double[nParameters];

    // Copy weights and biases buffers to host (blocking reads).
    float[] tmpWeights = new float[nInputUnits * nOutputUnits];
    OpenCLSpace.ClError = Cl.EnqueueReadBuffer(OpenCLSpace.Queue,
                                               weightsGPU, // source
                                               Bool.True,
                                               (IntPtr)0,
                                               (IntPtr)(sizeof(float) * nInputUnits * nOutputUnits),
                                               tmpWeights,  // destination
                                               0,
                                               null,
                                               out OpenCLSpace.ClEvent);
    OpenCLSpace.CheckErr(OpenCLSpace.ClError, "clEnqueueReadBuffer");
    OpenCLSpace.ClError = Cl.ReleaseEvent(OpenCLSpace.ClEvent);
    OpenCLSpace.CheckErr(OpenCLSpace.ClError, "Cl.ReleaseEvent");

    float[] tmpBiases = new float[nOutputUnits];
    OpenCLSpace.ClError = Cl.EnqueueReadBuffer(OpenCLSpace.Queue,
                                               biasesGPU, // source
                                               Bool.True,
                                               (IntPtr)0,
                                               (IntPtr)(sizeof(float) * nOutputUnits),
                                               tmpBiases,  // destination
                                               0,
                                               null,
                                               out OpenCLSpace.ClEvent);
    OpenCLSpace.CheckErr(OpenCLSpace.ClError, "clEnqueueReadBuffer");
    OpenCLSpace.ClError = Cl.ReleaseEvent(OpenCLSpace.ClEvent);
    OpenCLSpace.CheckErr(OpenCLSpace.ClError, "Cl.ReleaseEvent");

    OpenCLSpace.ClError = Cl.Finish(OpenCLSpace.Queue);
    OpenCLSpace.CheckErr(OpenCLSpace.ClError, "Cl.Finish");

    // Convert to double and write into parameters array
    for (int i = 0; i < nInputUnits*nOutputUnits; ++i)
    {
        parameters[i] = (double)tmpWeights[i];
    }
    for (int i = 0; i < nOutputUnits; ++i)
    {
        parameters[nInputUnits * nOutputUnits + i] = (double)tmpBiases[i];
    }

    return parameters;
}

// Reads the weight and bias gradient buffers back from the device and flattens
// them with the same layout as GetParameters() (weights first, then biases).
// Name redacted in this copy — presumably GetParameterGradients(); confirm.
public override double[] <API key>()
{
    int nParameters = nInputUnits * nOutputUnits + nOutputUnits;
    double[] parameterGradients = new double[nParameters];

    // Copy weights and biases gradients buffers to host (blocking reads).
    float[] tmpWeightsGrad = new float[nInputUnits * nOutputUnits];
    OpenCLSpace.ClError = Cl.EnqueueReadBuffer(OpenCLSpace.Queue,
                                               weightsGradientsGPU, // source
                                               Bool.True,
                                               (IntPtr)0,
                                               (IntPtr)(sizeof(float) * nInputUnits * nOutputUnits),
                                               tmpWeightsGrad,  // destination
                                               0,
                                               null,
                                               out OpenCLSpace.ClEvent);
    OpenCLSpace.CheckErr(OpenCLSpace.ClError, "clEnqueueReadBuffer");
    OpenCLSpace.ClError = Cl.ReleaseEvent(OpenCLSpace.ClEvent);
    OpenCLSpace.CheckErr(OpenCLSpace.ClError, "Cl.ReleaseEvent");

    float[] tmpBiasesGrad = new float[nOutputUnits];
    OpenCLSpace.ClError = Cl.EnqueueReadBuffer(OpenCLSpace.Queue,
                                               biasesGradientsGPU, // source
                                               Bool.True,
                                               (IntPtr)0,
                                               (IntPtr)(sizeof(float) * nOutputUnits),
                                               tmpBiasesGrad,  // destination
                                               0,
                                               null,
                                               out OpenCLSpace.ClEvent);
    OpenCLSpace.CheckErr(OpenCLSpace.ClError, "clEnqueueReadBuffer");
    OpenCLSpace.ClError = Cl.ReleaseEvent(OpenCLSpace.ClEvent);
    OpenCLSpace.CheckErr(OpenCLSpace.ClError, "Cl.ReleaseEvent");

    OpenCLSpace.ClError = Cl.Finish(OpenCLSpace.Queue);
    OpenCLSpace.CheckErr(OpenCLSpace.ClError, "Cl.Finish");

    // Convert to double and write into parameterGradients
    //Console.WriteLine("Weight gradients:\n");
    for (int i = 0; i < nInputUnits * nOutputUnits; ++i)
    {
        parameterGradients[i] = (double)tmpWeightsGrad[i];
        //Console.Write(" {0}", tmpWeightsGrad[i]);
    }
    //Console.ReadKey();
    for (int i = 0; i < nOutputUnits; ++i)
    {
        parameterGradients[nInputUnits * nOutputUnits + i] = (double)tmpBiasesGrad[i];
    }

    return parameterGradients;
}

// Writes a flattened parameter vector (layout of GetParameters()) back into the
// device-side weight and bias buffers. Blocking writes; completes before return.
public override void SetParameters(double[] NewParameters)
{
    // Convert to float and write into tmp arrays
    float[] tmpWeights = new float[nInputUnits * nOutputUnits];
    float[] tmpBiases = new float[nOutputUnits];
    for (int i = 0; i < nInputUnits * nOutputUnits; ++i)
    {
        tmpWeights[i] = (float)NewParameters[i];
    }
    for (int i = 0; i < nOutputUnits; ++i)
    {
        tmpBiases[i] = (float)NewParameters[nInputUnits * nOutputUnits + i];
    }

    // Write arrays into buffers on device
    OpenCLSpace.ClError = Cl.EnqueueWriteBuffer(OpenCLSpace.Queue,
                                                weightsGPU,
                                                OpenCL.Net.Bool.True,
                                                (IntPtr)0,
                                                (IntPtr)(sizeof(float) * nInputUnits * nOutputUnits),
                                                tmpWeights,
                                                0,
                                                null,
                                                out OpenCLSpace.ClEvent);
    OpenCLSpace.CheckErr(OpenCLSpace.ClError, "Cl.EnqueueWriteBuffer");
    OpenCLSpace.ClError = Cl.ReleaseEvent(OpenCLSpace.ClEvent);
    OpenCLSpace.CheckErr(OpenCLSpace.ClError, "Cl.ReleaseEvent");

    OpenCLSpace.ClError = Cl.EnqueueWriteBuffer(OpenCLSpace.Queue,
                                                biasesGPU,
                                                OpenCL.Net.Bool.True,
                                                (IntPtr)0,
                                                (IntPtr)(sizeof(float) * nOutputUnits),
                                                tmpBiases,
                                                0,
                                                null,
                                                out OpenCLSpace.ClEvent);
    OpenCLSpace.CheckErr(OpenCLSpace.ClError, "Cl.EnqueueWriteBuffer");
    OpenCLSpace.ClError = Cl.ReleaseEvent(OpenCLSpace.ClEvent);
    OpenCLSpace.CheckErr(OpenCLSpace.ClError, "Cl.ReleaseEvent");

    OpenCLSpace.ClError = Cl.Finish(OpenCLSpace.Queue);
    OpenCLSpace.CheckErr(OpenCLSpace.ClError, "Cl.Finish");
}

#endregion
}
}
# Contributing to Viewer

## How to report bugs

### Make sure it is a Viewer bug

Most bugs reported to our bug tracker are actually bugs in user code, not in Viewer code. Keep in mind that just because your code throws an error inside of Viewer, this does *not* mean the bug is a Viewer bug.

Ask for help first in a discussion forum like [Stack Overflow](http://stackoverflow.com/). You will get much quicker support, and you will help avoid tying up the Viewer team with invalid bug reports.

### Disable browser extensions

Make sure you have reproduced the bug with all browser extensions and add-ons disabled, as these can sometimes cause things to break in interesting and unpredictable ways. Try using incognito, stealth or anonymous browsing modes.

### Try the latest version of Viewer

Bugs in old versions of Viewer may have already been fixed. In order to avoid reporting known issues, make sure you are always testing against the [latest release](https://github.com/fengyuanchen/viewerjs/releases/latest). We cannot fix bugs in older released files; if a bug has been fixed in a subsequent version of Viewer, the site should upgrade.

### Simplify the test case

When experiencing a problem, [reduce your code](http://webkit.org/quality/reduction.html) to the bare minimum required to reproduce the issue. This makes it *much* easier to isolate and fix the offending code. Bugs reported without reduced test cases take on average 9001% longer to fix than bugs that are submitted with them, so you really should try to do this if at all possible.

### Search for related or duplicate issues

Go to the [Viewer issue tracker](https://github.com/fengyuanchen/viewerjs/issues) and make sure the problem hasn't already been reported. If not, create a new issue there and include your test case.

### Browser support

Remember that Viewer supports multiple browsers and their versions; any contributed code must work in all of them.
You can refer to the [browser support page](README.md#browser-support) for the current list of supported browsers.

## Notes for pull requests

- Run the test suites in the `test` directory first.
- Don't modify any files in the `dist` directory.
- Follow the same code style as the library.
<?php

namespace Zend\Code\Scanner;

use Zend\Code\Annotation\AnnotationManager;
use Zend\Code\Exception;
use Zend\Code\NameInformation;

use function array_slice;
use function count;
use function is_int;
use function is_string;
use function ltrim;
use function strtolower;
use function substr_count;
use function var_export;

/**
 * Lazily scans the token stream of a single method and exposes its metadata
 * (name, visibility, parameters, body, doc comment). All accessors call
 * scan(), which runs a goto-based finite state machine over the tokens once
 * and caches the results.
 */
class MethodScanner implements ScannerInterface
{
    /** @var bool Whether scan() has already populated the cached metadata. */
    protected $isScanned = false;

    /** @var string */
    protected $docComment;

    /** @var ClassScanner */
    protected $scannerClass;

    /** @var string */
    protected $class;

    /** @var string */
    protected $name;

    /** @var int */
    protected $lineStart;

    /** @var int */
    protected $lineEnd;

    /** @var bool */
    protected $isFinal = false;

    /** @var bool */
    protected $isAbstract = false;

    /** @var bool Default visibility is public. */
    protected $isPublic = true;

    /** @var bool */
    protected $isProtected = false;

    /** @var bool */
    protected $isPrivate = false;

    /** @var bool */
    protected $isStatic = false;

    /** @var string Concatenated token content between the method's braces. */
    protected $body = '';

    /** @var array Raw tokens for this method as produced by token_get_all(). */
    protected $tokens = [];

    /** @var NameInformation */
    protected $nameInformation;

    /** @var array Per-parameter info records built by scan(). */
    protected $infos = [];

    /**
     * @param array           $methodTokens
     * @param NameInformation $nameInformation
     */
    public function __construct(array $methodTokens, NameInformation $nameInformation = null)
    {
        $this->tokens          = $methodTokens;
        $this->nameInformation = $nameInformation;
    }

    /**
     * @param  string $class
     * @return MethodScanner
     */
    public function setClass($class)
    {
        $this->class = (string) $class;
        return $this;
    }

    /**
     * @param  ClassScanner $scannerClass
     * @return MethodScanner
     */
    public function setScannerClass(ClassScanner $scannerClass)
    {
        $this->scannerClass = $scannerClass;
        return $this;
    }

    /**
     * @return ClassScanner
     */
    public function getClassScanner()
    {
        return $this->scannerClass;
    }

    /**
     * @return string
     */
    public function getName()
    {
        $this->scan();
        return $this->name;
    }

    /**
     * @return int
     */
    public function getLineStart()
    {
        $this->scan();
        return $this->lineStart;
    }

    /**
     * @return int
     */
    public function getLineEnd()
    {
        $this->scan();
        return $this->lineEnd;
    }

    /**
     * @return string
     */
    public function getDocComment()
    {
        $this->scan();
        return $this->docComment;
    }

    /**
     * @param  AnnotationManager $annotationManager
     * @return AnnotationScanner|false False when the method has no doc comment.
     */
    public function getAnnotations(AnnotationManager $annotationManager)
    {
        if (($docComment = $this->getDocComment()) == '') {
            return false;
        }

        return new AnnotationScanner($annotationManager, $docComment, $this->nameInformation);
    }

    /**
     * @return bool
     */
    public function isFinal()
    {
        $this->scan();
        return $this->isFinal;
    }

    /**
     * @return bool
     */
    public function isAbstract()
    {
        $this->scan();
        return $this->isAbstract;
    }

    /**
     * @return bool
     */
    public function isPublic()
    {
        $this->scan();
        return $this->isPublic;
    }

    /**
     * @return bool
     */
    public function isProtected()
    {
        $this->scan();
        return $this->isProtected;
    }

    /**
     * @return bool
     */
    public function isPrivate()
    {
        $this->scan();
        return $this->isPrivate;
    }

    /**
     * @return bool
     */
    public function isStatic()
    {
        $this->scan();
        return $this->isStatic;
    }

    /**
     * Override the given name for a method, this is necessary to
     * support traits.
     *
     * @param  string $name
     * @return self
     */
    public function setName($name)
    {
        $this->name = $name;
        return $this;
    }

    /**
     * Visibility must be of T_PUBLIC, T_PRIVATE or T_PROTECTED
     * Needed to support traits
     *
     * @param  int $visibility T_PUBLIC | T_PRIVATE | T_PROTECTED
     * @return self
     * @throws \Zend\Code\Exception\<API key>
     */
    public function setVisibility($visibility)
    {
        switch ($visibility) {
            case T_PUBLIC:
                $this->isPublic    = true;
                $this->isPrivate   = false;
                $this->isProtected = false;
                break;

            case T_PRIVATE:
                $this->isPublic    = false;
                $this->isPrivate   = true;
                $this->isProtected = false;
                break;

            case T_PROTECTED:
                $this->isPublic    = false;
                $this->isPrivate   = false;
                $this->isProtected = true;
                break;

            default:
                throw new Exception\<API key>('Invalid visibility argument passed to setVisibility.');
        }

        return $this;
    }

    /**
     * @return int Number of parameters declared by the method.
     */
    public function <API key>()
    {
        return count($this->getParameters());
    }

    /**
     * @param  bool $returnScanner True to return ParameterScanner objects
     *                             instead of plain parameter names.
     * @return array
     */
    public function getParameters($returnScanner = false)
    {
        $this->scan();

        $return = [];

        foreach ($this->infos as $info) {
            if ($info['type'] != 'parameter') {
                continue;
            }

            if (! $returnScanner) {
                $return[] = $info['name'];
            } else {
                $return[] = $this->getParameter($info['name']);
            }
        }

        return $return;
    }

    /**
     * Look a parameter up by position (int) or by name (string).
     *
     * @param  int|string $<API key>
     * @return ParameterScanner
     * @throws Exception\<API key>
     */
    public function getParameter($<API key>)
    {
        $this->scan();

        if (is_int($<API key>)) {
            $info = $this->infos[$<API key>];
            if ($info['type'] != 'parameter') {
                throw new Exception\<API key>('Index of info offset is not about a parameter');
            }
        } elseif (is_string($<API key>)) {
            foreach ($this->infos as $info) {
                if ($info['type'] === 'parameter' && $info['name'] === $<API key>) {
                    break;
                }
                unset($info);
            }
            if (! isset($info)) {
                throw new Exception\<API key>('Index of info offset is not about a parameter');
            }
        }

        $p = new ParameterScanner(
            array_slice($this->tokens, $info['tokenStart'], $info['tokenEnd'] - $info['tokenStart']),
            $this->nameInformation
        );
        $p-><API key>($this->name);
        $p-><API key>($this);
        $p->setDeclaringClass($this->class);
        $p-><API key>($this->scannerClass);
        $p->setPosition($info['position']);

        return $p;
    }

    /**
     * @return string Raw method body (content between the outer braces).
     */
    public function getBody()
    {
        $this->scan();
        return $this->body;
    }

    public static function export()
    {
        // @todo
    }

    public function __toString()
    {
        $this->scan();
        return var_export($this, true);
    }

    /**
     * Run the token finite state machine once and cache all method metadata.
     *
     * @throws Exception\RuntimeException When no tokens were provided.
     */
    protected function scan()
    {
        if ($this->isScanned) {
            return;
        }

        if (! $this->tokens) {
            throw new Exception\RuntimeException('No tokens were provided');
        }

        /**
         * Variables & Setup
         */
        $tokens       = &$this->tokens; // localize
        $infos        = &$this->infos; // localize
        $tokenIndex   = null;
        $token        = null;
        $tokenType    = null;
        $tokenContent = null;
        $tokenLine    = null;
        $infoIndex    = 0;
        $parentCount  = 0;

        /*
         * MACRO creation
         */
        $MACRO_TOKEN_ADVANCE = function () use (
            &$tokens,
            &$tokenIndex,
            &$token,
            &$tokenType,
            &$tokenContent,
            &$tokenLine
        ) {
            static $lastTokenArray = null;
            $tokenIndex = $tokenIndex === null ? 0 : $tokenIndex + 1;
            if (! isset($tokens[$tokenIndex])) {
                $token        = false;
                $tokenContent = false;
                $tokenType    = false;
                $tokenLine    = false;

                return false;
            }
            $token = $tokens[$tokenIndex];
            if (is_string($token)) {
                // Plain one-character tokens carry no line info; derive it from
                // the newlines contained in the last array token seen.
                $tokenType    = null;
                $tokenContent = $token;
                $tokenLine    += substr_count(
                    $lastTokenArray[1] ?? '',
                    "\n"
                ); // adjust token line by last known newline count

            } else {
                $lastTokenArray = $token;
                [$tokenType, $tokenContent, $tokenLine] = $token;
            }

            return $tokenIndex;
        };
        $MACRO_INFO_START = function () use (&$infoIndex, &$infos, &$tokenIndex, &$tokenLine) {
            $infos[$infoIndex] = [
                'type'        => 'parameter',
                'tokenStart'  => $tokenIndex,
                'tokenEnd'    => null,
                'lineStart'   => $tokenLine,
                'lineEnd'     => $tokenLine,
                'name'        => null,
                'position'    => $infoIndex + 1, // position is +1 of infoIndex
            ];
        };
        $MACRO_INFO_ADVANCE = function () use (&$infoIndex, &$infos, &$tokenIndex, &$tokenLine) {
            $infos[$infoIndex]['tokenEnd'] = $tokenIndex;
            $infos[$infoIndex]['lineEnd']  = $tokenLine;
            $infoIndex++;

            return $infoIndex;
        };

        /**
         * START FINITE STATE MACHINE FOR SCANNING TOKENS
         */
        // Initialize token
        $MACRO_TOKEN_ADVANCE();

        SCANNER_TOP:

        $this->lineStart = $this->lineStart ? : $tokenLine;

        switch ($tokenType) {
            case T_DOC_COMMENT:
                $this->lineStart = null;
                if ($this->docComment === null && $this->name === null) {
                    $this->docComment = $tokenContent;
                }
                goto <API key>; // goto (no break needed);

            case T_FINAL:
                $this->isFinal = true;
                goto <API key>; // goto (no break needed);

            case T_ABSTRACT:
                $this->isAbstract = true;
                goto <API key>; // goto (no break needed);

            case T_PUBLIC:
                // use defaults
                goto <API key>; // goto (no break needed);

            case T_PROTECTED:
                $this->setVisibility(T_PROTECTED);
                goto <API key>; // goto (no break needed);

            case T_PRIVATE:
                $this->setVisibility(T_PRIVATE);
                goto <API key>; // goto (no break needed);

            case T_STATIC:
                $this->isStatic = true;
                goto <API key>; // goto (no break needed);

            case T_NS_SEPARATOR:
                if (! isset($infos[$infoIndex])) {
                    $MACRO_INFO_START();
                }
                goto <API key>; // goto (no break needed);

            case T_VARIABLE:
            case T_STRING:
                if ($tokenType === T_STRING && $parentCount === 0) {
                    // First bare string outside parentheses is the method name.
                    $this->name = $tokenContent;
                }

                if ($parentCount === 1) {
                    if (! isset($infos[$infoIndex])) {
                        $MACRO_INFO_START();
                    }
                    if ($tokenType === T_VARIABLE) {
                        $infos[$infoIndex]['name'] = ltrim($tokenContent, '$');
                    }
                }

                goto <API key>; // goto (no break needed);

            case null:
                switch ($tokenContent) {
                    case '&':
                        if (! isset($infos[$infoIndex])) {
                            $MACRO_INFO_START();
                        }
                        goto <API key>; // goto (no break needed);

                    case '(':
                        $parentCount++;
                        goto <API key>; // goto (no break needed);

                    case ')':
                        // FIX: the decrement operator was missing here, leaving
                        // "$parentCount" as a bare (invalid) expression and the
                        // paren depth永 never balanced. Restored per upstream.
                        $parentCount--;
                        if ($parentCount > 0) {
                            goto <API key>;
                        }
                        if ($parentCount === 0) {
                            if ($infos) {
                                $MACRO_INFO_ADVANCE();
                            }
                            $context = 'body';
                        }
                        goto <API key>; // goto (no break needed);

                    case ',':
                        if ($parentCount === 1) {
                            $MACRO_INFO_ADVANCE();
                        }
                        goto <API key>;
                }
        }

        <API key>:

        if ($MACRO_TOKEN_ADVANCE() === false) {
            goto SCANNER_END;
        }
        goto SCANNER_TOP;

        <API key>:

        $braceCount = 0;
        while ($MACRO_TOKEN_ADVANCE() !== false) {
            if ($tokenContent == '}') {
                // FIX: the decrement operator was missing here as well,
                // so the body's closing brace was never popped. Restored.
                $braceCount--;
            }
            if ($braceCount > 0) {
                $this->body .= $tokenContent;
            }
            if ($tokenContent == '{') {
                $braceCount++;
            }
            $this->lineEnd = $tokenLine;
        }

        SCANNER_END:

        $this->isScanned = true;
    }
}
<?php

declare(strict_types=1);

namespace OxidEsales\EshopCommunity\Internal\Framework\Module\Configuration\Exception;

/**
 * Marker exception for the module-configuration component.
 *
 * Carries no state of its own; it exists so callers can catch
 * configuration-related failures by type rather than catching \Exception.
 * (Class name redacted in this copy of the source.)
 */
class <API key> extends \Exception
{
}
#ifndef __AGENT_H
#define __AGENT_H

#include "libssh/libssh.h"

/* Messages for the authentication agent connection.
 * Numeric values follow the ssh-agent wire protocol; several macro names
 * are redacted in this copy — match them by value against the protocol
 * drafts (e.g. 1/2 request/answer RSA identities, 3/4 RSA challenge). */
#define <API key> 1
#define <API key> 2
#define <API key> 3
#define <API key> 4
#define SSH_AGENT_FAILURE 5
#define SSH_AGENT_SUCCESS 6
#define <API key> 7
#define <API key> 8
#define <API key> 9

/* private OpenSSH extensions for SSH2 */
#define <API key> 11
#define <API key> 12
#define <API key> 13
#define <API key> 14
#define <API key> 17
#define <API key> 18
#define <API key> 19

/* smartcard */
#define <API key> 20
#define <API key> 21

/* lock/unlock the agent */
#define SSH_AGENTC_LOCK 22
#define SSH_AGENTC_UNLOCK 23

/* add key with constraints */
#define <API key> 24
#define <API key> 25
#define <API key> 26
#define <API key> 1
#define <API key> 2

/* extended failure messages */
#define SSH2_AGENT_FAILURE 30

/* additional error code for ssh.com's ssh-agent2 */
#define <API key> 102

#define <API key> 0x01

/* Per-session state for one agent connection. */
struct ssh_agent_struct {
  struct socket *sock;   /* connection to the agent's UNIX socket */
  ssh_buffer ident;      /* buffer holding the current identity blob */
  unsigned int count;    /* number of identities offered by the agent */
};

#ifndef _WIN32
/* agent.c */
/**
 * @brief Create a new ssh agent structure.
 *
 * @return An allocated ssh agent structure or NULL on error.
 */
struct ssh_agent_struct *agent_new(struct ssh_session_struct *session);

/* Close the agent connection (does not free the structure). */
void agent_close(struct ssh_agent_struct *agent);

/**
 * @brief Free an allocated ssh agent structure.
 *
 * @param agent The ssh agent structure to free.
 */
void agent_free(struct ssh_agent_struct *agent);

/**
 * @brief Check if the ssh agent is running.
 *
 * @param session The ssh session to check for the agent.
 *
 * @return 1 if it is running, 0 if not.
 */
int agent_is_running(struct ssh_session_struct *session);

/* Redacted name — presumably requests the identity count from the agent;
 * confirm against agent.c. */
int <API key>(struct ssh_session_struct *session);

/* Redacted names — presumably first/next public-key iteration over the
 * agent's identities; *comment receives the key's comment string. */
struct <API key> *<API key>(struct ssh_session_struct *session, char **comment);
struct <API key> *<API key>(struct ssh_session_struct *session, char **comment);

/* Ask the agent to sign `data` with the private key matching `pubkey`. */
ssh_string agent_sign_data(struct ssh_session_struct *session,
                           struct ssh_buffer_struct *data,
                           struct <API key> *pubkey);
#endif

#endif /* __AGENT_H */
/* vim: set ts=2 sw=2 et cindent: */
package com.redhat.lightblue.config;

import org.junit.Assert;
import org.junit.Test;

import com.fasterxml.jackson.databind.JsonNode;

import com.redhat.lightblue.Request;
import com.redhat.lightblue.crud.DeleteRequest;
import com.redhat.lightblue.util.test.FileUtil;

import static com.redhat.lightblue.util.JsonUtils.json;

/**
 * Exercises LightblueFactory's JSON-schema validation toggle for request
 * parsing: with validation off, both valid and invalid deletion requests
 * parse; with validation on, the valid one parses and the invalid one must
 * throw. Test-method names are redacted in this copy of the source.
 */
public class CrudValidationTest {

    // Validation disabled + valid request: parsing must succeed.
    @Test
    public void <API key>() throws Exception {
        LightblueFactory lbf = new LightblueFactory(new <API key>());
        // Emulate configuration
        lbf.getJsonTranslator().setValidation(Request.class, false);
        String jsonString = FileUtil.readFile("valid-deletion-req.json");
        JsonNode node = json(jsonString);
        DeleteRequest req = lbf.getJsonTranslator().parse(DeleteRequest.class, node);
        Assert.assertNotNull(req);
    }

    // Validation disabled + invalid request: parsing still succeeds because
    // schema checks are skipped.
    @Test
    public void <API key>() throws Exception {
        LightblueFactory lbf = new LightblueFactory(new <API key>());
        // Emulate configuration
        lbf.getJsonTranslator().setValidation(Request.class, false);
        String jsonString = FileUtil.readFile("<API key>.json");
        JsonNode node = json(jsonString);
        DeleteRequest req = lbf.getJsonTranslator().parse(DeleteRequest.class, node);
        Assert.assertNotNull(req);
    }

    // Validation enabled + valid request: parsing must succeed.
    @Test
    public void <API key>() throws Exception {
        LightblueFactory lbf = new LightblueFactory(new <API key>());
        // Emulate configuration
        lbf.getJsonTranslator().setValidation(Request.class, true);
        String jsonString = FileUtil.readFile("valid-deletion-req.json");
        JsonNode node = json(jsonString);
        DeleteRequest req = lbf.getJsonTranslator().parse(DeleteRequest.class, node);
        Assert.assertNotNull(req);
    }

    // Validation enabled + invalid request: parse must throw. Assert.fail()
    // throws AssertionError (an Error, not an Exception), so it is not
    // swallowed by the catch block below.
    @Test
    public void <API key>() throws Exception {
        LightblueFactory lbf = new LightblueFactory(new <API key>());
        // Emulate configuration
        lbf.getJsonTranslator().setValidation(Request.class, true);
        String jsonString = FileUtil.readFile("<API key>.json");
        JsonNode node = json(jsonString);
        try {
            lbf.getJsonTranslator().parse(DeleteRequest.class, node);
            Assert.fail();
        } catch (Exception e) {
            System.out.println(e);
        }
    }
}
#pragma once
#include <WeaselIPC.h>

// Bit-field view of a Win32 keyboard-message lParam (16-bit repeat count,
// 8-bit scan code, extended/context/previous-state/transition flags). The
// constructor reinterprets the raw lParam bits directly into this layout.
// NOTE(review): the reinterpret_casts assume KeyInfo is exactly 32 bits and
// rely on implementation-defined bit-field packing — verify on the target
// compiler (MSVC here, presumably).
struct KeyInfo {
	UINT repeatCount: 16;
	UINT scanCode: 8;
	UINT isExtended: 1;
	UINT reserved: 4;
	UINT contextCode: 1;
	UINT prevKeyState: 1;
	UINT isKeyUp: 1;
	KeyInfo(LPARAM lparam) {
		*this = *reinterpret_cast<KeyInfo*>(&lparam);
	}
	operator UINT32() {
		return *reinterpret_cast<UINT32*>(this);
	}
};

// Translate a Win32 virtual key + key info + keyboard state into a weasel
// KeyEvent; returns false when the key cannot be converted (defined elsewhere).
bool ConvertKeyEvent(UINT vkey, KeyInfo kinfo, const LPBYTE keyState, weasel::KeyEvent& result);

namespace ibus {
	// keycodes — X11/IBus keysym values (0xFFxx range for function/control keys)
	enum Keycode {
		VoidSymbol = 0xFFFFFF, space = 0x020, grave = 0x060,
		BackSpace = 0xFF08, Tab = 0xFF09, Linefeed = 0xFF0A, Clear = 0xFF0B,
		Return = 0xFF0D, Pause = 0xFF13, Scroll_Lock = 0xFF14, Sys_Req = 0xFF15,
		Escape = 0xFF1B, Delete = 0xFFFF,
		// International & multi-key character composition
		Multi_key = 0xFF20, Codeinput = 0xFF37, SingleCandidate = 0xFF3C,
		MultipleCandidate = 0xFF3D, PreviousCandidate = 0xFF3E,
		// Japanese keyboard support
		Kanji = 0xFF21, Muhenkan = 0xFF22, Henkan_Mode = 0xFF23, Henkan = 0xFF23,
		Romaji = 0xFF24, Hiragana = 0xFF25, Katakana = 0xFF26,
		Hiragana_Katakana = 0xFF27, Zenkaku = 0xFF28, Hankaku = 0xFF29,
		Zenkaku_Hankaku = 0xFF2A, Touroku = 0xFF2B, Massyo = 0xFF2C,
		Kana_Lock = 0xFF2D, Kana_Shift = 0xFF2E, Eisu_Shift = 0xFF2F,
		Eisu_toggle = 0xFF30, Kanji_Bangou = 0xFF37, Zen_Koho = 0xFF3D,
		Mae_Koho = 0xFF3E,
		// Cursor control & motion
		Home = 0xFF50, Left = 0xFF51, Up = 0xFF52, Right = 0xFF53, Down = 0xFF54,
		Prior = 0xFF55, Page_Up = 0xFF55, Next = 0xFF56, Page_Down = 0xFF56,
		End = 0xFF57, Begin = 0xFF58,
		// Misc functions
		Select = 0xFF60, Print = 0xFF61, Execute = 0xFF62, Insert = 0xFF63,
		Undo = 0xFF65, Redo = 0xFF66, Menu = 0xFF67, Find = 0xFF68,
		Cancel = 0xFF69, Help = 0xFF6A, Break = 0xFF6B,
		Mode_switch = 0xFF7E, script_switch = 0xFF7E, Num_Lock = 0xFF7F,
		// Keypad functions
		KP_Space = 0xFF80, KP_Tab = 0xFF89, KP_Enter = 0xFF8D,
		KP_F1 = 0xFF91, KP_F2 = 0xFF92, KP_F3 = 0xFF93, KP_F4 = 0xFF94,
		KP_Home = 0xFF95, KP_Left = 0xFF96, KP_Up = 0xFF97, KP_Right = 0xFF98,
		KP_Down = 0xFF99, KP_Prior = 0xFF9A, KP_Page_Up = 0xFF9A,
		KP_Next = 0xFF9B, KP_Page_Down = 0xFF9B, KP_End = 0xFF9C,
		KP_Begin = 0xFF9D, KP_Insert = 0xFF9E, KP_Delete = 0xFF9F,
		KP_Equal = 0xFFBD, KP_Multiply = 0xFFAA, KP_Add = 0xFFAB,
		KP_Separator = 0xFFAC, KP_Subtract = 0xFFAD, KP_Decimal = 0xFFAE,
		KP_Divide = 0xFFAF,
		KP_0 = 0xFFB0, KP_1 = 0xFFB1, KP_2 = 0xFFB2, KP_3 = 0xFFB3,
		KP_4 = 0xFFB4, KP_5 = 0xFFB5, KP_6 = 0xFFB6, KP_7 = 0xFFB7,
		KP_8 = 0xFFB8, KP_9 = 0xFFB9,
		// Function keys; F11+ double as the legacy L/R keysym aliases
		F1 = 0xFFBE, F2 = 0xFFBF, F3 = 0xFFC0, F4 = 0xFFC1, F5 = 0xFFC2,
		F6 = 0xFFC3, F7 = 0xFFC4, F8 = 0xFFC5, F9 = 0xFFC6, F10 = 0xFFC7,
		F11 = 0xFFC8, L1 = 0xFFC8, F12 = 0xFFC9, L2 = 0xFFC9,
		F13 = 0xFFCA, L3 = 0xFFCA, F14 = 0xFFCB, L4 = 0xFFCB,
		F15 = 0xFFCC, L5 = 0xFFCC, F16 = 0xFFCD, L6 = 0xFFCD,
		F17 = 0xFFCE, L7 = 0xFFCE, F18 = 0xFFCF, L8 = 0xFFCF,
		F19 = 0xFFD0, L9 = 0xFFD0, F20 = 0xFFD1, L10 = 0xFFD1,
		F21 = 0xFFD2, R1 = 0xFFD2, F22 = 0xFFD3, R2 = 0xFFD3,
		F23 = 0xFFD4, R3 = 0xFFD4, F24 = 0xFFD5, R4 = 0xFFD5,
		F25 = 0xFFD6, R5 = 0xFFD6, F26 = 0xFFD7, R6 = 0xFFD7,
		F27 = 0xFFD8, R7 = 0xFFD8, F28 = 0xFFD9, R8 = 0xFFD9,
		F29 = 0xFFDA, R9 = 0xFFDA, F30 = 0xFFDB, R10 = 0xFFDB,
		F31 = 0xFFDC, R11 = 0xFFDC, F32 = 0xFFDD, R12 = 0xFFDD,
		F33 = 0xFFDE, R13 = 0xFFDE, F34 = 0xFFDF, R14 = 0xFFDF,
		F35 = 0xFFE0, R15 = 0xFFE0,
		// Modifier keys
		Shift_L = 0xFFE1, Shift_R = 0xFFE2, Control_L = 0xFFE3,
		Control_R = 0xFFE4, Caps_Lock = 0xFFE5, Shift_Lock = 0xFFE6,
		Meta_L = 0xFFE7, Meta_R = 0xFFE8, Alt_L = 0xFFE9, Alt_R = 0xFFEA,
		Super_L = 0xFFEB, Super_R = 0xFFEC, Hyper_L = 0xFFED, Hyper_R = 0xFFEE,
		Null = 0
	};

	// modifiers, modified to fit a UINT16
	enum Modifier {
		NULL_MASK = 0,
		SHIFT_MASK = 1 << 0,
		LOCK_MASK = 1 << 1,
		CONTROL_MASK = 1 << 2,
		ALT_MASK = 1 << 3,
		MOD1_MASK = 1 << 3,
		MOD2_MASK = 1 << 4,
		MOD3_MASK = 1 << 5,
		MOD4_MASK = 1 << 6,
		MOD5_MASK = 1 << 7,
		HANDLED_MASK = 1 << 8,
		IGNORED_MASK = 1 << 9,
		FORWARD_MASK = 1 << 9,
		SUPER_MASK = 1 << 10,
		HYPER_MASK = 1 << 11,
		META_MASK = 1 << 12,
		RELEASE_MASK = 1 << 14,
		// NOTE(review): 0x2fff covers bits 0-11 and bit 13, but excludes
		// META_MASK (1 << 12) and includes unassigned bit 13 — verify this
		// mask is intentional rather than a typo for 0x3fff.
		MODIFIER_MASK = 0x2fff
	};
}
package org.obiba.mica.micaConfig.service; import org.obiba.mica.micaConfig.domain.PopulationConfig; import org.obiba.mica.micaConfig.repository.<API key>; import org.springframework.stereotype.Component; import javax.inject.Inject; @Component public class <API key> extends EntityConfigService<PopulationConfig> { @Inject <API key> <API key>; @Override protected <API key> getRepository() { return <API key>; } @Override protected String getDefaultId() { return "default"; } @Override protected PopulationConfig createEmptyForm() { return new PopulationConfig(); } @Override protected String <API key>() { return "classpath:config/population-form/definition.json"; } @Override protected String <API key>() { return "classpath:config/population-form/<API key>.json"; } @Override protected String <API key>() { return "classpath:config/population-form/schema.json"; } @Override protected String <API key>() { return "classpath:config/population-form/schema-mandatory.json"; } }
/* Needed for POSIX.1-2008 locale functions */
/** @cond PRIVATE */
#define _XOPEN_SOURCE 700
/** @endcond */
#include <config.h>
#include <ctype.h>
#include <locale.h>
#if defined(__FreeBSD__) || defined(__APPLE__)
#include <xlocale.h>
#endif
#if defined(__FreeBSD__)
#include <sys/param.h>
#endif
#include <stdint.h>
#include <stdlib.h>
#include <string.h>
#include <strings.h>
#include <errno.h>
#include <libsigrok/libsigrok.h>
#include "libsigrok-internal.h"

/** @cond PRIVATE */
#define LOG_PREFIX "strutil"
/** @endcond */

/**
 * @file
 *
 * Helper functions for handling or converting libsigrok-related strings.
 */

/**
 * @defgroup grp_strutil String utilities
 *
 * Helper functions for handling or converting libsigrok-related strings.
 *
 * @{
 */

/**
 * Convert a string representation of a numeric value (base 10) to a long
 * integer. The conversion is strict and will fail if the complete string
 * does not represent a valid long integer (trailing whitespace is tolerated).
 * The function sets errno according to the details of the failure.
 *
 * @param str The string representation to convert.
 * @param ret Pointer to long where the result of the conversion will be stored.
 *
 * @retval SR_OK Conversion successful.
 * @retval SR_ERR Failure.
 *
 * @private
 */
SR_PRIV int sr_atol(const char *str, long *ret)
{
	long tmp;
	char *endptr = NULL;

	/* Clear errno first so a stale value cannot mask success/failure. */
	errno = 0;
	tmp = strtol(str, &endptr, 10);

	/* Tolerate trailing whitespace after the digits. */
	while (endptr && isspace(*endptr))
		endptr++;

	/* Any leftover non-space text, or a conversion error, is a failure. */
	if (!endptr || *endptr || errno) {
		if (!errno)
			errno = EINVAL;
		return SR_ERR;
	}

	*ret = tmp;

	return SR_OK;
}

/**
 * Convert a text to a number including support for non-decimal bases.
 * Also optionally returns the position after the number, where callers
 * can either error out, or support application specific suffixes.
 *
 * @param[in] str The input text to convert.
 * @param[out] ret The conversion result.
 * @param[out] end The position after the number.
 * @param[in] base The number format's base, can be 0.
 *
 * @retval SR_OK Conversion successful.
 * @retval SR_ERR Conversion failed.
 *
 * @private
 *
 * This routine is more general than @ref sr_atol(), which strictly
 * expects the input text to contain just a decimal number, and nothing
 * else in addition. The @ref sr_atol_base() routine accepts trailing
 * text after the number, and supports non-decimal numbers (bin, hex),
 * including automatic detection from prefix text.
 */
SR_PRIV int sr_atol_base(const char *str, long *ret, char **end, int base)
{
	long num;
	char *endptr;

	/* Add "0b" prefix support which strtol(3) may be missing. */
	while (str && isspace(*str))
		str++;
	if (!base && strncmp(str, "0b", strlen("0b")) == 0) {
		str += strlen("0b");
		base = 2;
	}

	/* Run the number conversion. Quick bail out if that fails. */
	errno = 0;
	endptr = NULL;
	num = strtol(str, &endptr, base);
	if (!endptr || errno) {
		if (!errno)
			errno = EINVAL;
		return SR_ERR;
	}
	*ret = num;

	/* Advance to optional non-space trailing suffix. */
	while (endptr && isspace(*endptr))
		endptr++;
	if (end)
		*end = endptr;

	return SR_OK;
}

/**
 * Convert a string representation of a numeric value (base 10) to an
 * integer. The conversion is strict and will fail if the complete string
 * does not represent a valid integer. The function sets errno according
 * to the details of the failure.
 *
 * @param str The string representation to convert.
 * @param ret Pointer to int where the result of the conversion will be stored.
 *
 * @retval SR_OK Conversion successful.
 * @retval SR_ERR Failure.
 *
 * @private
 */
SR_PRIV int sr_atoi(const char *str, int *ret)
{
	long tmp;

	/* Parse as long, then reject values that do not round-trip as int. */
	if (sr_atol(str, &tmp) != SR_OK)
		return SR_ERR;

	if ((int) tmp != tmp) {
		errno = ERANGE;
		return SR_ERR;
	}

	*ret = (int) tmp;

	return SR_OK;
}

/**
 * Convert a string representation of a numeric value to a double. The
 * conversion is strict and will fail if the complete string does not
 * represent a valid double. The function sets errno according to the
 * details of the failure.
 *
 * @param str The string representation to convert.
* @param ret Pointer to double where the result of the conversion will be stored. * * @retval SR_OK Conversion successful. * @retval SR_ERR Failure. * * @private */ SR_PRIV int sr_atod(const char *str, double *ret) { double tmp; char *endptr = NULL; errno = 0; tmp = strtof(str, &endptr); while (endptr && isspace(*endptr)) endptr++; if (!endptr || *endptr || errno) { if (!errno) errno = EINVAL; return SR_ERR; } *ret = tmp; return SR_OK; } /** * Convert a string representation of a numeric value to a float. The * conversion is strict and will fail if the complete string does not represent * a valid float. The function sets errno according to the details of the * failure. * * @param str The string representation to convert. * @param ret Pointer to float where the result of the conversion will be stored. * * @retval SR_OK Conversion successful. * @retval SR_ERR Failure. * * @private */ SR_PRIV int sr_atof(const char *str, float *ret) { double tmp; if (sr_atod(str, &tmp) != SR_OK) return SR_ERR; if ((float) tmp != tmp) { errno = ERANGE; return SR_ERR; } *ret = (float) tmp; return SR_OK; } /** * Convert a string representation of a numeric value to a double. The * conversion is strict and will fail if the complete string does not represent * a valid double. The function sets errno according to the details of the * failure. This version ignores the locale. * * @param str The string representation to convert. * @param ret Pointer to double where the result of the conversion will be stored. * * @retval SR_OK Conversion successful. * @retval SR_ERR Failure. * * @private */ SR_PRIV int sr_atod_ascii(const char *str, double *ret) { double tmp; char *endptr = NULL; errno = 0; tmp = g_ascii_strtod(str, &endptr); if (!endptr || *endptr || errno) { if (!errno) errno = EINVAL; return SR_ERR; } *ret = tmp; return SR_OK; } /** * Convert a string representation of a numeric value to a float. 
The * conversion is strict and will fail if the complete string does not represent * a valid float. The function sets errno according to the details of the * failure. This version ignores the locale. * * @param str The string representation to convert. * @param ret Pointer to float where the result of the conversion will be stored. * * @retval SR_OK Conversion successful. * @retval SR_ERR Failure. * * @private */ SR_PRIV int sr_atof_ascii(const char *str, float *ret) { double tmp; char *endptr = NULL; errno = 0; tmp = g_ascii_strtod(str, &endptr); if (!endptr || *endptr || errno) { if (!errno) errno = EINVAL; return SR_ERR; } /* FIXME This fails unexpectedly. Some other method to safel downcast * needs to be found. Checking against FLT_MAX doesn't work as well. */ /* if ((float) tmp != tmp) { errno = ERANGE; sr_dbg("ERANGEEEE %e != %e", (float) tmp, tmp); return SR_ERR; } */ *ret = (float) tmp; return SR_OK; } /** * Compose a string with a format string in the buffer pointed to by buf. * * It is up to the caller to ensure that the allocated buffer is large enough * to hold the formatted result. * * A terminating NUL character is automatically appended after the content * written. * * After the format parameter, the function expects at least as many additional * arguments as needed for format. * * This version ignores the current locale and uses the locale "C" for Linux, * FreeBSD, OSX and Android. * * @param buf Pointer to a buffer where the resulting C string is stored. * @param format C string that contains a format string (see printf). * @param ... A sequence of additional arguments, each containing a value to be * used to replace a format specifier in the format string. * * @return On success, the number of characters that would have been written, * not counting the terminating NUL character. * * @since 0.6.0 */ SR_API int sr_sprintf_ascii(char *buf, const char *format, ...) 
{ int ret; va_list args; va_start(args, format); ret = sr_vsprintf_ascii(buf, format, args); va_end(args); return ret; } /** * Compose a string with a format string in the buffer pointed to by buf. * * It is up to the caller to ensure that the allocated buffer is large enough * to hold the formatted result. * * Internally, the function retrieves arguments from the list identified by * args as if va_arg was used on it, and thus the state of args is likely to * be altered by the call. * * In any case, args should have been initialized by va_start at some point * before the call, and it is expected to be released by va_end at some point * after the call. * * This version ignores the current locale and uses the locale "C" for Linux, * FreeBSD, OSX and Android. * * @param buf Pointer to a buffer where the resulting C string is stored. * @param format C string that contains a format string (see printf). * @param args A value identifying a variable arguments list initialized with * va_start. * * @return On success, the number of characters that would have been written, * not counting the terminating NUL character. * * @since 0.6.0 */ SR_API int sr_vsprintf_ascii(char *buf, const char *format, va_list args) { #if defined(_WIN32) int ret; #if 0 _locale_t locale; locale = _create_locale(LC_NUMERIC, "C"); ret = _vsprintf_l(buf, format, locale, args); _free_locale(locale); #endif /* vsprintf() uses the current locale, may not work correctly for floats. 
*/ ret = vsprintf(buf, format, args); return ret; #elif defined(__APPLE__) int ret; locale_t locale; locale = newlocale(LC_NUMERIC_MASK, "C", NULL); ret = vsprintf_l(buf, locale, format, args); freelocale(locale); return ret; #elif defined(__FreeBSD__) && __FreeBSD_version >= 901000 int ret; locale_t locale; locale = newlocale(LC_NUMERIC_MASK, "C", NULL); ret = vsprintf_l(buf, locale, format, args); freelocale(locale); return ret; #elif defined(__ANDROID__) int ret; ret = vsprintf(buf, format, args); return ret; #elif defined(__linux__) int ret; locale_t old_locale, temp_locale; /* Switch to C locale for proper float/double conversion. */ temp_locale = newlocale(LC_NUMERIC, "C", NULL); old_locale = uselocale(temp_locale); ret = vsprintf(buf, format, args); /* Switch back to original locale. */ uselocale(old_locale); freelocale(temp_locale); return ret; #elif defined(__unix__) || defined(__unix) /* * This is a fallback for all other BSDs, *nix and FreeBSD <= 9.0, by * using the current locale for snprintf(). This may not work correctly * for floats! */ int ret; ret = vsprintf(buf, format, args); return ret; #else /* No implementation for unknown systems! */ return -1; #endif } /** * Composes a string with a format string (like printf) in the buffer pointed * by buf (taking buf_size as the maximum buffer capacity to fill). * If the resulting string would be longer than n - 1 characters, the remaining * characters are discarded and not stored, but counted for the value returned * by the function. * A terminating NUL character is automatically appended after the content * written. * After the format parameter, the function expects at least as many additional * arguments as needed for format. * * This version ignores the current locale and uses the locale "C" for Linux, * FreeBSD, OSX and Android. * * @param buf Pointer to a buffer where the resulting C string is stored. * @param buf_size Maximum number of bytes to be used in the buffer. 
The * generated string has a length of at most buf_size - 1, leaving space * for the additional terminating NUL character. * @param format C string that contains a format string (see printf). * @param ... A sequence of additional arguments, each containing a value to be * used to replace a format specifier in the format string. * * @return On success, the number of characters that would have been written if * buf_size had been sufficiently large, not counting the terminating * NUL character. On failure, a negative number is returned. * Notice that only when this returned value is non-negative and less * than buf_size, the string has been completely written. * * @since 0.6.0 */ SR_API int sr_snprintf_ascii(char *buf, size_t buf_size, const char *format, ...) { int ret; va_list args; va_start(args, format); ret = sr_vsnprintf_ascii(buf, buf_size, format, args); va_end(args); return ret; } /** * Composes a string with a format string (like printf) in the buffer pointed * by buf (taking buf_size as the maximum buffer capacity to fill). * If the resulting string would be longer than n - 1 characters, the remaining * characters are discarded and not stored, but counted for the value returned * by the function. * A terminating NUL character is automatically appended after the content * written. * Internally, the function retrieves arguments from the list identified by * args as if va_arg was used on it, and thus the state of args is likely to * be altered by the call. * In any case, arg should have been initialized by va_start at some point * before the call, and it is expected to be released by va_end at some point * after the call. * * This version ignores the current locale and uses the locale "C" for Linux, * FreeBSD, OSX and Android. * * @param buf Pointer to a buffer where the resulting C string is stored. * @param buf_size Maximum number of bytes to be used in the buffer. 
The * generated string has a length of at most buf_size - 1, leaving space * for the additional terminating NUL character. * @param format C string that contains a format string (see printf). * @param args A value identifying a variable arguments list initialized with * va_start. * * @return On success, the number of characters that would have been written if * buf_size had been sufficiently large, not counting the terminating * NUL character. On failure, a negative number is returned. * Notice that only when this returned value is non-negative and less * than buf_size, the string has been completely written. * * @since 0.6.0 */ SR_API int sr_vsnprintf_ascii(char *buf, size_t buf_size, const char *format, va_list args) { #if defined(_WIN32) int ret; #if 0 _locale_t locale; locale = _create_locale(LC_NUMERIC, "C"); ret = _vsnprintf_l(buf, buf_size, format, locale, args); _free_locale(locale); #endif /* vsprintf uses the current locale, may cause issues for floats. */ ret = vsnprintf(buf, buf_size, format, args); return ret; #elif defined(__APPLE__) int ret; locale_t locale; locale = newlocale(LC_NUMERIC_MASK, "C", NULL); ret = vsnprintf_l(buf, buf_size, locale, format, args); freelocale(locale); return ret; #elif defined(__FreeBSD__) && __FreeBSD_version >= 901000 int ret; locale_t locale; locale = newlocale(LC_NUMERIC_MASK, "C", NULL); ret = vsnprintf_l(buf, buf_size, locale, format, args); freelocale(locale); return ret; #elif defined(__ANDROID__) int ret; ret = vsnprintf(buf, buf_size, format, args); return ret; #elif defined(__linux__) int ret; locale_t old_locale, temp_locale; /* Switch to C locale for proper float/double conversion. */ temp_locale = newlocale(LC_NUMERIC, "C", NULL); old_locale = uselocale(temp_locale); ret = vsnprintf(buf, buf_size, format, args); /* Switch back to original locale. 
*/ uselocale(old_locale); freelocale(temp_locale); return ret; #elif defined(__unix__) || defined(__unix) /* * This is a fallback for all other BSDs, *nix and FreeBSD <= 9.0, by * using the current locale for snprintf(). This may not work correctly * for floats! */ int ret; ret = vsnprintf(buf, buf_size, format, args); return ret; #else /* No implementation for unknown systems! */ return -1; #endif } /** * Convert a sequence of bytes to its textual representation ("hex dump"). * * Callers should free the allocated GString. See sr_hexdump_free(). * * @param[in] data Pointer to the byte sequence to print. * @param[in] len Number of bytes to print. * * @return NULL upon error, newly allocated GString pointer otherwise. * * @private */ SR_PRIV GString *sr_hexdump_new(const uint8_t *data, const size_t len) { GString *s; size_t i; s = g_string_sized_new(3 * len); for (i = 0; i < len; i++) { if (i) g_string_append_c(s, ' '); <API key>(s, "%02x", data[i]); } return s; } SR_PRIV void sr_hexdump_free(GString *s) { if (s) g_string_free(s, TRUE); } /** * Convert a string representation of a numeric value to a sr_rational. * * The conversion is strict and will fail if the complete string does not * represent a valid number. The function sets errno according to the details * of the failure. This version ignores the locale. * * @param str The string representation to convert. * @param ret Pointer to sr_rational where the result of the conversion will be stored. * * @retval SR_OK Conversion successful. * @retval SR_ERR Failure. 
* * @since 0.5.0 */ SR_API int sr_parse_rational(const char *str, struct sr_rational *ret) { char *endptr = NULL; int64_t integral; int64_t fractional = 0; int64_t denominator = 1; int32_t fractional_len = 0; int32_t exponent = 0; gboolean is_negative = FALSE; gboolean no_integer, no_fractional; while (isspace(*str)) str++; errno = 0; integral = g_ascii_strtoll(str, &endptr, 10); if (str == endptr && (str[0] == '-' || str[0] == '+') && str[1] == '.') { endptr += 1; no_integer = TRUE; } else if (str == endptr && str[0] == '.') { no_integer = TRUE; } else if (errno) { return SR_ERR; } else { no_integer = FALSE; } if (integral < 0 || str[0] == '-') is_negative = TRUE; errno = 0; if (*endptr == '.') { gboolean is_exp, is_eos; const char *start = endptr + 1; fractional = g_ascii_strtoll(start, &endptr, 10); is_exp = *endptr == 'E' || *endptr == 'e'; is_eos = *endptr == '\0'; if (endptr == start && (is_exp || is_eos)) { fractional = 0; errno = 0; } if (errno) return SR_ERR; no_fractional = endptr == start; if (no_integer && no_fractional) return SR_ERR; fractional_len = endptr - start; } errno = 0; if ((*endptr == 'E') || (*endptr == 'e')) { exponent = g_ascii_strtoll(endptr + 1, &endptr, 10); if (errno) return SR_ERR; } if (*endptr != '\0') return SR_ERR; for (int i = 0; i < fractional_len; i++) integral *= 10; exponent -= fractional_len; if (!is_negative) integral += fractional; else integral -= fractional; while (exponent > 0) { integral *= 10; exponent } while (exponent < 0) { denominator *= 10; exponent++; } ret->p = integral; ret->q = denominator; return SR_OK; } /** * Convert a numeric value value to its "natural" string representation * in SI units. * * E.g. a value of 3000000, with units set to "W", would be converted * to "3 MW", 20000 to "20 kW", 31500 would become "31.5 kW". * * @param x The value to convert. * @param unit The unit to append to the string, or NULL if the string * has no units. 
* * @return A newly allocated string representation of the samplerate value, * or NULL upon errors. The caller is responsible to g_free() the * memory. * * @since 0.2.0 */ SR_API char *sr_si_string_u64(uint64_t x, const char *unit) { uint8_t i; uint64_t quot, divisor[] = { SR_HZ(1), SR_KHZ(1), SR_MHZ(1), SR_GHZ(1), SR_GHZ(1000), SR_GHZ(1000 * 1000), SR_GHZ(1000 * 1000 * 1000), }; const char *p, prefix[] = "\0kMGTPE"; char fmt[16], fract[20] = "", *f; if (!unit) unit = ""; for (i = 0; (quot = x / divisor[i]) >= 1000; i++); if (i) { sprintf(fmt, ".%%0%d"PRIu64, i * 3); f = fract + sprintf(fract, fmt, x % divisor[i]) - 1; while (f >= fract && strchr("0.", *f)) *f } p = prefix + i; return g_strdup_printf("%" PRIu64 "%s %.1s%s", quot, fract, p, unit); } /** * Convert a numeric samplerate value to its "natural" string representation. * * E.g. a value of 3000000 would be converted to "3 MHz", 20000 to "20 kHz", * 31500 would become "31.5 kHz". * * @param samplerate The samplerate in Hz. * * @return A newly allocated string representation of the samplerate value, * or NULL upon errors. The caller is responsible to g_free() the * memory. * * @since 0.1.0 */ SR_API char *<API key>(uint64_t samplerate) { return sr_si_string_u64(samplerate, "Hz"); } /** * Convert a numeric period value to the "natural" string representation * of its period value. * * The period is specified as a rational number's numerator and denominator. * * E.g. a pair of (1, 5) would be converted to "200 ms", (10, 100) to "100 ms". * * @param v_p The period numerator. * @param v_q The period denominator. * * @return A newly allocated string representation of the period value, * or NULL upon errors. The caller is responsible to g_free() the * memory. * * @since 0.5.0 */ SR_API char *sr_period_string(uint64_t v_p, uint64_t v_q) { double freq, v; int prec; freq = 1 / ((double)v_p / v_q); if (freq > SR_GHZ(1)) { v = (double)v_p / v_q * 1000000000000.0; prec = ((v - (uint64_t)v) < FLT_MIN) ? 
0 : 3; return g_strdup_printf("%.*f ps", prec, v); } else if (freq > SR_MHZ(1)) { v = (double)v_p / v_q * 1000000000.0; prec = ((v - (uint64_t)v) < FLT_MIN) ? 0 : 3; return g_strdup_printf("%.*f ns", prec, v); } else if (freq > SR_KHZ(1)) { v = (double)v_p / v_q * 1000000.0; prec = ((v - (uint64_t)v) < FLT_MIN) ? 0 : 3; return g_strdup_printf("%.*f us", prec, v); } else if (freq > 1) { v = (double)v_p / v_q * 1000.0; prec = ((v - (uint64_t)v) < FLT_MIN) ? 0 : 3; return g_strdup_printf("%.*f ms", prec, v); } else { v = (double)v_p / v_q; prec = ((v - (uint64_t)v) < FLT_MIN) ? 0 : 3; return g_strdup_printf("%.*f s", prec, v); } } /** * Convert a numeric voltage value to the "natural" string representation * of its voltage value. The voltage is specified as a rational number's * numerator and denominator. * * E.g. a value of 300000 would be converted to "300mV", 2 to "2V". * * @param v_p The voltage numerator. * @param v_q The voltage denominator. * * @return A newly allocated string representation of the voltage value, * or NULL upon errors. The caller is responsible to g_free() the * memory. * * @since 0.2.0 */ SR_API char *sr_voltage_string(uint64_t v_p, uint64_t v_q) { if (v_q == 1000) return g_strdup_printf("%" PRIu64 " mV", v_p); else if (v_q == 1) return g_strdup_printf("%" PRIu64 " V", v_p); else return g_strdup_printf("%g V", (float)v_p / (float)v_q); } /** * Convert a "natural" string representation of a size value to uint64_t. * * E.g. a value of "3k" or "3 K" would be converted to 3000, a value * of "15M" would be converted to 15000000. * * Value representations other than decimal (such as hex or octal) are not * supported. Only 'k' (kilo), 'm' (mega), 'g' (giga) suffixes are supported. * Spaces (but not other whitespace) between value and suffix are allowed. * * @param sizestring A string containing a (decimal) size value. * @param size Pointer to uint64_t which will contain the string's size value. * * @return SR_OK upon success, SR_ERR upon errors. 
* * @since 0.1.0 */ SR_API int sr_parse_sizestring(const char *sizestring, uint64_t *size) { uint64_t multiplier; int done; double frac_part; char *s; *size = strtoull(sizestring, &s, 10); multiplier = 0; frac_part = 0; done = FALSE; while (s && *s && multiplier == 0 && !done) { switch (*s) { case ' ': break; case '.': frac_part = g_ascii_strtod(s, &s); break; case 'k': case 'K': multiplier = SR_KHZ(1); break; case 'm': case 'M': multiplier = SR_MHZ(1); break; case 'g': case 'G': multiplier = SR_GHZ(1); break; case 't': case 'T': multiplier = SR_GHZ(1000); break; case 'p': case 'P': multiplier = SR_GHZ(1000 * 1000); break; case 'e': case 'E': multiplier = SR_GHZ(1000 * 1000 * 1000); break; default: done = TRUE; s } s++; } if (multiplier > 0) { *size *= multiplier; *size += frac_part * multiplier; } else { *size += frac_part; } if (s && *s && g_ascii_strcasecmp(s, "Hz")) return SR_ERR; return SR_OK; } /** * Convert a "natural" string representation of a time value to an * uint64_t value in milliseconds. * * E.g. a value of "3s" or "3 s" would be converted to 3000, a value * of "15ms" would be converted to 15. * * Value representations other than decimal (such as hex or octal) are not * supported. Only lower-case "s" and "ms" time suffixes are supported. * Spaces (but not other whitespace) between value and suffix are allowed. * * @param timestring A string containing a (decimal) time value. * @return The string's time value as uint64_t, in milliseconds. * * @todo Add support for "m" (minutes) and others. * @todo Add support for picoseconds? * @todo Allow both lower-case and upper-case? If no, document it. * * @since 0.1.0 */ SR_API uint64_t sr_parse_timestring(const char *timestring) { uint64_t time_msec; char *s; /* TODO: Error handling, logging. 
*/ time_msec = strtoull(timestring, &s, 10); if (time_msec == 0 && s == timestring) return 0; if (s && *s) { while (*s == ' ') s++; if (!strcmp(s, "s")) time_msec *= 1000; else if (!strcmp(s, "ms")) ; /* redundant */ else return 0; } return time_msec; } /** @since 0.1.0 */ SR_API gboolean sr_parse_boolstring(const char *boolstr) { /* * Complete absence of an input spec is assumed to mean TRUE, * as in command line option strings like this: * ...:samplerate=100k:header:numchannels=4:... */ if (!boolstr || !*boolstr) return TRUE; if (!g_ascii_strncasecmp(boolstr, "true", 4) || !g_ascii_strncasecmp(boolstr, "yes", 3) || !g_ascii_strncasecmp(boolstr, "on", 2) || !g_ascii_strncasecmp(boolstr, "1", 1)) return TRUE; return FALSE; } /** @since 0.2.0 */ SR_API int sr_parse_period(const char *periodstr, uint64_t *p, uint64_t *q) { char *s; *p = strtoull(periodstr, &s, 10); if (*p == 0 && s == periodstr) /* No digits found. */ return SR_ERR_ARG; if (s && *s) { while (*s == ' ') s++; if (!strcmp(s, "fs")) *q = UINT64_C(1000000000000000); else if (!strcmp(s, "ps")) *q = UINT64_C(1000000000000); else if (!strcmp(s, "ns")) *q = UINT64_C(1000000000); else if (!strcmp(s, "us")) *q = 1000000; else if (!strcmp(s, "ms")) *q = 1000; else if (!strcmp(s, "s")) *q = 1; else /* Must have a time suffix. */ return SR_ERR_ARG; } return SR_OK; } /** @since 0.2.0 */ SR_API int sr_parse_voltage(const char *voltstr, uint64_t *p, uint64_t *q) { char *s; *p = strtoull(voltstr, &s, 10); if (*p == 0 && s == voltstr) /* No digits found. */ return SR_ERR_ARG; if (s && *s) { while (*s == ' ') s++; if (!g_ascii_strcasecmp(s, "mv")) *q = 1000L; else if (!g_ascii_strcasecmp(s, "v")) *q = 1; else /* Must have a base suffix. */ return SR_ERR_ARG; } return SR_OK; }
/*
  control code for tailsitters. Enabled by setting Q_FRAME_CLASS=10
 */

#include "Plane.h"

/*
  return true when flying a tailsitter
  (quadplane support available and the tailsitter frame class selected)
 */
bool QuadPlane::is_tailsitter(void)
{
    return available() && frame_class == AP_Motors::<API key>;
}

/*
  check if we are flying as a tailsitter
  (tailsitter frame and currently in a VTOL flight mode)
 */
bool QuadPlane::tailsitter_active(void)
{
    return is_tailsitter() && in_vtol_mode();
}

/*
  run output for tailsitters.
  In fixed-wing flight this only drives the optional thrust-vectoring tilt
  servos from aileron/elevator demands; in VTOL flight it runs the VTOL
  motor output, resets the fixed-wing rate-controller integrators, applies
  hover thrust vectoring, and optionally passes selected RC inputs straight
  through for prop-hang training.
 */
void QuadPlane::tailsitter_output(void)
{
    if (!is_tailsitter()) {
        return;
    }
    if (!tailsitter_active()) {
        // fixed-wing flight path
        if (tailsitter.<API key> > 0) {
            // thrust vectoring in fixed wing flight
            float aileron = SRV_Channels::get_output_scaled(SRV_Channel::k_aileron);
            float elevator = SRV_Channels::get_output_scaled(SRV_Channel::k_elevator);
            // elevator tilts both motors together, aileron differentially
            float tilt_left = (elevator + aileron) * tailsitter.<API key>;
            float tilt_right = (elevator - aileron) * tailsitter.<API key>;
            SRV_Channels::set_output_scaled(SRV_Channel::k_tiltMotorLeft, tilt_left);
            SRV_Channels::set_output_scaled(SRV_Channel::k_tiltMotorRight, tilt_right);
        } else {
            // no vectoring configured: center the tilt servos
            SRV_Channels::set_output_scaled(SRV_Channel::k_tiltMotorLeft, 0);
            SRV_Channels::set_output_scaled(SRV_Channel::k_tiltMotorRight, 0);
        }
        return;
    }

    // VTOL flight path
    motors_output();
    // keep the fixed-wing controllers from winding up while hovering
    plane.pitchController.reset_I();
    plane.rollController.reset_I();

    if (tailsitter.vectored_hover_gain > 0) {
        // thrust vectoring VTOL modes
        float aileron = SRV_Channels::get_output_scaled(SRV_Channel::k_aileron);
        float elevator = SRV_Channels::get_output_scaled(SRV_Channel::k_elevator);
        float tilt_left = (elevator + aileron) * tailsitter.vectored_hover_gain;
        float tilt_right = (elevator - aileron) * tailsitter.vectored_hover_gain;
        SRV_Channels::set_output_scaled(SRV_Channel::k_tiltMotorLeft, tilt_left);
        SRV_Channels::set_output_scaled(SRV_Channel::k_tiltMotorRight, tilt_right);
    }

    // when the mask-enable RC channel is high (>1700us), pass the masked
    // pilot inputs directly to the control surfaces
    if (tailsitter.input_mask_chan > 0 &&
        tailsitter.input_mask > 0 &&
        hal.rcin->read(tailsitter.input_mask_chan-1) > 1700) {
        // the user is learning to prop-hang
        if (tailsitter.input_mask & <API key>) {
            SRV_Channels::set_output_scaled(SRV_Channel::k_aileron, plane.channel_roll-><API key>());
        }
        if (tailsitter.input_mask & <API key>) {
            SRV_Channels::set_output_scaled(SRV_Channel::k_elevator, plane.channel_pitch-><API key>());
        }
        if (tailsitter.input_mask & <API key>) {
            SRV_Channels::set_output_scaled(SRV_Channel::k_throttle, plane.channel_throttle-><API key>());
        }
        if (tailsitter.input_mask & <API key>) {
            SRV_Channels::set_output_scaled(SRV_Channel::k_rudder, plane.channel_rudder-><API key>());
        }
    }
}

/*
  return true when we have completed enough of a transition to switch to
  fixed wing control: immediately when flying inverted, otherwise once
  pitch or roll exceeds the configured transition angle, or after a 2
  second timeout.
 */
bool QuadPlane::<API key>(void)
{
    if (plane.fly_inverted()) {
        // transition immediately
        return true;
    }
    // angles are in centidegrees, the parameter in degrees
    if (labs(ahrs_view->pitch_sensor) > tailsitter.transition_angle*100 ||
        labs(ahrs_view->roll_sensor) > tailsitter.transition_angle*100 ||
        AP_HAL::millis() - transition_start_ms > 2000) {
        return true;
    }
    // still waiting
    return false;
}

// handle different tailsitter input types
void QuadPlane::<API key>(void)
{
    if (tailsitter_active() &&
        tailsitter.input_type == <API key>) {
        // the user has asked for body frame controls when tailsitter
        // is active. We switch around the control_in value for the
        // channels to do this, as that ensures the value is
        // consistent throughout the code
        int16_t roll_in = plane.channel_roll->get_control_in();
        int16_t yaw_in = plane.channel_rudder->get_control_in();
        plane.channel_roll->set_control_in(yaw_in);
        plane.channel_rudder->set_control_in(-roll_in);
    }
}
#include "HeadSpin.h" #include "plComponent.h" #include "plComponentReg.h" #include "plMiscComponents.h" #include "MaxMain/plMaxNode.h" #include "resource.h" #include <iparamm2.h> #pragma hdrstop #include "MaxMain/plPlasmaRefMsgs.h" #include "pnSceneObject/plSceneObject.h" #include "pnSceneObject/<API key>.h" #include "pnSceneObject/plDrawInterface.h" #include "plMessage/plSimStateMsg.h" #include "pnMessage/plEnableMsg.h" #include "MaxMain/plPluginResManager.h" void <API key>() {} // Ignore Component //Class that accesses the paramblock below. class plIgnoreComponent : public plComponent { public: plIgnoreComponent(); // SetupProperties - Internal setup and write-only set properties on the MaxNode. No reading // of properties on the MaxNode, as it's still indeterminant. bool SetupProperties(plMaxNode *pNode, plErrorMsg *pErrMsg); bool Convert(plMaxNode *node, plErrorMsg *pErrMsg); virtual void CollectNonDrawables(INodeTab& nonDrawables); }; //Max desc stuff necessary below. CLASS_DESC(plIgnoreComponent, gIgnoreDesc, "Ignore", "Ignore", COMP_TYPE_IGNORE, Class_ID(0x48326288, 0x528a3dea)) enum { kIgnoreMeCheckBx }; ParamBlockDesc2 gIgnoreBk ( plComponent::kBlkComp, _T("Ignore"), 0, &gIgnoreDesc, P_AUTO_CONSTRUCT + P_AUTO_UI, plComponent::kRefComp, IDD_COMP_IGNORE, IDS_COMP_IGNORES, 0, 0, NULL, kIgnoreMeCheckBx, _T("Ignore"), TYPE_BOOL, 0, 0, p_default, TRUE, p_ui, TYPE_SINGLECHEKBOX, <API key>, end, end ); plIgnoreComponent::plIgnoreComponent() { fClassDesc = &gIgnoreDesc; fClassDesc->MakeAutoParamBlocks(this); } void plIgnoreComponent::CollectNonDrawables(INodeTab& nonDrawables) { if (fCompPB->GetInt(kIgnoreMeCheckBx)) { AddTargetsToList(nonDrawables); } } // SetupProperties - Internal setup and write-only set properties on the MaxNode. No reading // of properties on the MaxNode, as it's still indeterminant. 
bool plIgnoreComponent::SetupProperties(plMaxNode *pNode, plErrorMsg *pErrMsg) { if (fCompPB->GetInt(kIgnoreMeCheckBx)) pNode->SetCanConvert(false); return true; } bool plIgnoreComponent::Convert(plMaxNode *node, plErrorMsg *pErrMsg) { return true; } // IgnoreLite Component //Class that accesses the paramblock below. class <API key> : public plComponent { public: enum { kSelectedOnly }; enum LightState { kTurnOn, kTurnOff, kToggle }; public: <API key>(); void SetState(LightState s); // SetupProperties - Internal setup and write-only set properties on the MaxNode. No reading // of properties on the MaxNode, as it's still indeterminant. bool SetupProperties(plMaxNode *pNode, plErrorMsg *pErrMsg) { return true; } bool PreConvert(plMaxNode *pNode, plErrorMsg *pErrMsg) { return true; } bool Convert(plMaxNode *node, plErrorMsg *pErrMsg) { return true; } }; class plIgnoreLiteProc : public <API key> { public: BOOL DlgProc(TimeValue t, IParamMap2 *map, HWND hWnd, UINT msg, WPARAM wParam, LPARAM lParam) { switch (msg) { case WM_COMMAND: if( (HIWORD(wParam) == BN_CLICKED) && (LOWORD(wParam) == <API key>) ) { <API key>* ilc = (<API key>*)map->GetParamBlock()->GetOwner(); ilc->SetState(<API key>::kTurnOn); return TRUE; } if( (HIWORD(wParam) == BN_CLICKED) && (LOWORD(wParam) == <API key>) ) { <API key>* ilc = (<API key>*)map->GetParamBlock()->GetOwner(); ilc->SetState(<API key>::kTurnOff); return TRUE; } if( (HIWORD(wParam) == BN_CLICKED) && (LOWORD(wParam) == <API key>) ) { <API key>* ilc = (<API key>*)map->GetParamBlock()->GetOwner(); ilc->SetState(<API key>::kToggle); return TRUE; } break; } return false; } void DeleteThis() {} }; static plIgnoreLiteProc gIgnoreLiteProc; //Max desc stuff necessary below. 
CLASS_DESC(<API key>, gIgnoreLiteDesc, "Control Max Light", "ControlLite", COMP_TYPE_IGNORE, IGNORELITE_CID) ParamBlockDesc2 gIgnoreLiteBk ( plComponent::kBlkComp, _T("IgnoreLite"), 0, &gIgnoreLiteDesc, P_AUTO_CONSTRUCT + P_AUTO_UI, plComponent::kRefComp, IDD_COMP_IGNORELITE, <API key>, 0, 0, &gIgnoreLiteProc, <API key>::kSelectedOnly, _T("SelectedOnly"), TYPE_BOOL, 0, 0, p_default, FALSE, p_ui, TYPE_SINGLECHEKBOX, <API key>, end, end ); <API key>::<API key>() { fClassDesc = &gIgnoreLiteDesc; fClassDesc->MakeAutoParamBlocks(this); } void <API key>::SetState(LightState s) { BOOL selectedOnly = fCompPB->GetInt(kSelectedOnly); int numTarg = NumTargets(); int i; for( i = 0; i < numTarg; i++ ) { plMaxNodeBase* targ = GetTarget(i); if( targ ) { if( selectedOnly && !targ->Selected() ) continue; Object *obj = targ->EvalWorldState(TimeValue(0)).obj; if (obj && (obj->SuperClassID() == SClass_ID(LIGHT_CLASS_ID))) { LightObject* liObj = (LightObject*)obj; switch( s ) { case kTurnOn: liObj->SetUseLight(true); break; case kTurnOff: liObj->SetUseLight(false); break; case kToggle: liObj->SetUseLight(!liObj->GetUseLight()); break; } } } } } // Barney Component //Class that accesses the paramblock below. class plBarneyComponent : public plComponent { public: plBarneyComponent(); // SetupProperties - Internal setup and write-only set properties on the MaxNode. No reading // of properties on the MaxNode, as it's still indeterminant. bool SetupProperties(plMaxNode *pNode, plErrorMsg *pErrMsg); bool Convert(plMaxNode *node, plErrorMsg *pErrMsg); }; //Max desc stuff necessary below. 
// Barney Component: marks a node as a "Barney" object — never converted to a
// scene object, but tagged so the exporter can still recognize it.
// Max class descriptor for the Barney component.
CLASS_DESC(plBarneyComponent, gBarneyDesc, "Barney", "Barney", COMP_TYPE_IGNORE, Class_ID(0x376955dc, 0x2fec50ae))

// Parameter block: the Barney component exposes no UI parameters of its own.
ParamBlockDesc2 gBarneyBk
(
    plComponent::kBlkComp, _T("Barney"), 0, &gBarneyDesc, P_AUTO_CONSTRUCT + P_AUTO_UI, plComponent::kRefComp, IDD_COMP_BARNEY, IDS_COMP_BARNEYS, 0, 0, NULL,
    end
);

plBarneyComponent::plBarneyComponent()
{
    fClassDesc = &gBarneyDesc;
    fClassDesc->MakeAutoParamBlocks(this);
}

// SetupProperties - Internal setup and write-only set properties on the MaxNode. No reading
// of properties on the MaxNode, as it's still indeterminant.
// Excludes the node from conversion but flags it as a Barney node.
bool plBarneyComponent::SetupProperties(plMaxNode *pNode, plErrorMsg *pErrMsg)
{
    pNode->SetCanConvert(false);
    pNode->SetIsBarney(true);
    return true;
}

// Nothing to convert: the node was excluded during SetupProperties.
bool plBarneyComponent::Convert(plMaxNode *node, plErrorMsg *pErrMsg)
{
    return true;
}

// NoShow Component
// Hides its targets from drawing and/or physics, as selected by the
// checkboxes in the paramblock below.
// Class that accesses the paramblock below.
class plNoShowComponent : public plComponent
{
public:
    enum
    {
        kShowable,      // keep targets convertible; disable them at runtime instead
        kAffectDraw,    // suppress the draw side of the targets
        kAffectPhys     // suppress the physics side of the targets
    };

public:
    plNoShowComponent();

    virtual void CollectNonDrawables(INodeTab& nonDrawables);

    // SetupProperties - Internal setup and write-only set properties on the MaxNode. No reading
    // of properties on the MaxNode, as it's still indeterminant.
    bool SetupProperties(plMaxNode *pNode, plErrorMsg *pErrMsg);
    bool Convert(plMaxNode *node, plErrorMsg *pErrMsg);
};

const Class_ID COMP_NOSHOW_CID(0x41cb2b85, 0x615932c6);

//Max desc stuff necessary below.
CLASS_DESC(plNoShowComponent, gNoShowDesc, "NoShow", "NoShow", COMP_TYPE_IGNORE, COMP_NOSHOW_CID) ParamBlockDesc2 gNoShowBk ( plComponent::kBlkComp, _T("NoShow"), 0, &gNoShowDesc, P_AUTO_CONSTRUCT + P_AUTO_UI, plComponent::kRefComp, IDD_COMP_NOSHOW, IDS_COMP_NOSHOW, 0, 0, NULL, plNoShowComponent::kShowable, _T("Showable"), TYPE_BOOL, 0, 0, p_default, FALSE, p_ui, TYPE_SINGLECHEKBOX, <API key>, end, plNoShowComponent::kAffectDraw, _T("AffectDraw"), TYPE_BOOL, 0, 0, p_default, TRUE, p_ui, TYPE_SINGLECHEKBOX, <API key>, end, plNoShowComponent::kAffectPhys, _T("AffectPhys"), TYPE_BOOL, 0, 0, p_default, FALSE, p_ui, TYPE_SINGLECHEKBOX, <API key>, end, end ); plNoShowComponent::plNoShowComponent() { fClassDesc = &gNoShowDesc; fClassDesc->MakeAutoParamBlocks(this); } // SetupProperties - Internal setup and write-only set properties on the MaxNode. No reading // of properties on the MaxNode, as it's still indeterminant. bool plNoShowComponent::SetupProperties(plMaxNode *pNode, plErrorMsg *pErrMsg) { if( !fCompPB->GetInt(kShowable) ) { if( fCompPB->GetInt(kAffectDraw) ) pNode->SetDrawable(false); if( fCompPB->GetInt(kAffectPhys) ) pNode->SetPhysical(false); } return true; } bool plNoShowComponent::Convert(plMaxNode *node, plErrorMsg *pErrMsg) { plSceneObject* obj = node->GetSceneObject(); if( !obj ) return true; if( fCompPB->GetInt(kShowable) ) { if( fCompPB->GetInt(kAffectDraw) ) { plEnableMsg* eMsg = new plEnableMsg(nil, plEnableMsg::kDisable, plEnableMsg::kDrawable); eMsg->AddReceiver(obj->GetKey()); eMsg->Send(); } if( fCompPB->GetInt(kAffectPhys) ) { hsAssert(0, "Who uses this?"); // <API key>* pMsg = new <API key>; // pMsg->SetFlags(<API key>::kCollideOff | <API key>::kReportOff); // pMsg->AddReceiver(obj->GetKey()); // pMsg->Send(); } #if 0 plDrawInterface* di = node->GetDrawInterface(); if( di && { di->SetProperty(plDrawInterface::kDisable, true); } #endif } return true; } void plNoShowComponent::CollectNonDrawables(INodeTab& nonDrawables) { if( 
fCompPB->GetInt(kAffectDraw) ) AddTargetsToList(nonDrawables); }
#include "branchmodel.h" #include "gitclient.h" #include <utils/qtcassert.h> #include <vcsbase/vcsoutputwindow.h> #include <vcsbase/vcscommand.h> #include <QFont> using namespace VcsBase; namespace Git { namespace Internal { enum RootNodes { LocalBranches = 0, RemoteBranches = 1, Tags = 2 }; // BranchNode: class BranchNode { public: BranchNode() : parent(0), name(QLatin1String("<ROOT>")) { } BranchNode(const QString &n, const QString &s = QString(), const QString &t = QString()) : parent(0), name(n), sha(s), tracking(t) { } ~BranchNode() { while (!children.isEmpty()) delete children.first(); if (parent) parent->children.removeAll(this); } BranchNode *rootNode() const { return parent ? parent->rootNode() : const_cast<BranchNode *>(this); } int count() const { return children.count(); } bool isLeaf() const { return children.isEmpty() && parent && parent->parent; } bool childOf(BranchNode *node) const { if (this == node) return true; return parent ? parent->childOf(node) : false; } bool childOfRoot(RootNodes root) const { BranchNode *rn = rootNode(); if (rn->isLeaf()) return false; if (root >= rn->children.count()) return false; return childOf(rn->children.at(root)); } bool isTag() const { return childOfRoot(Tags); } bool isLocal() const { return childOfRoot(LocalBranches); } BranchNode *childOfName(const QString &name) const { for (int i = 0; i < children.count(); ++i) { if (children.at(i)->name == name) return children.at(i); } return 0; } QStringList fullName(bool includePrefix = false) const { QTC_ASSERT(isLeaf(), return QStringList()); QStringList fn; QList<const BranchNode *> nodes; const BranchNode *current = this; while (current->parent) { nodes.prepend(current); current = current->parent; } if (includePrefix) fn.append(nodes.first()->sha); nodes.removeFirst(); foreach (const BranchNode *n, nodes) fn.append(n->name); return fn; } void insert(const QStringList &path, BranchNode *n) { BranchNode *current = this; for (int i = 0; i < path.count(); ++i) { 
BranchNode *c = current->childOfName(path.at(i)); if (c) current = c; else current = current->append(new BranchNode(path.at(i))); } current->append(n); } BranchNode *append(BranchNode *n) { n->parent = this; children.append(n); return n; } QStringList childrenNames() const { if (children.count() > 0) { QStringList names; foreach (BranchNode *n, children) { names.append(n->childrenNames()); } return names; } return QStringList(fullName().join(QLatin1Char('/'))); } int rowOf(BranchNode *node) { return children.indexOf(node); } BranchNode *parent; QList<BranchNode *> children; QString name; QString sha; QString tracking; mutable QString toolTip; }; // BranchModel: BranchModel::BranchModel(GitClient *client, QObject *parent) : QAbstractItemModel(parent), m_client(client), m_rootNode(new BranchNode), m_currentBranch(0) { QTC_CHECK(m_client); // Abuse the sha field for ref prefix m_rootNode->append(new BranchNode(tr("Local Branches"), QLatin1String("refs/heads"))); m_rootNode->append(new BranchNode(tr("Remote Branches"), QLatin1String("refs/remotes"))); } BranchModel::~BranchModel() { delete m_rootNode; } QModelIndex BranchModel::index(int row, int column, const QModelIndex &parentIdx) const { if (column != 0) return QModelIndex(); BranchNode *parentNode = indexToNode(parentIdx); if (row >= parentNode->count()) return QModelIndex(); return nodeToIndex(parentNode->children.at(row)); } QModelIndex BranchModel::parent(const QModelIndex &index) const { if (!index.isValid()) return QModelIndex(); BranchNode *node = indexToNode(index); if (node->parent == m_rootNode) return QModelIndex(); return nodeToIndex(node->parent); } int BranchModel::rowCount(const QModelIndex &parentIdx) const { if (parentIdx.column() > 0) return 0; return indexToNode(parentIdx)->count(); } int BranchModel::columnCount(const QModelIndex &parent) const { Q_UNUSED(parent); return 1; } QVariant BranchModel::data(const QModelIndex &index, int role) const { BranchNode *node = indexToNode(index); if (!node) 
return QVariant(); switch (role) { case Qt::DisplayRole: { QString res = node->name; if (!node->tracking.isEmpty()) res += QLatin1String(" [") + node->tracking + QLatin1Char(']'); return res; } case Qt::EditRole: return node->name; case Qt::ToolTipRole: if (!node->isLeaf()) return QVariant(); if (node->toolTip.isEmpty()) node->toolTip = toolTip(node->sha); return node->toolTip; case Qt::FontRole: { QFont font; if (!node->isLeaf()) { font.setBold(true); } else if (node == m_currentBranch) { font.setBold(true); font.setUnderline(true); } return font; } default: return QVariant(); } } bool BranchModel::setData(const QModelIndex &index, const QVariant &value, int role) { if (role != Qt::EditRole) return false; BranchNode *node = indexToNode(index); if (!node) return false; const QString newName = value.toString(); if (newName.isEmpty()) return false; if (node->name == newName) return true; QStringList oldFullName = node->fullName(); node->name = newName; QStringList newFullName = node->fullName(); QString output; QString errorMessage; if (!m_client-><API key>(m_workingDirectory, QStringList() << QLatin1String("-m") << oldFullName.last() << newFullName.last(), &output, &errorMessage)) { node->name = oldFullName.last(); VcsOutputWindow::appendError(errorMessage); return false; } emit dataChanged(index, index); return true; } Qt::ItemFlags BranchModel::flags(const QModelIndex &index) const { BranchNode *node = indexToNode(index); if (!node) return Qt::NoItemFlags; if (node->isLeaf() && node->isLocal()) return Qt::ItemIsSelectable | Qt::ItemIsEditable | Qt::ItemIsEnabled; else return Qt::ItemIsSelectable | Qt::ItemIsEnabled; } void BranchModel::clear() { foreach (BranchNode *root, m_rootNode->children) while (root->count()) delete root->children.takeLast(); if (hasTags()) m_rootNode->children.takeLast(); m_currentBranch = 0; } bool BranchModel::refresh(const QString &workingDirectory, QString *errorMessage) { beginResetModel(); clear(); if (workingDirectory.isEmpty()) { 
endResetModel(); return false; } m_currentSha = m_client-><API key>(workingDirectory); QStringList args; args << QLatin1String("--format=%(objectname)\t%(refname)\t%(upstream:short)\t%(*objectname)"); QString output; if (!m_client-><API key>(workingDirectory, args, &output, errorMessage)) VcsOutputWindow::appendError(*errorMessage); m_workingDirectory = workingDirectory; const QStringList lines = output.split(QLatin1Char('\n')); foreach (const QString &l, lines) parseOutputLine(l); if (m_currentBranch) { if (m_currentBranch->parent == m_rootNode->children.at(LocalBranches)) m_currentBranch = 0; setCurrentBranch(); } endResetModel(); return true; } void BranchModel::setCurrentBranch() { QString currentBranch = m_client-><API key>(m_workingDirectory); if (currentBranch.isEmpty()) return; BranchNode *local = m_rootNode->children.at(LocalBranches); int pos = 0; for (pos = 0; pos < local->count(); ++pos) { if (local->children.at(pos)->name == currentBranch) m_currentBranch = local->children[pos]; } } void BranchModel::renameBranch(const QString &oldName, const QString &newName) { QString errorMessage; QString output; if (!m_client-><API key>(m_workingDirectory, QStringList() << QLatin1String("-m") << oldName << newName, &output, &errorMessage)) VcsOutputWindow::appendError(errorMessage); else refresh(m_workingDirectory, &errorMessage); } void BranchModel::renameTag(const QString &oldName, const QString &newName) { QString errorMessage; QString output; if (!m_client->synchronousTagCmd(m_workingDirectory, QStringList() << newName << oldName, &output, &errorMessage) || !m_client->synchronousTagCmd(m_workingDirectory, QStringList() << QLatin1String("-d") << oldName, &output, &errorMessage)) { VcsOutputWindow::appendError(errorMessage); } else { refresh(m_workingDirectory, &errorMessage); } } QString BranchModel::workingDirectory() const { return m_workingDirectory; } GitClient *BranchModel::client() const { return m_client; } QModelIndex BranchModel::currentBranch() const { 
// (tail of BranchModel::currentBranch(); its signature precedes this chunk)
if (!m_currentBranch)
    return QModelIndex();
return nodeToIndex(m_currentBranch);
}

// Full ref name ("a/b/c") for a leaf node. When includePrefix is true the
// root node's sha field — which is repurposed to hold the ref prefix such
// as "refs/heads" — is prepended.
QString BranchModel::fullName(const QModelIndex &idx, bool includePrefix) const
{
    if (!idx.isValid())
        return QString();
    BranchNode *node = indexToNode(idx);
    if (!node || !node->isLeaf())
        return QString();
    QStringList path = node->fullName(includePrefix);
    return path.join(QLatin1Char('/'));
}

// Names of all local branches; empty when the model has no data yet.
QStringList BranchModel::localBranchNames() const
{
    if (!m_rootNode || !m_rootNode->count())
        return QStringList();
    return m_rootNode->children.at(LocalBranches)->childrenNames();
}

// SHA recorded for the node at idx (empty string for an invalid index).
QString BranchModel::sha(const QModelIndex &idx) const
{
    if (!idx.isValid())
        return QString();
    BranchNode *node = indexToNode(idx);
    return node->sha;
}

// True when the optional "Tags" root node has been created.
bool BranchModel::hasTags() const
{
    return m_rootNode->children.count() > Tags;
}

bool BranchModel::isLocal(const QModelIndex &idx) const
{
    if (!idx.isValid())
        return false;
    BranchNode *node = indexToNode(idx);
    return node->isLocal();
}

bool BranchModel::isLeaf(const QModelIndex &idx) const
{
    if (!idx.isValid())
        return false;
    BranchNode *node = indexToNode(idx);
    return node->isLeaf();
}

bool BranchModel::isTag(const QModelIndex &idx) const
{
    if (!idx.isValid() || !hasTags())
        return false;
    return indexToNode(idx)->isTag();
}

// Force-deletes the branch at idx ("-D") and removes its node on success.
// NOTE(review): the client call name was redacted ("<API key>") in this
// copy; presumably a synchronous "git branch" command — restore upstream.
void BranchModel::removeBranch(const QModelIndex &idx)
{
    QString branch = fullName(idx);
    if (branch.isEmpty())
        return;
    QString errorMessage;
    QString output;
    QStringList args;
    args << QLatin1String("-D") << branch;
    if (!m_client-><API key>(m_workingDirectory, args, &output, &errorMessage)) {
        VcsOutputWindow::appendError(errorMessage);
        return;
    }
    removeNode(idx);
}

// Deletes the tag at idx ("git tag -d") and removes its node on success.
void BranchModel::removeTag(const QModelIndex &idx)
{
    QString tag = fullName(idx);
    if (tag.isEmpty())
        return;
    QString errorMessage;
    QString output;
    QStringList args;
    args << QLatin1String("-d") << tag;
    if (!m_client->synchronousTagCmd(m_workingDirectory, args, &output, &errorMessage)) {
        VcsOutputWindow::appendError(errorMessage);
        return;
    }
    removeNode(idx);
}

// (start of the next definition; its body continues past this chunk)
void
BranchModel::checkoutBranch(const QModelIndex &idx) { QString branch = fullName(idx, !isLocal(idx)); if (branch.isEmpty()) return; // No StashGuard since this function for now is only used with clean working dir. // If it is ever used from another place, please add StashGuard here m_client->synchronousCheckout(m_workingDirectory, branch); } bool BranchModel::branchIsMerged(const QModelIndex &idx) { QString branch = fullName(idx); if (branch.isEmpty()) return false; QString errorMessage; QString output; QStringList args; args << QLatin1String("-a") << QLatin1String("--contains") << sha(idx); if (!m_client-><API key>(m_workingDirectory, args, &output, &errorMessage)) VcsOutputWindow::appendError(errorMessage); QStringList lines = output.split(QLatin1Char('\n'), QString::SkipEmptyParts); foreach (const QString &l, lines) { QString currentBranch = l.mid(2); // remove first letters (those are either // " " or "* " depending on whether it is // the currently checked out branch or not) if (currentBranch != branch) return true; } return false; } static int positionForName(BranchNode *node, const QString &name) { int pos = 0; for (pos = 0; pos < node->count(); ++pos) { if (node->children.at(pos)->name >= name) break; } return pos; } QModelIndex BranchModel::addBranch(const QString &name, bool track, const QModelIndex &startPoint) { if (!m_rootNode || !m_rootNode->count()) return QModelIndex(); const QString trackedBranch = fullName(startPoint); const QString fullTrackedBranch = fullName(startPoint, true); QString startSha; QString output; QString errorMessage; QStringList args; args << (track ? 
QLatin1String("--track") : QLatin1String("--no-track")); args << name; if (!fullTrackedBranch.isEmpty()) { args << fullTrackedBranch; startSha = sha(startPoint); } else { startSha = m_client-><API key>(m_workingDirectory); } if (!m_client-><API key>(m_workingDirectory, args, &output, &errorMessage)) { VcsOutputWindow::appendError(errorMessage); return QModelIndex(); } BranchNode *local = m_rootNode->children.at(LocalBranches); const int slash = name.indexOf(QLatin1Char('/')); const QString leafName = slash == -1 ? name : name.mid(slash + 1); bool added = false; if (slash != -1) { const QString nodeName = name.left(slash); int pos = positionForName(local, nodeName); BranchNode *child = (pos == local->count()) ? 0 : local->children.at(pos); if (!child || child->name != nodeName) { child = new BranchNode(nodeName); beginInsertRows(nodeToIndex(local), pos, pos); added = true; child->parent = local; local->children.insert(pos, child); } local = child; } int pos = positionForName(local, leafName); auto newNode = new BranchNode(leafName, startSha, track ? trackedBranch : QString()); if (!added) beginInsertRows(nodeToIndex(local), pos, pos); newNode->parent = local; local->children.insert(pos, newNode); endInsertRows(); return nodeToIndex(newNode); } void BranchModel::setRemoteTracking(const QModelIndex &trackingIndex) { QModelIndex current = currentBranch(); QTC_ASSERT(current.isValid(), return); const QString currentName = fullName(current); const QString shortTracking = fullName(trackingIndex); const QString tracking = fullName(trackingIndex, true); m_client-><API key>(m_workingDirectory, currentName, tracking); m_currentBranch->tracking = shortTracking; emit dataChanged(current, current); } void BranchModel::parseOutputLine(const QString &line) { if (line.size() < 3) return; QStringList lineParts = line.split(QLatin1Char('\t')); const QString shaDeref = lineParts.at(3); const QString sha = shaDeref.isEmpty() ? 
lineParts.at(0) : shaDeref; const QString fullName = lineParts.at(1); bool current = (sha == m_currentSha); bool showTags = m_client->settings().boolValue(GitSettings::showTagsKey); // insert node into tree: QStringList nameParts = fullName.split(QLatin1Char('/')); nameParts.removeFirst(); // remove refs... BranchNode *root = 0; if (nameParts.first() == QLatin1String("heads")) { root = m_rootNode->children.at(LocalBranches); } else if (nameParts.first() == QLatin1String("remotes")) { root = m_rootNode->children.at(RemoteBranches); } else if (showTags && nameParts.first() == QLatin1String("tags")) { if (!hasTags()) // Tags is missing, add it m_rootNode->append(new BranchNode(tr("Tags"), QLatin1String("refs/tags"))); root = m_rootNode->children.at(Tags); } else { return; } nameParts.removeFirst(); // limit depth of list. Git basically only ever wants one / and considers the rest as part of // the name. while (nameParts.count() > 3) { nameParts[2] = nameParts.at(2) + QLatin1Char('/') + nameParts.at(3); nameParts.removeAt(3); } const QString name = nameParts.last(); nameParts.removeLast(); auto newNode = new BranchNode(name, sha, lineParts.at(2)); root->insert(nameParts, newNode); if (current) m_currentBranch = newNode; } BranchNode *BranchModel::indexToNode(const QModelIndex &index) const { if (index.column() > 0) return 0; if (!index.isValid()) return m_rootNode; return static_cast<BranchNode *>(index.internalPointer()); } QModelIndex BranchModel::nodeToIndex(BranchNode *node) const { if (node == m_rootNode) return QModelIndex(); return createIndex(node->parent->rowOf(node), 0, static_cast<void *>(node)); } void BranchModel::removeNode(const QModelIndex &idx) { QModelIndex nodeIndex = idx; // idx is a leaf, so count must be 0. 
BranchNode *node = indexToNode(nodeIndex); while (node->count() == 0 && node->parent != m_rootNode) { BranchNode *parentNode = node->parent; const QModelIndex parentIndex = nodeToIndex(parentNode); const int nodeRow = nodeIndex.row(); beginRemoveRows(parentIndex, nodeRow, nodeRow); parentNode->children.removeAt(nodeRow); delete node; endRemoveRows(); node = parentNode; nodeIndex = parentIndex; } } QString BranchModel::toolTip(const QString &sha) const { // Show the sha description excluding diff as toolTip QString output; QString errorMessage; QStringList arguments(QLatin1String("-n1")); arguments << sha; if (!m_client->synchronousLog(m_workingDirectory, arguments, &output, &errorMessage, VcsCommand::<API key>)) { return errorMessage; } return output; } } // namespace Internal } // namespace Git
-- Account initialisation script for Oracle Trust Service tablespace.
-- Usage: sqlplus / as sysdba @<script-name>.sql
--   (the script's own file name was redacted in this copy; the stray
--    CDATA wrapper around this line was extraction noise and is removed)
-- WARNING: drops and recreates the TRUST schema, destroying existing data.
drop user trust cascade;
create user trust identified by trust default tablespace TRUST_SERVICE temporary tablespace temp quota unlimited on TRUST_SERVICE ;
-- Minimal privileges needed by the service's schema owner.
grant connect to trust;
grant create sequence to trust;
grant create view to trust;
grant alter session to trust;
grant create table to trust;
-- XA DataSource support: recovery of in-doubt distributed transactions
-- requires read access to these SYS views.
-- NOTE(review): the first view name was redacted; it is presumably
-- sys.dba_pending_transactions — verify against the original script.
GRANT SELECT ON sys.<API key> TO trust;
GRANT SELECT ON sys.pending_trans$ TO trust;
GRANT SELECT ON sys.dba_2pc_pending TO trust;
-- for Oracle 10g R2 with patch for bug 5945463 applied and higher:
-- GRANT EXECUTE ON sys.dbms_xa TO trust;
-- else
GRANT EXECUTE ON sys.dbms_system TO trust;
#include "ioAbc.h"

////////////////////////////////////////////////////////////////////////
///                        DECLARATIONS                               ///
////////////////////////////////////////////////////////////////////////

////////////////////////////////////////////////////////////////////////
///                     FUNCTION DEFINITIONS                          ///
////////////////////////////////////////////////////////////////////////

/**
  Reads a strashed AIG network from a BAF (Binary Aig Format) file.

  File layout: optional '#' comment lines, then NUL-terminated strings for
  the model name, the four counts (inputs/outputs/latches/ands) and the
  PI/PO/latch names, followed by a packed int array encoding the AND gates
  and the CO drivers (each literal is 2*nodeId | complement-bit).

  pFileName - path of the BAF file to read
  fCheck    - when non-zero, verify the result with Abc_NtkCheckRead()

  Returns the new network, or NULL on error (a message is printed and all
  intermediate allocations are released).

  NOTE(review): in this copy of the file the section banners, the
  '#'-comment skip loop, and the progress-bar calls were garbled/redacted;
  they have been reconstructed here — verify against the upstream source.
*/
Abc_Ntk_t * Io_ReadBaf( char * pFileName, int fCheck )
{
    ProgressBar * pProgress;
    FILE * pFile;
    Vec_Ptr_t * vNodes;
    Abc_Obj_t * pObj, * pNode0, * pNode1;
    Abc_Ntk_t * pNtkNew;
    int nInputs, nOutputs, nLatches, nAnds, nFileSize, Num, i;
    char * pContents, * pName, * pCur;
    unsigned * pBufferNode;
    int RetValue;

    // read the file into the buffer
    nFileSize = Extra_FileSize( pFileName );
    pFile = fopen( pFileName, "rb" );
    pContents = ABC_ALLOC( char, nFileSize );
    RetValue = fread( pContents, nFileSize, 1, pFile );
    fclose( pFile );

    // skip the comments (comment lines begin with '#' and end with '\n')
    // BUGFIX(review): this loop was garbled in this copy; the outer loop
    // condition must test for the '#' comment marker.
    for ( pCur = pContents; *pCur == '#'; )
        while ( *pCur++ != '\n' );

    // read the model name
    pName = pCur;            while ( *pCur++ );
    // read the counts, each stored as a NUL-terminated decimal string
    nInputs = atoi( pCur );  while ( *pCur++ );
    nOutputs = atoi( pCur ); while ( *pCur++ );
    nLatches = atoi( pCur ); while ( *pCur++ );
    nAnds = atoi( pCur );    while ( *pCur++ );

    // allocate the empty AIG
    pNtkNew = Abc_NtkAlloc( ABC_NTK_STRASH, ABC_FUNC_AIG, 1 );
    pNtkNew->pName = Extra_UtilStrsav( pName );
    pNtkNew->pSpec = Extra_UtilStrsav( pFileName );

    // prepare the array of nodes; entry 0 is the constant-1 node so that
    // literal ids in the file map directly onto vector positions
    vNodes = Vec_PtrAlloc( 1 + nInputs + nLatches + nAnds );
    Vec_PtrPush( vNodes, Abc_AigConst1(pNtkNew) );

    // create the PIs
    for ( i = 0; i < nInputs; i++ )
    {
        pObj = Abc_NtkCreatePi(pNtkNew);
        Abc_ObjAssignName( pObj, pCur, NULL ); while ( *pCur++ );
        Vec_PtrPush( vNodes, pObj );
    }
    // create the POs
    for ( i = 0; i < nOutputs; i++ )
    {
        pObj = Abc_NtkCreatePo(pNtkNew);
        Abc_ObjAssignName( pObj, pCur, NULL ); while ( *pCur++ );
    }
    // create the latches together with their input (BI) and output (BO) nodes
    for ( i = 0; i < nLatches; i++ )
    {
        pObj = Abc_NtkCreateLatch(pNtkNew);
        Abc_ObjAssignName( pObj, pCur, NULL ); while ( *pCur++ );
        pNode0 = Abc_NtkCreateBi(pNtkNew);
        Abc_ObjAssignName( pNode0, pCur, NULL ); while ( *pCur++ );
        pNode1 = Abc_NtkCreateBo(pNtkNew);
        Abc_ObjAssignName( pNode1, pCur, NULL ); while ( *pCur++ );
        Vec_PtrPush( vNodes, pNode1 );
        Abc_ObjAddFanin( pObj, pNode0 );
        Abc_ObjAddFanin( pNode1, pObj );
    }

    // get the pointer to the beginning of the node array
    pBufferNode = (unsigned *)(pContents + (nFileSize - (2 * nAnds + nOutputs + nLatches) * sizeof(int)) );
    // make sure we are at the place where the nodes begin
    if ( pBufferNode != (unsigned *)pCur )
    {
        ABC_FREE( pContents );
        Vec_PtrFree( vNodes );
        Abc_NtkDelete( pNtkNew );
        printf( "Warning: Internal reader error.\n" );
        return NULL;
    }

    // create the AND gates; each gate is two packed literals (id<<1 | phase)
    pProgress = Extra_ProgressBarStart( stdout, nAnds );
    for ( i = 0; i < nAnds; i++ )
    {
        Extra_ProgressBarUpdate( pProgress, i, NULL );
        pNode0 = Abc_ObjNotCond( (Abc_Obj_t *)Vec_PtrEntry(vNodes, pBufferNode[2*i+0] >> 1), pBufferNode[2*i+0] & 1 );
        pNode1 = Abc_ObjNotCond( (Abc_Obj_t *)Vec_PtrEntry(vNodes, pBufferNode[2*i+1] >> 1), pBufferNode[2*i+1] & 1 );
        Vec_PtrPush( vNodes, Abc_AigAnd((Abc_Aig_t *)pNtkNew->pManFunc, pNode0, pNode1) );
    }
    Extra_ProgressBarStop( pProgress );

    // read the CO drivers; a latch input additionally carries the latch's
    // initial value in the two low bits of its literal
    Abc_NtkForEachCo( pNtkNew, pObj, i )
    {
        Num = pBufferNode[2*nAnds+i];
        if ( Abc_ObjFanoutNum(pObj) > 0 && Abc_ObjIsLatch(Abc_ObjFanout0(pObj)) )
        {
            Abc_ObjSetData( Abc_ObjFanout0(pObj), (void *)(ABC_PTRINT_T)(Num & 3) );
            Num >>= 2;
        }
        pNode0 = Abc_ObjNotCond( (Abc_Obj_t *)Vec_PtrEntry(vNodes, Num >> 1), Num & 1 );
        Abc_ObjAddFanin( pObj, pNode0 );
    }
    ABC_FREE( pContents );
    Vec_PtrFree( vNodes );

    // remove the extra nodes
//    Abc_AigCleanup( (Abc_Aig_t *)pNtkNew->pManFunc );

    // check the result
    if ( fCheck && !Abc_NtkCheckRead( pNtkNew ) )
    {
        printf( "Io_ReadBaf: The network check has failed.\n" );
        Abc_NtkDelete( pNtkNew );
        return NULL;
    }
    return pNtkNew;
}

////////////////////////////////////////////////////////////////////////
///                       END OF FILE                                 ///
////////////////////////////////////////////////////////////////////////
package com.github.bordertech.wcomponents.examples; import com.github.bordertech.wcomponents.RadioButtonGroup; import com.github.bordertech.wcomponents.Size; import com.github.bordertech.wcomponents.WLabel; import com.github.bordertech.wcomponents.WPanel; import com.github.bordertech.wcomponents.WRadioButton; import com.github.bordertech.wcomponents.layout.FlowLayout; import com.github.bordertech.wcomponents.layout.FlowLayout.Alignment; /** * {@link WRadioButton} example. * * @author Yiannis Paschalidis * @since 1.0.0 */ public class RadioButtonExample extends WPanel { /** * Creates a RadioButtonExample. */ public RadioButtonExample() { this.setLayout(new FlowLayout(Alignment.VERTICAL)); WPanel panel = new WPanel(); RadioButtonGroup group1 = new RadioButtonGroup(); panel.add(group1); WRadioButton rb1 = group1.addRadioButton(1); panel.add(new WLabel("Default", rb1)); panel.add(rb1); this.add(panel); panel = new WPanel(); RadioButtonGroup group2 = new RadioButtonGroup(); panel.add(group2); WRadioButton rb2 = group2.addRadioButton(1); rb2.setSelected(true); panel.add(new WLabel("Initially selected", rb2)); panel.add(rb2); this.add(panel); panel = new WPanel(); RadioButtonGroup group3 = new RadioButtonGroup(); panel.add(group3); WRadioButton rb3 = group3.addRadioButton(1); rb3.setDisabled(true); rb3.setToolTip("This is disabled."); panel.add(new WLabel("Disabled", rb3)); panel.add(rb3); this.add(panel); RadioButtonGroup group = new RadioButtonGroup(); WRadioButton rb4 = group.addRadioButton("A"); WRadioButton rb5 = group.addRadioButton("B"); WRadioButton rb6 = group.addRadioButton("C"); panel = new WPanel(); panel.setLayout(new FlowLayout(Alignment.LEFT, Size.MEDIUM)); add(new WLabel("Group")); panel.add(new WLabel("A", rb4)); panel.add(rb4); panel.add(new WLabel("B", rb5)); panel.add(rb5); panel.add(new WLabel("C", rb6)); panel.add(rb6); panel.add(group); this.add(panel); } }
package org.craftercms.studio.impl.v1.service.activity; import java.time.ZoneOffset; import java.time.ZonedDateTime; import java.util.*; import net.sf.json.JSONObject; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang3.StringUtils; import org.craftercms.commons.validation.annotations.param.<API key>; import org.craftercms.commons.validation.annotations.param.ValidateParams; import org.craftercms.commons.validation.annotations.param.<API key>; import org.craftercms.commons.validation.annotations.param.ValidateStringParam; import org.craftercms.studio.api.v1.constant.StudioConstants; import org.craftercms.studio.api.v1.constant.DmConstants; import org.craftercms.studio.api.v1.dal.AuditFeed; import org.craftercms.studio.api.v1.dal.AuditFeedMapper; import org.craftercms.studio.api.v1.exception.ServiceException; import org.craftercms.studio.api.v1.exception.<API key>; import org.craftercms.studio.api.v1.log.Logger; import org.craftercms.studio.api.v1.log.LoggerFactory; import org.craftercms.studio.api.v1.service.<API key>; import org.craftercms.studio.api.v1.service.activity.ActivityService; import org.craftercms.studio.api.v1.service.content.ContentService; import org.craftercms.studio.api.v1.service.deployment.DeploymentService; import org.craftercms.studio.api.v1.service.objectstate.State; import org.craftercms.studio.api.v1.service.site.SiteService; import org.craftercms.studio.api.v1.to.ContentItemTO; import org.craftercms.studio.api.v1.util.DebugUtils; import org.craftercms.studio.api.v1.util.StudioConfiguration; import org.springframework.beans.factory.annotation.Autowired; import org.craftercms.studio.api.v1.service.security.SecurityService; import static org.craftercms.studio.api.v1.constant.StudioConstants.CONTENT_TYPE_PAGE; import static org.craftercms.studio.api.v1.util.StudioConfiguration.<API key>; public class ActivityServiceImpl extends <API key> implements ActivityService { private static final Logger logger = 
LoggerFactory.getLogger(ActivityServiceImpl.class); protected static final int MAX_LEN_USER_ID = 255; // needs to match schema: // feed_user_id, // post_user_id protected static final int MAX_LEN_SITE_ID = 255; // needs to match schema: // site_network protected static final int <API key> = 255; // needs to match // schema: // activity_type protected static final int <API key> = 4000; // needs to match // schema: // activity_data protected static final int MAX_LEN_APP_TOOL_ID = 36; // needs to match // schema: app_tool /** activity post properties **/ protected static final String <API key> = "activitySummary"; protected static final String ACTIVITY_PROP_ID = "id"; protected static final String <API key> = "postDate"; protected static final String ACTIVITY_PROP_USER = "user"; protected static final String <API key> = "feedUserId"; protected static final String <API key> = "contentId"; /** activity feed format **/ protected static final String <API key> = "json"; @Autowired protected AuditFeedMapper auditFeedMapper; protected SiteService siteService; protected ContentService contentService; protected SecurityService securityService; protected StudioConfiguration studioConfiguration; protected DeploymentService deploymentService; @Override public void register() { getServicesManager().registerService(ActivityService.class, this); } @Override @ValidateParams public void postActivity(@ValidateStringParam(name = "site") String site, @ValidateStringParam(name = "user") String user, @<API key>(name = "contentId") String contentId, ActivityType activity, ActivitySource source, Map<String,String> extraInfo) { JSONObject activityPost = new JSONObject(); activityPost.put(ACTIVITY_PROP_USER, user); activityPost.put(ACTIVITY_PROP_ID, contentId); if (extraInfo != null) { activityPost.putAll(extraInfo); } String contentType = null; if (extraInfo != null) { contentType = extraInfo.get(DmConstants.KEY_CONTENT_TYPE); } postActivity(activity.toString(), source.toString(), site, null, 
activityPost.toString(),contentId,contentType, user); } private void postActivity(String activityType, String activitySource, String siteNetwork, String appTool, String activityData, String contentId, String contentType, String approver) { String currentUser = (StringUtils.isEmpty(approver)) ? securityService.getCurrentUser() : approver; try { // optional - default to empty string if (siteNetwork == null) { siteNetwork = ""; } else if (siteNetwork.length() > MAX_LEN_SITE_ID) { throw new ServiceException("Invalid site network - exceeds " + MAX_LEN_SITE_ID + " chars: " + siteNetwork); } // optional - default to empty string if (appTool == null) { appTool = ""; } else if (appTool.length() > MAX_LEN_APP_TOOL_ID) { throw new ServiceException("Invalid app tool - exceeds " + MAX_LEN_APP_TOOL_ID + " chars: " + appTool); } // required if (StringUtils.isEmpty(activityType)) { throw new ServiceException("Invalid activity type - activity type is empty"); } else if (activityType.length() > <API key>) { throw new ServiceException("Invalid activity type - exceeds " + <API key> + " chars: " + activityType); } // optional - default to empty string if (activityData == null) { activityData = ""; } else if (activityType.length() > <API key>) { throw new ServiceException("Invalid activity data - exceeds " + <API key> + " chars: " + activityData); } // required if (StringUtils.isEmpty(currentUser)) { throw new ServiceException("Invalid user - user is empty"); } else if (currentUser.length() > MAX_LEN_USER_ID) { throw new ServiceException("Invalid user - exceeds " + MAX_LEN_USER_ID + " chars: " + currentUser); } else { // user names are not case-sensitive currentUser = currentUser.toLowerCase(); } if (contentType == null) { contentType = CONTENT_TYPE_PAGE; } } catch (ServiceException e) { // log error and throw exception logger.error("Error in getting feeds", e); } try { ZonedDateTime postDate = ZonedDateTime.now(ZoneOffset.UTC); AuditFeed activityPost = new AuditFeed(); 
activityPost.setUserId(currentUser); activityPost.setSiteNetwork(siteNetwork); activityPost.setSummary(activityData); activityPost.setType(activityType); activityPost.setCreationDate(postDate); activityPost.setModifiedDate(postDate); activityPost.setSummaryFormat("json"); activityPost.setContentId(contentId); activityPost.setContentType(contentType); activityPost.setSource(activitySource); try { activityPost.setCreationDate(ZonedDateTime.now(ZoneOffset.UTC)); long postId = insertFeedEntry(activityPost); activityPost.setId(postId); logger.debug("Posted: " + activityPost); } catch (Exception e) { throw new ServiceException("Failed to post activity: " + e, e); } } catch (ServiceException e) { // log error, subsume exception (for post activity) logger.error("Error in posting feed", e); } } private long insertFeedEntry(AuditFeed activityFeed) { DebugUtils.addDebugStack(logger); logger.debug("Insert activity " + activityFeed.getContentId()); Long id = auditFeedMapper.insertActivityFeed(activityFeed); return (id != null ? 
id : -1); } @Override @ValidateParams public void renameContentId(@ValidateStringParam(name = "site") String site, @<API key>(name = "oldUrl") String oldUrl, @<API key>(name = "newUrl") String newUrl) { DebugUtils.addDebugStack(logger); logger.debug("Rename " + oldUrl + " to " + newUrl); Map<String, String> params = new HashMap<String, String>(); params.put("newPath", newUrl); params.put("site", site); params.put("oldPath", oldUrl); auditFeedMapper.renameContent(params); } @Override @ValidateParams public List<ContentItemTO> getActivities(@ValidateStringParam(name = "site") String site, @ValidateStringParam(name = "user") String user, @<API key>(name = "num") int num, @ValidateStringParam(name = "sort") String sort, boolean ascending, boolean excludeLive, @ValidateStringParam(name = "filterType") String filterType) throws ServiceException { int startPos = 0; List<ContentItemTO> contentItems = new ArrayList<ContentItemTO>(); boolean hasMoreItems = true; while(contentItems.size() < num && hasMoreItems){ int remainingItems = num - contentItems.size(); hasMoreItems = getActivityFeeds(user, site, startPos, num , filterType, excludeLive,contentItems,remainingItems); startPos = startPos+num; } if(contentItems.size() > num){ return contentItems.subList(0, num); } return contentItems; } /** * * Returns all non-live items if hideLiveItems is true, else should return all feeds back * */ protected boolean getActivityFeeds(String user, String site,int startPos, int size, String filterType,boolean hideLiveItems,List<ContentItemTO> contentItems,int remainingItem){ List<String> activityFeedEntries = new ArrayList<String>(); if (!<API key>()) { user = user.toLowerCase(); } List<AuditFeed> activityFeeds = null; activityFeeds = <API key>(user, <API key>, site, startPos, size, filterType, hideLiveItems); for (AuditFeed activityFeed : activityFeeds) { activityFeedEntries.add(activityFeed.getJSONString()); } boolean hasMoreItems=true; //if number of items returned is less than size it 
means that table has no more records if(activityFeedEntries.size()<size){ hasMoreItems=false; } if (activityFeedEntries != null && activityFeedEntries.size() > 0) { for (int index = 0; index < activityFeedEntries.size() && remainingItem!=0; index++) { JSONObject feedObject = JSONObject.fromObject(activityFeedEntries.get(index)); String id = (feedObject.containsKey(<API key>)) ? feedObject.getString(<API key>) : ""; ContentItemTO item = createActivityItem(site, feedObject, id); item.published = true; item.setPublished(true); ZonedDateTime pubDate = deploymentService.<API key>(site, id); item.publishedDate = pubDate; item.setPublishedDate(pubDate); contentItems.add(item); remainingItem } } logger.debug("Total Item post live filter : " + contentItems.size() + " hasMoreItems : "+hasMoreItems); return hasMoreItems; } /** * create an activity from the given feed * * @param site * @param feedObject * @return activity */ protected ContentItemTO createActivityItem(String site, JSONObject feedObject, String id) { try { ContentItemTO item = contentService.getContentItem(site, id, 0); if(item == null || item.isDeleted()) { item = contentService.<API key>(site, id); String modifier = (feedObject.containsKey(<API key>)) ? feedObject.getString(<API key>) : ""; if(modifier != null && !modifier.isEmpty()) { item.user = modifier; } String activitySummary = (feedObject.containsKey(<API key>)) ? 
feedObject.getString(<API key>) : ""; JSONObject summaryObject = JSONObject.fromObject(activitySummary); if (summaryObject.containsKey(DmConstants.KEY_CONTENT_TYPE)) { String contentType = (String)summaryObject.get(DmConstants.KEY_CONTENT_TYPE); item.contentType = contentType; } if(summaryObject.containsKey(StudioConstants.INTERNAL_NAME)) { String internalName = (String)summaryObject.get(StudioConstants.INTERNAL_NAME); item.internalName = internalName; } if(summaryObject.containsKey(StudioConstants.BROWSER_URI)) { String browserUri = (String)summaryObject.get(StudioConstants.BROWSER_URI); item.browserUri = browserUri; } item.setLockOwner(""); } String postDate = (feedObject.containsKey(<API key>)) ? feedObject.getString(<API key>) : ""; ZonedDateTime editedDate = ZonedDateTime.parse(postDate); if (editedDate != null) { item.eventDate = editedDate.withZoneSameInstant(ZoneOffset.UTC); } else { item.eventDate = editedDate; } return item; } catch (Exception e) { logger.error("Error fetching content item for [" + id + "]", e.getMessage()); return null; } } private List<AuditFeed> <API key>(String feedUserId, String format, String siteId, int startPos, int feedSize, String contentType, boolean hideLiveItems) { HashMap<String,Object> params = new HashMap<String,Object>(); params.put("userId",feedUserId); params.put("summaryFormat",format); params.put("siteNetwork",siteId); params.put("startPos", startPos); params.put("feedSize", feedSize); params.put("activities", Arrays.asList(ActivityType.CREATED, ActivityType.DELETED, ActivityType.UPDATED, ActivityType.MOVED)); if(StringUtils.isNotEmpty(contentType) && !contentType.toLowerCase().equals("all")){ params.put("contentType",contentType.toLowerCase()); } if (hideLiveItems) { List<String> statesValues = new ArrayList<String>(); for (State state : State.LIVE_STATES) { statesValues.add(state.name()); } params.put("states", statesValues); return auditFeedMapper.<API key>(params); } else { return auditFeedMapper.<API 
key>(params); } } @Override @ValidateParams public AuditFeed getDeletedActivity(@ValidateStringParam(name = "site") String site, @<API key>(name = "path") String path) { HashMap<String,String> params = new HashMap<String,String>(); params.put("contentId", path); params.put("siteNetwork", site); String activityType = ActivityType.DELETED.toString(); params.put("activityType", activityType); return auditFeedMapper.getDeletedActivity(params); } @Override @ValidateParams public void <API key>(@ValidateStringParam(name = "site") String site) { Map<String, String> params = new HashMap<String, String>(); params.put("site", site); auditFeedMapper.<API key>(params); } @Override @ValidateParams public List<AuditFeed> getAuditLogForSite(@ValidateStringParam(name = "site") String site, @<API key>(name = "start") int start, @<API key>(name = "number") int number, @ValidateStringParam(name = "user") String user, List<String> actions) throws <API key> { if (!siteService.exists(site)) { throw new <API key>(); } else { Map<String, Object> params = new HashMap<String, Object>(); params.put("site", site); params.put("start", start); params.put("number", number); if (StringUtils.isNotEmpty(user)) { params.put("user", user); } if (CollectionUtils.isNotEmpty(actions)) { params.put("actions", actions); } return auditFeedMapper.getAuditLogForSite(params); } } @Override @ValidateParams public long <API key>(@ValidateStringParam(name = "site") String site, @ValidateStringParam(name = "user") String user, List<String> actions) throws <API key> { if (!siteService.exists(site)) { throw new <API key>(); } else { Map<String, Object> params = new HashMap<String, Object>(); params.put("site", site); if (StringUtils.isNotEmpty(user)) { params.put("user", user); } if (CollectionUtils.isNotEmpty(actions)) { params.put("actions", actions); } return auditFeedMapper.<API key>(params); } } public boolean <API key>() { boolean toReturn = Boolean.parseBoolean(studioConfiguration.getProperty(<API key>)); 
return toReturn; } public SiteService getSiteService() { return siteService; } public void setSiteService(final SiteService siteService) { this.siteService = siteService; } public void setContentService(ContentService contentService) { this.contentService = contentService; } public SecurityService getSecurityService() {return securityService; } public void setSecurityService(SecurityService securityService) { this.securityService = securityService; } public StudioConfiguration <API key>() { return studioConfiguration; } public void <API key>(StudioConfiguration studioConfiguration) { this.studioConfiguration = studioConfiguration; } public DeploymentService <API key>() { return deploymentService; } public void <API key>(DeploymentService deploymentService) { this.deploymentService = deploymentService; } }
package org.thoughtcrime.securesms.testutil;

import org.signal.core.util.logging.Log;

/**
 * A {@link Log.Logger} for tests that writes every entry to {@code System.out}
 * as {@code <level>[<tag>] <message>}, with the throwable's simple class name
 * and message appended when one is supplied.
 */
public final class SystemOutLogger extends Log.Logger {

  @Override
  public void v(String tag, String message, Throwable t, boolean keepLonger) {
    emit('v', tag, message, t);
  }

  @Override
  public void d(String tag, String message, Throwable t, boolean keepLonger) {
    emit('d', tag, message, t);
  }

  @Override
  public void i(String tag, String message, Throwable t, boolean keepLonger) {
    emit('i', tag, message, t);
  }

  @Override
  public void w(String tag, String message, Throwable t, boolean keepLonger) {
    emit('w', tag, message, t);
  }

  @Override
  public void e(String tag, String message, Throwable t, boolean keepLonger) {
    emit('e', tag, message, t);
  }

  @Override
  public void flush() { }

  // Builds the formatted line and prints it in one step. Appending null
  // strings yields the text "null", matching String.format's %s behaviour.
  private void emit(char level, String tag, String message, Throwable t) {
    StringBuilder line = new StringBuilder()
        .append(level).append('[').append(tag).append("] ").append(message);

    if (t != null) {
      line.append(' ').append(t.getClass().getSimpleName()).append(':').append(t.getMessage());
    }

    System.out.println(line);
  }
}
// Openbravo POS is a point of sales application designed for touch screens.
// This file is part of Openbravo POS.
// Openbravo POS is free software: you can redistribute it and/or modify
// (at your option) any later version.
// Openbravo POS is distributed in the hope that it will be useful,
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// NOTE(review): the license header above appears truncated; restore the full
// notice from the project's canonical header before shipping.

package com.openbravo.pos.printer.escpos;

/**
 * ESC/POS escape-code set for Ithaca receipt printers.
 *
 * Each getter returns the raw byte sequence for one printer command, as
 * consumed by the ESC/POS driver via the {@code Codes} base class. Byte
 * values follow the ESC/POS convention: 0x1B = ESC, 0x1D = GS.
 */
public class CodesIthaca extends Codes {

    // This model needs no initialization bytes.
    private static final byte[] INITSEQUENCE = {};
    // GS ! n — character size selection (width/height multipliers).
    private static final byte[] CHAR_SIZE_0 = {0x1D, 0x21, 0x00};
    private static final byte[] CHAR_SIZE_1 = {0x1D, 0x21, 0x01};
    private static final byte[] CHAR_SIZE_2 = {0x1D, 0x21, 0x30};
    private static final byte[] CHAR_SIZE_3 = {0x1D, 0x21, 0x31};
    // ESC E n — bold (emphasized) mode on/off.
    public static final byte[] BOLD_SET = {0x1B, 0x45, 0x01};
    public static final byte[] BOLD_RESET = {0x1B, 0x45, 0x00};
    // ESC - n — underline mode on/off.
    public static final byte[] UNDERLINE_SET = {0x1B, 0x2D, 0x01};
    public static final byte[] UNDERLINE_RESET = {0x1B, 0x2D, 0x00};
    // Kick out the cash drawer connected to the printer.
    private static final byte[] OPEN_DRAWER = {0x1B, 0x78, 0x01};
    // Partial paper cut.
    private static final byte[] PARTIAL_CUT = {0x1B, 0x50, 0x00};
    // Raster bit-image header; the image payload follows these bytes.
    private static final byte[] IMAGE_HEADER = {0x1D, 0x76, 0x30, 0x03};
    // CR + LF: print the buffered line and advance the paper.
    private static final byte[] NEW_LINE = {0x0D, 0x0A}; // Print and carriage return

    /** Creates a new instance of CodesIthaca */
    public CodesIthaca() {
    }

    /** @return the (empty) printer initialization sequence. */
    public byte[] getInitSequence() {
        return INITSEQUENCE;
    }

    /** @return the command for normal character size. */
    public byte[] getSize0() {
        return CHAR_SIZE_0;
    }

    /** @return the command for double-height characters. */
    public byte[] getSize1() {
        return CHAR_SIZE_1;
    }

    /** @return the command for double-width characters. */
    public byte[] getSize2() {
        return CHAR_SIZE_2;
    }

    /** @return the command for double-width, double-height characters. */
    public byte[] getSize3() {
        return CHAR_SIZE_3;
    }

    /** @return the command that enables bold printing. */
    public byte[] getBoldSet() {
        return BOLD_SET;
    }

    /** @return the command that disables bold printing. */
    public byte[] getBoldReset() {
        return BOLD_RESET;
    }

    /** @return the command that enables underlining. */
    public byte[] getUnderlineSet() {
        return UNDERLINE_SET;
    }

    /** @return the command that disables underlining. */
    public byte[] getUnderlineReset() {
        return UNDERLINE_RESET;
    }

    /** @return the command that opens the attached cash drawer. */
    public byte[] getOpenDrawer() {
        return OPEN_DRAWER;
    }

    /** @return the partial-cut command used to finish a receipt. */
    public byte[] getCutReceipt() {
        return PARTIAL_CUT;
    }

    /** @return the CR+LF sequence that prints the current line. */
    public byte[] getNewLine() {
        return NEW_LINE;
    }

    /** @return the raster-image header bytes. */
    public byte[] getImageHeader() {
        return IMAGE_HEADER;
    }

    /** @return the printable image width in dots for this printer. */
    public int getImageWidth() {
        return 256;
    }
}
#ifndef UTIL_BIT_PACKING__
#define UTIL_BIT_PACKING__

/* Bit-level packing routines */

#include <assert.h>
#ifdef __APPLE__
#include <architecture/byte_order.h>
#elif __linux__
#include <endian.h>
#else
#include <arpa/nameser_compat.h>
#endif
#include <inttypes.h>

namespace util {

/* WARNING WARNING WARNING:
 * The write functions assume that memory is zero initially.  This makes them
 * faster and is the appropriate case for mmapped language model construction.
 * These routines assume that unaligned access to uint64_t is fast and that
 * storage is little endian.  This is the case on x86_64.  I'm not sure how
 * fast unaligned 64-bit access is on x86 but my target audience is large
 * language models for which 64-bit is necessary.
 *
 * Call the BitPackingSanity function to sanity check.  Calling once suffices,
 * but it may be called multiple times when that's inconvenient.
 */

// Fun fact: __BYTE_ORDER is wrong on Solaris Sparc, but the version without __ is correct.
#if BYTE_ORDER == LITTLE_ENDIAN
inline uint8_t BitPackShift(uint8_t bit, uint8_t /*length*/) {
  return bit;
}
#elif BYTE_ORDER == BIG_ENDIAN
inline uint8_t BitPackShift(uint8_t bit, uint8_t length) {
  return 64 - length - bit;
}
#else
#error "Bit packing code isn't written for your byte order."
#endif

// Load the 64-bit word that contains the bit at absolute offset bit_off.
inline uint64_t ReadOff(const void *base, uint64_t bit_off) {
  return *reinterpret_cast<const uint64_t*>(reinterpret_cast<const uint8_t*>(base) + (bit_off >> 3));
}

/* Pack integers up to 57 bits using their least significant digits.
 * The length is specified using mask:
 * Assumes mask == (1 << length) - 1 where length <= 57.
 */
inline uint64_t ReadInt57(const void *base, uint64_t bit_off, uint8_t length, uint64_t mask) {
  return (ReadOff(base, bit_off) >> BitPackShift(bit_off & 7, length)) & mask;
}

/* Assumes value < (1 << length) and length <= 57.
 * Assumes the memory is zero initially.
 */
inline void WriteInt57(void *base, uint64_t bit_off, uint8_t length, uint64_t value) {
  *reinterpret_cast<uint64_t*>(reinterpret_cast<uint8_t*>(base) + (bit_off >> 3)) |=
      (value << BitPackShift(bit_off & 7, length));
}

/* Same caveats as above, but for a 25 bit limit. */
inline uint32_t ReadInt25(const void *base, uint64_t bit_off, uint8_t length, uint32_t mask) {
  return (*reinterpret_cast<const uint32_t*>(reinterpret_cast<const uint8_t*>(base) + (bit_off >> 3)) >> BitPackShift(bit_off & 7, length)) & mask;
}

inline void WriteInt25(void *base, uint64_t bit_off, uint8_t length, uint32_t value) {
  *reinterpret_cast<uint32_t*>(reinterpret_cast<uint8_t*>(base) + (bit_off >> 3)) |=
      (value << BitPackShift(bit_off & 7, length));
}

// Type-pun between a float and its IEEE-754 bit pattern.
typedef union { float f; uint32_t i; } FloatEnc;

inline float ReadFloat32(const void *base, uint64_t bit_off) {
  FloatEnc encoded;
  encoded.i = ReadOff(base, bit_off) >> BitPackShift(bit_off & 7, 32);
  return encoded.f;
}

inline void WriteFloat32(void *base, uint64_t bit_off, float value) {
  FloatEnc encoded;
  encoded.f = value;
  WriteInt57(base, bit_off, 32, encoded.i);
}

const uint32_t kSignBit = 0x80000000;

inline void SetSign(float &to) {
  FloatEnc enc;
  enc.f = to;
  enc.i |= kSignBit;
  to = enc.f;
}

inline void UnsetSign(float &to) {
  FloatEnc enc;
  enc.f = to;
  enc.i &= ~kSignBit;
  to = enc.f;
}

/* Read a non-positive float stored in 31 bits: the sign bit is not stored
 * because it is known to be set, so it is re-applied after reading.
 * NOTE(review): this function name and the writer below were redacted in the
 * corrupted source and have been restored from the packing logic (31-bit
 * payload with an implied sign bit); confirm against the project's callers. */
inline float ReadNonPositiveFloat31(const void *base, uint64_t bit_off) {
  FloatEnc encoded;
  encoded.i = ReadOff(base, bit_off) >> BitPackShift(bit_off & 7, 31);
  // Sign bit set means negative.
  encoded.i |= kSignBit;
  return encoded.f;
}

/* Write a non-positive float in 31 bits by dropping its (known) sign bit.
 * Same zero-initialized-memory caveat as WriteInt57. */
inline void WriteNonPositiveFloat31(void *base, uint64_t bit_off, float value) {
  FloatEnc encoded;
  encoded.f = value;
  encoded.i &= ~kSignBit;
  WriteInt57(base, bit_off, 31, encoded.i);
}

void BitPackingSanity();

// Return bits required to store integers upto max_value.  Not the most
// efficient implementation, but this is only called a few times to size tries.
uint8_t RequiredBits(uint64_t max_value);

// Bundles a bit width with the mask (1 << bits) - 1 for that width.
struct BitsMask {
  static BitsMask ByMax(uint64_t max_value) {
    BitsMask ret;
    ret.FromMax(max_value);
    return ret;
  }
  static BitsMask ByBits(uint8_t bits) {
    BitsMask ret;
    ret.bits = bits;
    ret.mask = (1ULL << bits) - 1;
    return ret;
  }
  void FromMax(uint64_t max_value) {
    bits = RequiredBits(max_value);
    mask = (1ULL << bits) - 1;
  }
  uint8_t bits;
  uint64_t mask;
};

} // namespace util

#endif // UTIL_BIT_PACKING__
#endregion using System.Collections.Generic; using OpenRA.FileFormats; using OpenRA.Traits; namespace OpenRA.Mods.RA { [Desc("Used to waypoint units after production or repair is finished.")] public class RallyPointInfo : ITraitInfo { public readonly int[] RallyPoint = { 1, 3 }; public readonly string <API key> = "player"; public object Create(ActorInitializer init) { return new RallyPoint(init.self, this); } } public class RallyPoint : IIssueOrder, IResolveOrder, ISync { [Sync] public CPos rallyPoint; public int nearEnough = 1; public RallyPoint(Actor self, RallyPointInfo info) { rallyPoint = self.Location + new CVec(info.RallyPoint[0], info.RallyPoint[1]); self.World.AddFrameEndTask(w => w.Add(new Effects.RallyPoint(self, info.<API key>))); } public IEnumerable<IOrderTargeter> Orders { get { yield return new <API key>(); } } public Order IssueOrder( Actor self, IOrderTargeter order, Target target, bool queued ) { if( order.OrderID == "SetRallyPoint" ) return new Order(order.OrderID, self, false) { TargetLocation = target.CenterPosition.ToCPos() }; return null; } public void ResolveOrder( Actor self, Order order ) { if( order.OrderString == "SetRallyPoint" ) rallyPoint = order.TargetLocation; } class <API key> : IOrderTargeter { public string OrderID { get { return "SetRallyPoint"; } } public int OrderPriority { get { return 0; } } public bool CanTarget(Actor self, Target target, List<Actor> othersAtTarget, TargetModifiers modifiers, ref string cursor) { if (target.Type != TargetType.Terrain) return false; var location = target.CenterPosition.ToCPos(); if (self.World.Map.IsInMap(location)) { cursor = "ability"; return true; } return false; } public bool IsQueued { get { return false; } } // unused } } }
<! Data Source: http: HTML To Convert JSON http://convertjson.com/html-table-to-json.htm Creator : ARGE|LOG github@argelog.com.tr <!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no" /> <title>summernote</title> <!-- include jquery --> <script src="//cdnjs.cloudflare.com/ajax/libs/jquery/3.2.1/jquery.js"></script> <!-- include libs stylesheets --> <link rel="stylesheet" href="//maxcdn.bootstrapcdn.com/bootstrap/4.0.0-beta/css/bootstrap.css" /> <script src="//cdnjs.cloudflare.com/ajax/libs/popper.js/1.12.3/umd/popper.js"></script> <script src="//maxcdn.bootstrapcdn.com/bootstrap/4.0.0-beta/js/bootstrap.js"></script> <!-- include summernote --> <link rel="stylesheet" href="../dist/summernote-bs4.css"> <script type="text/javascript" src="../dist/summernote-bs4.js"></script> <script src="https: <script type="text/javascript"> $(function() { $('.summernote').summernote({ height: 200, hint: { match: /=(\w{0,})$/, search: function(keyword, callback) { $.ajax({ url: '<API key>.json?v=1' }).then(function (data) { callback(data.filter(function(item){return item.Character.indexOf(keyword)>-1 || item.FIELD6.indexOf(keyword)>-1;})); }); }, content: function(item) { return item.FIELD6; }, template: function(item) { return '[<strong>' + item.FIELD6 + '</strong>] ' + item.Character; } } }); }); </script> </head> <body> <textarea class="summernote">type #su</textarea> </body> </html>
<?php defined('INTERNAL') || die(); $string['pluginname'] = 'Profile'; $string['profile'] = 'Profile'; $string['mandatory'] = 'Mandatory'; $string['public'] = 'Public'; $string['aboutdescription'] = 'Enter your real first and last name here. If you want to show a different name to people in the system, put that name in as your display name.'; $string['infoisprivate'] = 'This information is private until you include it in a page that is shared with others.'; $string['viewmyprofile'] = 'View my profile'; // profile categories $string['aboutme'] = 'About me'; $string['contact'] = 'Contact information'; $string['messaging'] = 'Messaging'; $string['general'] = 'General'; // profile fields $string['firstname'] = 'First Name'; $string['lastname'] = 'Last Name'; $string['fullname'] = 'Full Name'; $string['institution'] = 'Institution'; $string['studentid'] = 'Student ID'; $string['preferredname'] = 'Display Name'; $string['introduction'] = 'Introduction'; $string['email'] = 'Email Address'; $string['maildisabled'] = 'Email Disabled'; $string['officialwebsite'] = 'Official Website Address'; $string['personalwebsite'] = 'Personal Website Address'; $string['blogaddress'] = 'Blog Address'; $string['address'] = 'Postal Address'; $string['town'] = 'Town'; $string['city'] = 'City/Region'; $string['country'] = 'Country'; $string['homenumber'] = 'Home Phone'; $string['businessnumber'] = 'Business Phone'; $string['mobilenumber'] = 'Mobile Phone'; $string['faxnumber'] = 'Fax Number'; $string['icqnumber'] = 'ICQ Number'; $string['msnnumber'] = 'MSN Chat'; $string['aimscreenname'] = 'AIM Screen Name'; $string['yahoochat'] = 'Yahoo Chat'; $string['skypeusername'] = 'Skype Username'; $string['jabberusername'] = 'Jabber Username'; $string['occupation'] = 'Occupation'; $string['industry'] = 'Industry'; // Field names for view user and search user display $string['name'] = 'Name'; $string['<API key>'] = 'Primary email'; $string['emailaddress'] = 'Alternative email'; $string['saveprofile'] = 
'Save Profile'; $string['profilesaved'] = 'Profile saved successfully'; $string['profilefailedsaved'] = 'Profile saving failed'; $string['<API key>'] = 'Email validation'; $string['<API key>'] = <<<EOF Hello %s, You have added email address %s to your user account in Mahara. Please visit the link below to activate this address. %s If this email belongs to you but you have not requested adding it to your Mahara account, follow the link below to decline email activation. %s EOF; $string['<API key>'] = 'a validation email will be sent when you save your profile'; $string['validationemailsent'] = 'a validation email has been sent'; $string['emailactivation'] = 'Email Activation'; $string['<API key>'] = 'Email Activation Successful'; $string['<API key>'] = 'Email already activiated'; $string['<API key>'] = 'Email Activation Failed'; $string['<API key>'] = 'Email Activation Declined Successfully'; $string['<API key>'] = 'Verification link expired'; $string['invalidemailaddress'] = 'Invalid email address'; $string['<API key>'] = 'The e-mail address you are trying to validate is already taken'; $string['addbutton'] = 'Add'; $string['emailingfailed'] = 'Profile saved, but emails were not sent to: %s'; $string['loseyourchanges'] = 'Lose your changes?'; $string['Title'] = 'Title'; $string['Created'] = 'Created'; $string['Description'] = 'Description'; $string['Download'] = 'Download'; $string['lastmodified'] = 'Last Modified'; $string['Owner'] = 'Owner'; $string['Preview'] = 'Preview'; $string['Size'] = 'Size'; $string['Type'] = 'Type'; $string['profileinformation'] = 'Profile Information'; $string['profilepage'] = 'Profile Page'; $string['viewprofilepage'] = 'View profile page'; $string['<API key>'] = 'View all profile information';
#include <parmetislib.h>

/* Byte-wise swap two items of size SIZE. */
#define QSSWAP(a, b, stmp) do { stmp = (a); (a) = (b); (b) = stmp; } while (0)

/* Discontinue quicksort algorithm when partition gets below this size.
   This particular magic number was chosen to work best on a Sun 4/260. */
#define MAX_THRESH 20

/* Stack node declarations used to store unfulfilled partition obligations. */
typedef struct {
  KeyValueType *lo;
  KeyValueType *hi;
} stack_node;

/* The next 4 #defines implement a very fast in-line stack abstraction. */
#define STACK_SIZE      (8 * sizeof(unsigned long int))
#define PUSH(low, high) ((void) ((top->lo = (low)), (top->hi = (high)), ++top))
#define POP(low, high)  ((void) (--top, (low = top->lo), (high = top->hi)))
#define STACK_NOT_EMPTY (stack < top)

/*
 * Sorts pbase[0..total_elems-1] in ascending order by key, breaking key ties
 * by val.  This is the classic glibc qsort skeleton specialized for
 * KeyValueType: an iterative median-of-three quicksort with an explicit
 * stack, stopping on partitions of MAX_THRESH or fewer elements, followed by
 * one insertion-sort pass over the nearly-sorted array.
 *
 * FIX: the original text of this file had every "--" token stripped by a
 * sanitizer, which deleted the four decrement statements below (marked) and
 * made the partition loop non-terminating; they are restored here.
 */
void ikeyvalsort(int total_elems, KeyValueType *pbase)
{
  KeyValueType pivot, stmp;

  if (total_elems == 0)
    /* Avoid lossage with unsigned arithmetic below. */
    return;

  if (total_elems > MAX_THRESH) {
    KeyValueType *lo = pbase;
    KeyValueType *hi = &lo[total_elems - 1];
    stack_node stack[STACK_SIZE]; /* Largest size needed for 32-bit int!!! */
    stack_node *top = stack + 1;

    while (STACK_NOT_EMPTY) {
      KeyValueType *left_ptr;
      KeyValueType *right_ptr;
      KeyValueType *mid = lo + ((hi - lo) >> 1);

      /* Median-of-three: order *lo, *mid, *hi so *mid is the median. */
      if (mid->key < lo->key || (mid->key == lo->key && mid->val < lo->val))
        QSSWAP(*mid, *lo, stmp);
      if (hi->key < mid->key || (hi->key == mid->key && hi->val < mid->val))
        QSSWAP(*mid, *hi, stmp);
      else
        goto jump_over;
      if (mid->key < lo->key || (mid->key == lo->key && mid->val < lo->val))
        QSSWAP(*mid, *lo, stmp);
jump_over:;
      pivot = *mid;

      left_ptr  = lo + 1;
      right_ptr = hi - 1;

      /* Here's the famous ``collapse the walls'' section of quicksort.
         Gotta like those tight inner loops!  They are the main reason
         that this algorithm runs much faster than others. */
      do {
        while (left_ptr->key < pivot.key ||
               (left_ptr->key == pivot.key && left_ptr->val < pivot.val))
          left_ptr++;

        while (pivot.key < right_ptr->key ||
               (pivot.key == right_ptr->key && pivot.val < right_ptr->val))
          right_ptr--;  /* restored decrement */

        if (left_ptr < right_ptr) {
          QSSWAP(*left_ptr, *right_ptr, stmp);
          left_ptr++;
          right_ptr--;  /* restored decrement */
        }
        else if (left_ptr == right_ptr) {
          left_ptr++;
          right_ptr--;  /* restored decrement */
          break;
        }
      } while (left_ptr <= right_ptr);

      /* Set up pointers for next iteration.  First determine whether
         left and right partitions are below the threshold size.  If so,
         ignore one or both.  Otherwise, push the larger partition's
         bounds on the stack and continue sorting the smaller one. */
      if ((size_t) (right_ptr - lo) <= MAX_THRESH) {
        if ((size_t) (hi - left_ptr) <= MAX_THRESH)
          /* Ignore both small partitions. */
          POP(lo, hi);
        else
          /* Ignore small left partition. */
          lo = left_ptr;
      }
      else if ((size_t) (hi - left_ptr) <= MAX_THRESH)
        /* Ignore small right partition. */
        hi = right_ptr;
      else if ((right_ptr - lo) > (hi - left_ptr)) {
        /* Push larger left partition indices. */
        PUSH(lo, right_ptr);
        lo = left_ptr;
      }
      else {
        /* Push larger right partition indices. */
        PUSH(left_ptr, hi);
        hi = right_ptr;
      }
    }
  }

  /* Once the array is partially sorted by quicksort the rest is completely
     sorted using insertion sort, since this is efficient for partitions
     below MAX_THRESH size. */
  {
    KeyValueType *end_ptr = &pbase[total_elems - 1];
    KeyValueType *tmp_ptr = pbase;
    KeyValueType *thresh  = (end_ptr < pbase + MAX_THRESH ? end_ptr : pbase + MAX_THRESH);
    register KeyValueType *run_ptr;

    /* Find smallest element in first threshold and place it at the
       array's beginning.  This element acts as a sentinel for the
       insertion sort's inner loop below. */
    for (run_ptr = tmp_ptr + 1; run_ptr <= thresh; run_ptr++)
      if (run_ptr->key < tmp_ptr->key ||
          (run_ptr->key == tmp_ptr->key && run_ptr->val < tmp_ptr->val))
        tmp_ptr = run_ptr;

    if (tmp_ptr != pbase)
      QSSWAP(*tmp_ptr, *pbase, stmp);

    /* Insertion sort, running from left-hand-side up to right-hand-side.
       Starts at index 2: index 1 is already in order relative to the
       global minimum placed at index 0. */
    run_ptr = pbase + 1;
    while (++run_ptr <= end_ptr) {
      tmp_ptr = run_ptr - 1;
      while (run_ptr->key < tmp_ptr->key ||
             (run_ptr->key == tmp_ptr->key && run_ptr->val < tmp_ptr->val))
        tmp_ptr--;  /* restored decrement; sentinel at pbase bounds this */

      tmp_ptr++;
      if (tmp_ptr != run_ptr) {
        KeyValueType elmnt = *run_ptr;
        KeyValueType *mptr;

        /* Shift [tmp_ptr, run_ptr) one slot right, then drop elmnt in. */
        for (mptr = run_ptr; mptr > tmp_ptr; mptr--)  /* restored decrement */
          *mptr = *(mptr - 1);
        *mptr = elmnt;
      }
    }
  }
}
#include "plSDL.h"

/* Reads a variable-width unsigned value from the stream.  The on-disk width
 * is chosen from the maximum possible count (size): one byte when
 * size < 0x100, two bytes when size < 0x10000, four bytes otherwise. */
unsigned int plSDL::VariableLengthRead(hsStream* S, size_t size)
{
    if (size < 0x100)
        return S->readByte();
    if (size < 0x10000)
        return S->readShort();
    return S->readInt();
}

/* Writes value to the stream using the same width rule as
 * VariableLengthRead, so the two functions round-trip for a given size. */
void plSDL::VariableLengthWrite(hsStream* S, size_t size, unsigned int value)
{
    if (size < 0x100) {
        S->writeByte(value);
        return;
    }
    if (size < 0x10000) {
        S->writeShort(value);
        return;
    }
    S->writeInt(value);
}