code
stringlengths
3
1.05M
repo_name
stringlengths
4
116
path
stringlengths
4
991
language
stringclasses
9 values
license
stringclasses
15 values
size
int32
3
1.05M
/**
 * Mixin providing a three-tier naming scheme: a full name, an optional
 * short name, and an optional abbreviation. Each accessor falls back to
 * the next longer form when its own field is unset.
 */
const NamingMixin = {
  _name: null,
  _shortName: null,
  _abbreviation: null,

  // Full name, or null when never assigned.
  getName() {
    return this._name;
  },

  // Short name; falls back to the full name.
  getShortName() {
    return this._shortName || this.getName();
  },

  // Abbreviation; falls back to the short name (and thus the full name).
  getAbbreviation() {
    return this._abbreviation || this.getShortName();
  },
};

export default NamingMixin;
kjirou/reversi-tactics
src/mixins/NamingMixin.js
JavaScript
apache-2.0
309
from mainapp import create_app

# Module-level application instance built by the application factory;
# WSGI servers import `app` from this module.
app = create_app()

if __name__ == '__main__':
    # Development entry point; binds to all interfaces so the server is
    # reachable from outside the local host (e.g. from a container/VM).
    app.run(host='0.0.0.0')
jonaubf/flask-mongo-testapp
testapp/run.py
Python
apache-2.0
107
/* * Copyright (c) WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.identity.entitlement.filter.callback; import org.apache.commons.codec.binary.Base64; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.wso2.carbon.identity.entitlement.filter.exception.EntitlementFilterException; import javax.servlet.http.HttpServletRequest; public class BasicAuthCallBackHandler extends EntitlementFilterCallBackHandler { private static final Log log = LogFactory.getLog(BasicAuthCallBackHandler.class); public BasicAuthCallBackHandler(HttpServletRequest request) throws EntitlementFilterException { String authHeaderEn = null; if (!(request.getHeader("Authorization") == null || request.getHeader("Authorization").equals("null"))) { authHeaderEn = request.getHeader("Authorization"); String tempArr[] = authHeaderEn.split(" "); if (tempArr.length == 2) { String authHeaderDc = new String(Base64.decodeBase64(tempArr[1].getBytes())); tempArr = authHeaderDc.split(":"); if (tempArr.length == 2) { setUserName(tempArr[0]); } } throw new EntitlementFilterException("Unable to retrieve username from Authorization header"); } } }
wattale/carbon-identity
components/identity/org.wso2.carbon.identity.entitlement.filter/src/main/java/org/wso2/carbon/identity/entitlement/filter/callback/BasicAuthCallBackHandler.java
Java
apache-2.0
1,973
package main

import (
	"io/ioutil"
	"log"
	"os"
	"reflect"
	"regexp"
	"strconv"
	"strings"
	"time"

	"github.com/ActiveState/tail"
	"github.com/ugorji/go/codec"
)

// inputTail is an input plugin that tails a file (fluentd in_tail style),
// optionally parsing each line as JSON or with a user-supplied regexp, and
// periodically persists its read offset to a position file.
type inputTail struct {
	path          string
	format        string
	tag           string
	pos_file      string
	offset        int64
	sync_interval int
	codec         *codec.JsonHandle
	time_key      string
}

// Init configures the plugin from key/value settings: path, format, tag,
// pos_file, sync_interval and (for the "json" format) time_key.
func (self *inputTail) Init(f map[string]string) error {
	self.sync_interval = 2

	value := f["path"]
	if len(value) > 0 {
		self.path = value
	}

	value = f["format"]
	if len(value) > 0 {
		self.format = value
		if value == "json" {
			_codec := codec.JsonHandle{}
			_codec.MapType = reflect.TypeOf(map[string]interface{}(nil))
			self.codec = &_codec

			value = f["time_key"]
			if len(value) > 0 {
				self.time_key = value
			} else {
				self.time_key = "time"
			}
		}
	}

	value = f["tag"]
	if len(value) > 0 {
		self.tag = value
	}

	value = f["pos_file"]
	if len(value) > 0 {
		self.pos_file = value

		str, err := ioutil.ReadFile(self.pos_file)
		if err != nil {
			log.Println("ioutil.ReadFile:", err)
		}

		f, err := os.Open(self.path)
		if err != nil {
			log.Println("os.Open:", err)
		}

		info, err := f.Stat()
		if err != nil {
			log.Println("f.Stat:", err)
			self.offset = 0
		} else {
			// Never resume past EOF: the file may have been truncated or
			// rotated since the offset was recorded.
			offset, _ := strconv.Atoi(string(str))
			if int64(offset) > info.Size() {
				self.offset = info.Size()
			} else {
				self.offset = int64(offset)
			}
		}
	}

	value = f["sync_interval"]
	if len(value) > 0 {
		sync_interval, err := strconv.Atoi(value)
		if err != nil {
			return err
		}
		self.sync_interval = sync_interval
	}

	return nil
}

// Run tails the configured file until the process exits, routing each parsed
// line through the runner and flushing the current offset to pos_file every
// sync_interval seconds.
func (self *inputTail) Run(runner InputRunner) error {
	defer func() {
		if err := recover(); err != nil {
			// BUG FIX: was `logs.Fatalln`, an undefined identifier — the
			// logging package is imported as `log`.
			log.Fatalln("recover panic at err:", err)
		}
	}()

	// Resume from the stored offset when we have one; otherwise start at EOF.
	var seek int
	if self.offset > 0 {
		seek = os.SEEK_SET
	} else {
		seek = os.SEEK_END
	}

	t, err := tail.TailFile(self.path, tail.Config{
		Poll:      true,
		ReOpen:    true,
		Follow:    true,
		MustExist: false,
		Location:  &tail.SeekInfo{int64(self.offset), seek}})
	if err != nil {
		return err
	}

	f, err := os.OpenFile(self.pos_file, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, 0600)
	if err != nil {
		log.Fatalln("os.OpenFile", err)
	}
	defer f.Close()

	// A format wrapped in slashes is a regular expression; rewrite Ruby-style
	// named groups `(?<name>...)` to Go's `(?P<name>...)` syntax.
	var re regexp.Regexp
	if string(self.format[0]) == string("/") || string(self.format[len(self.format)-1]) == string("/") {
		format := strings.Trim(self.format, "/")
		trueformat := regexp.MustCompile("\\(\\?<").ReplaceAllString(format, "(?P<")
		if trueformat != format {
			// BUG FIX: the message labelled this value "pos_file" while
			// actually printing self.path.
			log.Printf("path:%s, format:%s", self.path, trueformat)
		}
		re = *regexp.MustCompile(trueformat)
		self.format = "regexp"
	} else if self.format == "json" {
	}

	tick := time.NewTicker(time.Second * time.Duration(self.sync_interval))
	count := 0

	for {
		select {
		case <-tick.C:
			// Persist the offset only when new lines arrived since the last
			// flush; the offset string always overwrites from position 0.
			if count > 0 {
				offset, err := t.Tell()
				if err != nil {
					log.Println("Tell return error: ", err)
					continue
				}
				str := strconv.Itoa(int(offset))
				_, err = f.WriteAt([]byte(str), 0)
				if err != nil {
					log.Println("f.WriteAt", err)
					return err
				}
				count = 0
			}
		case line := <-t.Lines:
			pack := <-runner.InChan()
			pack.MsgBytes = []byte(line.Text)
			pack.Msg.Tag = self.tag
			pack.Msg.Timestamp = line.Time.Unix()

			if self.format == "regexp" {
				text := re.FindSubmatch([]byte(line.Text))
				if text == nil {
					pack.Recycle()
					continue
				}
				for i, name := range re.SubexpNames() {
					if len(name) > 0 {
						pack.Msg.Data[name] = string(text[i])
					}
				}
			} else if self.format == "json" {
				dec := codec.NewDecoderBytes([]byte(line.Text), self.codec)
				err := dec.Decode(&pack.Msg.Data)
				if err != nil {
					log.Println("json.Unmarshal", err)
					pack.Recycle()
					continue
				} else {
					// Lift the event time out of the decoded payload into the
					// message timestamp when present.
					t, ok := pack.Msg.Data[self.time_key]
					if ok {
						if time, xx := t.(uint64); xx {
							pack.Msg.Timestamp = int64(time)
							delete(pack.Msg.Data, self.time_key)
						} else if time64, oo := t.(int64); oo {
							pack.Msg.Timestamp = time64
							delete(pack.Msg.Data, self.time_key)
						} else {
							log.Println("time is not int64, ", t, " typeof:", reflect.TypeOf(t))
							pack.Recycle()
							continue
						}
					}
				}
			}

			count++
			runner.RouterChan() <- pack
		}
	}

	// NOTE: unreachable — the loop above never exits; kept for parity with
	// the original source.
	err = t.Wait()
	if err != nil {
		return err
	}
	return err
}

func init() {
	RegisterInput("tail", func() interface{} {
		return new(inputTail)
	})
}
hnlq715/gofluent
in_tail.go
Go
apache-2.0
4,521
# Copyright 2012 OpenStack Foundation # Copyright 2013 Nebula Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # """Identity v2 EC2 Credentials action implementations""" import logging import six from cliff import command from cliff import lister from cliff import show from openstackclient.common import utils from openstackclient.i18n import _ # noqa class CreateEC2Creds(show.ShowOne): """Create EC2 credentials""" log = logging.getLogger(__name__ + ".CreateEC2Creds") def get_parser(self, prog_name): parser = super(CreateEC2Creds, self).get_parser(prog_name) parser.add_argument( '--project', metavar='<project>', help=_('Specify a project [admin only]'), ) parser.add_argument( '--user', metavar='<user>', help=_('Specify a user [admin only]'), ) return parser def take_action(self, parsed_args): self.log.debug('take_action(%s)', parsed_args) identity_client = self.app.client_manager.identity if parsed_args.project: project = utils.find_resource( identity_client.tenants, parsed_args.project, ).id else: # Get the project from the current auth project = identity_client.auth_tenant_id if parsed_args.user: user = utils.find_resource( identity_client.users, parsed_args.user, ).id else: # Get the user from the current auth user = identity_client.auth_user_id creds = identity_client.ec2.create(user, project) info = {} info.update(creds._info) return zip(*sorted(six.iteritems(info))) class DeleteEC2Creds(command.Command): """Delete EC2 credentials""" log = logging.getLogger(__name__ + 
'.DeleteEC2Creds') def get_parser(self, prog_name): parser = super(DeleteEC2Creds, self).get_parser(prog_name) parser.add_argument( 'access_key', metavar='<access-key>', help=_('Credentials access key'), ) parser.add_argument( '--user', metavar='<user>', help=_('Specify a user [admin only]'), ) return parser def take_action(self, parsed_args): self.log.debug('take_action(%s)', parsed_args) identity_client = self.app.client_manager.identity if parsed_args.user: user = utils.find_resource( identity_client.users, parsed_args.user, ).id else: # Get the user from the current auth user = identity_client.auth_user_id identity_client.ec2.delete(user, parsed_args.access_key) class ListEC2Creds(lister.Lister): """List EC2 credentials""" log = logging.getLogger(__name__ + '.ListEC2Creds') def get_parser(self, prog_name): parser = super(ListEC2Creds, self).get_parser(prog_name) parser.add_argument( '--user', metavar='<user>', help=_('Specify a user [admin only]'), ) return parser def take_action(self, parsed_args): self.log.debug('take_action(%s)', parsed_args) identity_client = self.app.client_manager.identity if parsed_args.user: user = utils.find_resource( identity_client.users, parsed_args.user, ).id else: # Get the user from the current auth user = identity_client.auth_user_id columns = ('access', 'secret', 'tenant_id', 'user_id') column_headers = ('Access', 'Secret', 'Project ID', 'User ID') data = identity_client.ec2.list(user) return (column_headers, (utils.get_item_properties( s, columns, formatters={}, ) for s in data)) class ShowEC2Creds(show.ShowOne): """Show EC2 credentials""" log = logging.getLogger(__name__ + '.ShowEC2Creds') def get_parser(self, prog_name): parser = super(ShowEC2Creds, self).get_parser(prog_name) parser.add_argument( 'access_key', metavar='<access-key>', help=_('Credentials access key'), ) parser.add_argument( '--user', metavar='<user>', help=_('Specify a user [admin only]'), ) return parser def take_action(self, parsed_args): 
self.log.debug('take_action(%s)', parsed_args) identity_client = self.app.client_manager.identity if parsed_args.user: user = utils.find_resource( identity_client.users, parsed_args.user, ).id else: # Get the user from the current auth user = identity_client.auth_user_id creds = identity_client.ec2.get(user, parsed_args.access_key) info = {} info.update(creds._info) return zip(*sorted(six.iteritems(info)))
varunarya10/python-openstackclient
openstackclient/identity/v2_0/ec2creds.py
Python
apache-2.0
5,662
/*
 * Copyright 2010 JBoss Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.drools.runtime.pipeline;

/**
 * A pipeline {@link Stage} that is both a {@link Receiver} and an
 * {@link Emitter}, i.e. it sits between two other stages. Implementations
 * presumably convert the payload on its way through — confirm against the
 * concrete transformers in this package.
 *
 * <p>This api is experimental and thus the classes and the interfaces returned are subject to change.</p>
 */
public interface Transformer extends Emitter, Receiver, Stage {

}
mariofusco/droolsjbpm-integration
drools-pipeline/src/main/java/org/drools/runtime/pipeline/Transformer.java
Java
apache-2.0
836
/*******************************************************************************
 * Copyright (c) 2015 IBM Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *******************************************************************************/
package com.ibm.ws.lars.rest;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;

import com.ibm.ws.lars.rest.model.Asset;
import com.ibm.ws.lars.rest.model.AssetList;

/**
 * Shared helpers for the LARS REST unit tests.
 */
public class TestUtils {

    /**
     * Reads the specified InputStream and returns a byte array containing all the bytes read.
     */
    public static byte[] slurp(InputStream is) throws IOException {
        ByteArrayOutputStream collected = new ByteArrayOutputStream();
        byte[] chunk = new byte[1024];
        for (int read = is.read(chunk); read != -1; read = is.read(chunk)) {
            collected.write(chunk, 0, read);
        }
        return collected.toByteArray();
    }

    /**
     * Assert that an AssetList contains exactly the given list of assets
     * <p>
     * This method assumes that all assets have an ID and there are no duplicates in the asset list.
     */
    public static void assertAssetList(AssetList list, Asset... assets) {
        // Index the expected assets by ID, rejecting duplicates up front.
        Map<String, Asset> expectedById = new HashMap<>();
        for (Asset expected : assets) {
            if (expectedById.put(expected.get_id(), expected) != null) {
                throw new AssertionError("Duplicate found in list of expected assets:\n" + expected.toJson());
            }
        }

        // Every asset in the actual list must have been expected exactly once.
        for (Asset found : list) {
            if (expectedById.remove(found.get_id()) == null) {
                throw new AssertionError("Unexpected asset found in the asset list:\n" + found.toJson());
            }
        }

        // Anything still in the map was expected but never seen.
        if (!expectedById.isEmpty()) {
            StringBuilder message = new StringBuilder("Assets missing from asset list:\n");
            for (Asset missing : expectedById.values()) {
                message.append(missing.toJson());
                message.append("\n");
            }
            throw new AssertionError(message.toString());
        }
    }
}
antelder/tool.lars
server/src/test/java/com/ibm/ws/lars/rest/TestUtils.java
Java
apache-2.0
2,655
# Copyright 2012 SINA Corporation # Copyright 2014 Cisco Systems, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # """Extracts OpenStack config option info from module(s).""" from __future__ import print_function import argparse import imp import os import re import socket import sys import textwrap from oslo.config import cfg import six import stevedore.named from climate.openstack.common import gettextutils from climate.openstack.common import importutils gettextutils.install('climate') STROPT = "StrOpt" BOOLOPT = "BoolOpt" INTOPT = "IntOpt" FLOATOPT = "FloatOpt" LISTOPT = "ListOpt" DICTOPT = "DictOpt" MULTISTROPT = "MultiStrOpt" OPT_TYPES = { STROPT: 'string value', BOOLOPT: 'boolean value', INTOPT: 'integer value', FLOATOPT: 'floating point value', LISTOPT: 'list value', DICTOPT: 'dict value', MULTISTROPT: 'multi valued', } OPTION_REGEX = re.compile(r"(%s)" % "|".join([STROPT, BOOLOPT, INTOPT, FLOATOPT, LISTOPT, DICTOPT, MULTISTROPT])) PY_EXT = ".py" BASEDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../../")) WORDWRAP_WIDTH = 60 def generate(argv): parser = argparse.ArgumentParser( description='generate sample configuration file', ) parser.add_argument('-m', dest='modules', action='append') parser.add_argument('-l', dest='libraries', action='append') parser.add_argument('srcfiles', nargs='*') parsed_args = parser.parse_args(argv) mods_by_pkg = dict() for filepath in parsed_args.srcfiles: pkg_name = filepath.split(os.sep)[1] 
mod_str = '.'.join(['.'.join(filepath.split(os.sep)[:-1]), os.path.basename(filepath).split('.')[0]]) mods_by_pkg.setdefault(pkg_name, list()).append(mod_str) # NOTE(lzyeval): place top level modules before packages pkg_names = sorted(pkg for pkg in mods_by_pkg if pkg.endswith(PY_EXT)) ext_names = sorted(pkg for pkg in mods_by_pkg if pkg not in pkg_names) pkg_names.extend(ext_names) # opts_by_group is a mapping of group name to an options list # The options list is a list of (module, options) tuples opts_by_group = {'DEFAULT': []} if parsed_args.modules: for module_name in parsed_args.modules: module = _import_module(module_name) if module: for group, opts in _list_opts(module): opts_by_group.setdefault(group, []).append((module_name, opts)) # Look for entry points defined in libraries (or applications) for # option discovery, and include their return values in the output. # # Each entry point should be a function returning an iterable # of pairs with the group name (or None for the default group) # and the list of Opt instances for that group. 
if parsed_args.libraries: loader = stevedore.named.NamedExtensionManager( 'oslo.config.opts', names=list(set(parsed_args.libraries)), invoke_on_load=False, ) for ext in loader: for group, opts in ext.plugin(): opt_list = opts_by_group.setdefault(group or 'DEFAULT', []) opt_list.append((ext.name, opts)) for pkg_name in pkg_names: mods = mods_by_pkg.get(pkg_name) mods.sort() for mod_str in mods: if mod_str.endswith('.__init__'): mod_str = mod_str[:mod_str.rfind(".")] mod_obj = _import_module(mod_str) if not mod_obj: raise RuntimeError("Unable to import module %s" % mod_str) for group, opts in _list_opts(mod_obj): opts_by_group.setdefault(group, []).append((mod_str, opts)) print_group_opts('DEFAULT', opts_by_group.pop('DEFAULT', [])) for group in sorted(opts_by_group.keys()): print_group_opts(group, opts_by_group[group]) def _import_module(mod_str): try: if mod_str.startswith('bin.'): imp.load_source(mod_str[4:], os.path.join('bin', mod_str[4:])) return sys.modules[mod_str[4:]] else: return importutils.import_module(mod_str) except Exception as e: sys.stderr.write("Error importing module %s: %s\n" % (mod_str, str(e))) return None def _is_in_group(opt, group): "Check if opt is in group." for value in group._opts.values(): # NOTE(llu): Temporary workaround for bug #1262148, wait until # newly released oslo.config support '==' operator. if not(value['opt'] != opt): return True return False def _guess_groups(opt, mod_obj): # is it in the DEFAULT group? if _is_in_group(opt, cfg.CONF): return 'DEFAULT' # what other groups is it in? for value in cfg.CONF.values(): if isinstance(value, cfg.CONF.GroupAttr): if _is_in_group(opt, value._group): return value._group.name raise RuntimeError( "Unable to find group for option %s, " "maybe it's defined twice in the same group?" 
% opt.name ) def _list_opts(obj): def is_opt(o): return (isinstance(o, cfg.Opt) and not isinstance(o, cfg.SubCommandOpt)) opts = list() for attr_str in dir(obj): attr_obj = getattr(obj, attr_str) if is_opt(attr_obj): opts.append(attr_obj) elif (isinstance(attr_obj, list) and all(map(lambda x: is_opt(x), attr_obj))): opts.extend(attr_obj) ret = {} for opt in opts: ret.setdefault(_guess_groups(opt, obj), []).append(opt) return ret.items() def print_group_opts(group, opts_by_module): print("[%s]" % group) print('') for mod, opts in opts_by_module: print('#') print('# Options defined in %s' % mod) print('#') print('') for opt in opts: _print_opt(opt) print('') def _get_my_ip(): try: csock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) csock.connect(('8.8.8.8', 80)) (addr, port) = csock.getsockname() csock.close() return addr except socket.error: return None def _sanitize_default(name, value): """Set up a reasonably sensible default for pybasedir, my_ip and host.""" if value.startswith(sys.prefix): # NOTE(jd) Don't use os.path.join, because it is likely to think the # second part is an absolute pathname and therefore drop the first # part. 
value = os.path.normpath("/usr/" + value[len(sys.prefix):]) elif value.startswith(BASEDIR): return value.replace(BASEDIR, '/usr/lib/python/site-packages') elif BASEDIR in value: return value.replace(BASEDIR, '') elif value == _get_my_ip(): return '10.0.0.1' elif value in (socket.gethostname(), socket.getfqdn()) and 'host' in name: return 'climate' elif value.strip() != value: return '"%s"' % value return value def _print_opt(opt): opt_name, opt_default, opt_help = opt.dest, opt.default, opt.help if not opt_help: sys.stderr.write('WARNING: "%s" is missing help string.\n' % opt_name) opt_help = "" opt_type = None try: opt_type = OPTION_REGEX.search(str(type(opt))).group(0) except (ValueError, AttributeError) as err: sys.stderr.write("%s\n" % str(err)) sys.exit(1) opt_help = u'%s (%s)' % (opt_help, OPT_TYPES[opt_type]) print('#', "\n# ".join(textwrap.wrap(opt_help, WORDWRAP_WIDTH))) if opt.deprecated_opts: for deprecated_opt in opt.deprecated_opts: if deprecated_opt.name: deprecated_group = (deprecated_opt.group if deprecated_opt.group else "DEFAULT") print('# Deprecated group/name - [%s]/%s' % (deprecated_group, deprecated_opt.name)) try: if opt_default is None: print('#%s=<None>' % opt_name) elif opt_type == STROPT: assert(isinstance(opt_default, six.string_types)) print('#%s=%s' % (opt_name, _sanitize_default(opt_name, opt_default))) elif opt_type == BOOLOPT: assert(isinstance(opt_default, bool)) print('#%s=%s' % (opt_name, str(opt_default).lower())) elif opt_type == INTOPT: assert(isinstance(opt_default, int) and not isinstance(opt_default, bool)) print('#%s=%s' % (opt_name, opt_default)) elif opt_type == FLOATOPT: assert(isinstance(opt_default, float)) print('#%s=%s' % (opt_name, opt_default)) elif opt_type == LISTOPT: assert(isinstance(opt_default, list)) print('#%s=%s' % (opt_name, ','.join(opt_default))) elif opt_type == DICTOPT: assert(isinstance(opt_default, dict)) opt_default_strlist = [str(key) + ':' + str(value) for (key, value) in opt_default.items()] 
print('#%s=%s' % (opt_name, ','.join(opt_default_strlist))) elif opt_type == MULTISTROPT: assert(isinstance(opt_default, list)) if not opt_default: opt_default = [''] for default in opt_default: print('#%s=%s' % (opt_name, default)) print('') except Exception: sys.stderr.write('Error in option "%s"\n' % opt_name) sys.exit(1) def main(): generate(sys.argv[1:]) if __name__ == '__main__': main()
frossigneux/blazar
climate/openstack/common/config/generator.py
Python
apache-2.0
10,412
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.management.internal.cli.commands;

import java.util.List;
import java.util.Set;

import org.springframework.shell.core.annotation.CliCommand;
import org.springframework.shell.core.annotation.CliOption;

import org.apache.geode.cache.execute.ResultCollector;
import org.apache.geode.distributed.DistributedMember;
import org.apache.geode.management.cli.CliMetaData;
import org.apache.geode.management.cli.ConverterHint;
import org.apache.geode.management.cli.Result;
import org.apache.geode.management.internal.cli.CliUtil;
import org.apache.geode.management.internal.cli.domain.SubscriptionQueueSizeResult;
import org.apache.geode.management.internal.cli.functions.GetSubscriptionQueueSizeFunction;
import org.apache.geode.management.internal.cli.i18n.CliStrings;
import org.apache.geode.management.internal.cli.result.ResultBuilder;
import org.apache.geode.management.internal.security.ResourceOperation;
import org.apache.geode.security.ResourcePermission;

/**
 * Gfsh command that counts durable-CQ subscription-queue events for a durable
 * client, by executing {@link GetSubscriptionQueueSizeFunction} on the
 * targeted members and tabulating the results.
 */
public class CountDurableCQEventsCommand extends InternalGfshCommand {
  DurableClientCommandsResultBuilder builder = new DurableClientCommandsResultBuilder();

  @CliCommand(value = CliStrings.COUNT_DURABLE_CQ_EVENTS, help = CliStrings.COUNT_DURABLE_CQ_EVENTS__HELP)
  @CliMetaData()
  @ResourceOperation(resource = ResourcePermission.Resource.CLUSTER,
      operation = ResourcePermission.Operation.READ)
  public Result countDurableCqEvents(
      @CliOption(key = CliStrings.COUNT_DURABLE_CQ_EVENTS__DURABLE__CLIENT__ID, mandatory = true,
          help = CliStrings.COUNT_DURABLE_CQ_EVENTS__DURABLE__CLIENT__ID__HELP) final String durableClientId,
      @CliOption(key = CliStrings.COUNT_DURABLE_CQ_EVENTS__DURABLE__CQ__NAME,
          help = CliStrings.COUNT_DURABLE_CQ_EVENTS__DURABLE__CQ__NAME__HELP) final String cqName,
      @CliOption(key = {CliStrings.MEMBER, CliStrings.MEMBERS},
          help = CliStrings.COUNT_DURABLE_CQ_EVENTS__MEMBER__HELP,
          optionContext = ConverterHint.MEMBERIDNAME) final String[] memberNameOrId,
      @CliOption(key = {CliStrings.GROUP, CliStrings.GROUPS},
          help = CliStrings.COUNT_DURABLE_CQ_EVENTS__GROUP__HELP,
          optionContext = ConverterHint.MEMBERGROUP) final String[] group) {
    Result result;
    try {
      // Resolve target members from --member/--group filters; empty means
      // nothing matched and the command fails with a user error.
      Set<DistributedMember> targetMembers = findMembers(group, memberNameOrId);

      if (targetMembers.isEmpty()) {
        return ResultBuilder.createUserErrorResult(CliStrings.NO_MEMBERS_FOUND_MESSAGE);
      }

      // Function arguments: [0] durable client id, [1] optional CQ name.
      String[] params = new String[2];
      params[0] = durableClientId;
      params[1] = cqName;
      final ResultCollector<?, ?> rc =
          CliUtil.executeFunction(new GetSubscriptionQueueSizeFunction(), params, targetMembers);
      // NOTE(review): unchecked cast — assumes the function only ever returns
      // SubscriptionQueueSizeResult entries; confirm against the function impl.
      final List<SubscriptionQueueSizeResult> funcResults =
          (List<SubscriptionQueueSizeResult>) rc.getResult();

      // Column header names the CQ when one was given, else the client id.
      String queueSizeColumnName;
      if (cqName != null && !cqName.isEmpty()) {
        queueSizeColumnName = CliStrings
            .format(CliStrings.COUNT_DURABLE_CQ_EVENTS__SUBSCRIPTION__QUEUE__SIZE__CLIENT, cqName);
      } else {
        queueSizeColumnName = CliStrings.format(
            CliStrings.COUNT_DURABLE_CQ_EVENTS__SUBSCRIPTION__QUEUE__SIZE__CLIENT, durableClientId);
      }
      result = builder.buildTableResultForQueueSize(funcResults, queueSizeColumnName);
    } catch (Exception e) {
      // Surface any failure as a gfsh error result rather than propagating.
      result = ResultBuilder.createGemFireErrorResult(e.getMessage());
    }
    return result;
  }
}
pdxrunner/geode
geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/CountDurableCQEventsCommand.java
Java
apache-2.0
4,255
/** * @author Oleksandr Prunyak (987456987p@gmail.com) * @version $Id$ * @since 0.1 */ package ru.job4j.loop;
Alex0889/oprunyak
chapter_001/src/test/java/ru/job4j/loop/package-info.java
Java
apache-2.0
110
/**
 * @license
 * Copyright 2018 Palantir Technologies, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import * as Lint from "../../index";

// tslint:disable: object-literal-sort-keys

// Documentation examples for the `prefer-while` rule: snippets under `pass`
// must produce no lint failures with the given `config`; snippets under
// `fail` must produce at least one.
export const codeExamples = [
    {
        description: "Prefer `while` loops instead of `for` loops without an initializer and incrementor.",
        config: Lint.Utils.dedent` "rules": { "prefer-while": true } `,
        pass: Lint.Utils.dedent` for(let i = 1; i < 10; i++) { console.log(i); } for (let i = 0; i < 10; i+=1) { console.log(i); } for (let i = 0; i < 10;) { i += 1; } `,
        fail: Lint.Utils.dedent` for(;;) { console.log('Hello World'); } for(;true===true;) { console.log('Hello World'); } `,
    },
];
andy-hanson/tslint
src/rules/code-examples/preferWhile.examples.ts
TypeScript
apache-2.0
1,470
/*
 * Copyright 2021 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "Firestore/core/src/local/memory_persistence.h"

#include "Firestore/core/test/unit/local/bundle_cache_test.h"
#include "Firestore/core/test/unit/local/persistence_testing.h"

namespace firebase {
namespace firestore {
namespace local {
namespace {

// Factory handed to the parameterized BundleCacheTest suite; each test case
// receives a fresh in-memory persistence instance with eager GC.
std::unique_ptr<Persistence> PersistenceFactory() {
  return MemoryPersistenceWithEagerGcForTesting();
}

}  // namespace

// Instantiate the shared BundleCacheTest suite against the memory-backed
// persistence implementation.
INSTANTIATE_TEST_SUITE_P(MemoryBundleCacheTest,
                         BundleCacheTest,
                         testing::Values(PersistenceFactory));

}  // namespace local
}  // namespace firestore
}  // namespace firebase
firebase/firebase-ios-sdk
Firestore/core/test/unit/local/memory_bundle_cache_test.cc
C++
apache-2.0
1,204
<?php /** * Copyright 2011 Crucial Web Studio, LLC or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * https://raw.githubusercontent.com/chargely/chargify-sdk-php/master/LICENSE.md * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ namespace Crucial\Service; use GuzzleHttp\Client; use GuzzleHttp\Psr7; use GuzzleHttp\Psr7\Request; use GuzzleHttp\Psr7\Response; use GuzzleHttp\Exception\RequestException; use GuzzleHttp\HandlerStack; use Crucial\Service\Chargify\Exception\BadMethodCallException; use Crucial\Service\Chargify\Adjustment; use Crucial\Service\Chargify\Charge; use Crucial\Service\Chargify\Component; use Crucial\Service\Chargify\Coupon; use Crucial\Service\Chargify\Customer; use Crucial\Service\Chargify\Event; use Crucial\Service\Chargify\Product; use Crucial\Service\Chargify\Refund; use Crucial\Service\Chargify\Statement; use Crucial\Service\Chargify\Stats; use Crucial\Service\Chargify\Subscription; use Crucial\Service\Chargify\Transaction; use Crucial\Service\Chargify\Webhook; class Chargify { /** * Version */ const VERSION = '0.1.1'; /** * Guzzle http client * * @var Client */ private $httpClient; /** * The complete hostname; e.g. "my-app-subdomain.chargify.com", * not just "my-app-subdomain" * * @var string */ protected $hostname; /** * Your http authentication password. The password is always "x". 
* * @var string */ protected $password = 'x'; /** * Your api key * * @var string */ protected $apiKey; /** * Shared key * * @var string */ protected $sharedKey; /** * Timeout * * @var int */ protected $timeout = 10; /* * json * * @var string */ protected $format = 'json'; /** * Config used in constructor. * * @var array */ protected $config; /** * @var Response|false|null */ protected $lastResponse; /** * Initialize the service * * @param array $config */ public function __construct($config) { // store a copy $this->config = $config; // set individual properties $this->hostname = trim($config['hostname'], '/'); $this->apiKey = $config['api_key']; $this->sharedKey = $config['shared_key']; if (!empty($config['timeout'])) { $this->timeout = $config['timeout']; } $this->httpClient = new Client([ 'base_uri' => 'https://' . $this->hostname . '/', 'handler' => HandlerStack::create(), 'timeout' => $this->timeout, 'allow_redirects' => false, 'auth' => [$this->apiKey, $this->password], 'headers' => [ 'User-Agent' => 'chargify-sdk-php/' . self::VERSION . ' (https://github.com/chargely/chargify-sdk-php)', 'Content-Type' => 'application/' . $this->format ] ]); } /** * @return Client */ public function getHttpClient() { return $this->httpClient; } /** * Returns config sent in constructor * * @return array */ public function getConfig() { return $this->config; } /** * Send the request to Chargify * * @param string $path URL path we are requesting such as: /subscriptions/<subscription_id>/adjustments * @param string $method GET, POST, PUT, DELETE * @param string $rawData * @param array $params * * @return Response|FALSE Response object or FALSE if there was no response (networking error, timeout, etc.) */ public function request($path, $method, $rawData = null, $params = []) { $method = strtoupper($method); $path = ltrim($path, '/'); $path = $path . '.' . 
$this->format; $client = $this->getHttpClient(); $method = strtoupper($method); $options = [ 'query' => $params, 'body' => null, ]; $request = new Request($method, $path); if (in_array($method, array('POST', 'PUT'))) { if (null === $rawData) { throw new BadMethodCallException('You must send raw data in a POST or PUT request'); } } if (!empty($rawData)) { $options['body'] = Psr7\stream_for($rawData); } try { $response = $client->send($request, $options); } catch (RequestException $e) { if ($e->hasResponse()) { $response = $e->getResponse(); } else { $response = false; } } $this->lastResponse = $response; return $response; } /** * @return Response */ public function getLastResponse() { return $this->lastResponse; } /** * Helper for instantiating an instance of Customer * * @return Customer */ public function customer() { return new Customer($this); } /** * Helper for instantiating an instance of Subscription * * @return Subscription */ public function subscription() { return new Subscription($this); } /** * Helper for instantiating an instance of Product * * @return Product */ public function product() { return new Product($this); } /** * Helper for instantiating an instance of Adjustment * * @return Adjustment */ public function adjustment() { return new Adjustment($this); } /** * Helper for instantiating an instance of Charge * * @return Charge */ public function charge() { return new Charge($this); } /** * Helper for instantiating an instance of Component * * @return Component */ public function component() { return new Component($this); } /** * Helper for instantiating an instance of Coupon * * @return Coupon */ public function coupon() { return new Coupon($this); } /** * Helper for instantiating an instance of Transaction * * @return Transaction */ public function transaction() { return new Transaction($this); } /** * Helper for instantiating an instance of Refund * * @return Refund */ public function refund() { return new Refund($this); } /** * Helper for 
instantiating an instance of Statement * * @return Statement */ public function statement() { return new Statement($this); } /** * Helper for instantiating an instance of Event * * @return Event */ public function event() { return new Event($this); } /** * Helper for instantiating an instance of Webhook * * @return Webhook */ public function webhook() { return new Webhook($this); } /** * Helper for instantiating an instance of Stats * * @return Stats */ public function stats() { return new Stats($this); } }
chargely/chargify-sdk-php
src/Crucial/Service/Chargify.php
PHP
apache-2.0
7,766
/* * Copyright 2008 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.template.soy.exprtree; /** * Container of nodes representing operators. * * <p> Important: Do not use outside of Soy code (treat as superpackage-private). * * @author Kai Huang */ public class OperatorNodes { private OperatorNodes() {} /** * Node representing the unary '-' (negative) operator. */ public static class NegativeOpNode extends AbstractOperatorNode { public NegativeOpNode() { super(Operator.NEGATIVE); } protected NegativeOpNode(NegativeOpNode orig) { super(orig); } @Override public Kind getKind() { return Kind.NEGATIVE_OP_NODE; } @Override public NegativeOpNode clone() { return new NegativeOpNode(this); } } /** * Node representing the 'not' operator. */ public static class NotOpNode extends AbstractOperatorNode { public NotOpNode() { super(Operator.NOT); } protected NotOpNode(NotOpNode orig) { super(orig); } @Override public Kind getKind() { return Kind.NOT_OP_NODE; } @Override public NotOpNode clone() { return new NotOpNode(this); } } /** * Node representing the '*' (times) operator. */ public static class TimesOpNode extends AbstractOperatorNode { public TimesOpNode() { super(Operator.TIMES); } protected TimesOpNode(TimesOpNode orig) { super(orig); } @Override public Kind getKind() { return Kind.TIMES_OP_NODE; } @Override public TimesOpNode clone() { return new TimesOpNode(this); } } /** * Node representing the '/' (divde by) operator. 
*/ public static class DivideByOpNode extends AbstractOperatorNode { public DivideByOpNode() { super(Operator.DIVIDE_BY); } protected DivideByOpNode(DivideByOpNode orig) { super(orig); } @Override public Kind getKind() { return Kind.DIVIDE_BY_OP_NODE; } @Override public DivideByOpNode clone() { return new DivideByOpNode(this); } } /** * Node representing the '%' (mod) operator. */ public static class ModOpNode extends AbstractOperatorNode { public ModOpNode() { super(Operator.MOD); } protected ModOpNode(ModOpNode orig) { super(orig); } @Override public Kind getKind() { return Kind.MOD_OP_NODE; } @Override public ModOpNode clone() { return new ModOpNode(this); } } /** * Node representing the '+' (plus) operator. */ public static class PlusOpNode extends AbstractOperatorNode { public PlusOpNode() { super(Operator.PLUS); } protected PlusOpNode(PlusOpNode orig) { super(orig); } @Override public Kind getKind() { return Kind.PLUS_OP_NODE; } @Override public PlusOpNode clone() { return new PlusOpNode(this); } } /** * Node representing the binary '-' (minus) operator. */ public static class MinusOpNode extends AbstractOperatorNode { public MinusOpNode() { super(Operator.MINUS); } protected MinusOpNode(MinusOpNode orig) { super(orig); } @Override public Kind getKind() { return Kind.MINUS_OP_NODE; } @Override public MinusOpNode clone() { return new MinusOpNode(this); } } /** * Node representing the '&lt;' (less than) operator. */ public static class LessThanOpNode extends AbstractOperatorNode { public LessThanOpNode() { super(Operator.LESS_THAN); } protected LessThanOpNode(LessThanOpNode orig) { super(orig); } @Override public Kind getKind() { return Kind.LESS_THAN_OP_NODE; } @Override public LessThanOpNode clone() { return new LessThanOpNode(this); } } /** * Node representing the '&gt;' (greater than) operator. 
*/ public static class GreaterThanOpNode extends AbstractOperatorNode { public GreaterThanOpNode() { super(Operator.GREATER_THAN); } protected GreaterThanOpNode(GreaterThanOpNode orig) { super(orig); } @Override public Kind getKind() { return Kind.GREATER_THAN_OP_NODE; } @Override public GreaterThanOpNode clone() { return new GreaterThanOpNode(this); } } /** * Node representing the '&lt;=' (less than or equal) operator. */ public static class LessThanOrEqualOpNode extends AbstractOperatorNode { public LessThanOrEqualOpNode() { super(Operator.LESS_THAN_OR_EQUAL); } protected LessThanOrEqualOpNode(LessThanOrEqualOpNode orig) { super(orig); } @Override public Kind getKind() { return Kind.LESS_THAN_OR_EQUAL_OP_NODE; } @Override public LessThanOrEqualOpNode clone() { return new LessThanOrEqualOpNode(this); } } /** * Node representing the '&gt;=' (greater than or equal) operator. */ public static class GreaterThanOrEqualOpNode extends AbstractOperatorNode { public GreaterThanOrEqualOpNode() { super(Operator.GREATER_THAN_OR_EQUAL); } protected GreaterThanOrEqualOpNode(GreaterThanOrEqualOpNode orig) { super(orig); } @Override public Kind getKind() { return Kind.GREATER_THAN_OR_EQUAL_OP_NODE; } @Override public GreaterThanOrEqualOpNode clone() { return new GreaterThanOrEqualOpNode(this); } } /** * Node representing the '==' (equal) operator. */ public static class EqualOpNode extends AbstractOperatorNode { public EqualOpNode() { super(Operator.EQUAL); } protected EqualOpNode(EqualOpNode orig) { super(orig); } @Override public Kind getKind() { return Kind.EQUAL_OP_NODE; } @Override public EqualOpNode clone() { return new EqualOpNode(this); } } /** * Node representing the '!=' (not equal) operator. 
*/ public static class NotEqualOpNode extends AbstractOperatorNode { public NotEqualOpNode() { super(Operator.NOT_EQUAL); } protected NotEqualOpNode(NotEqualOpNode orig) { super(orig); } @Override public Kind getKind() { return Kind.NOT_EQUAL_OP_NODE; } @Override public NotEqualOpNode clone() { return new NotEqualOpNode(this); } } /** * Node representing the 'and' operator. */ public static class AndOpNode extends AbstractOperatorNode { public AndOpNode() { super(Operator.AND); } protected AndOpNode(AndOpNode orig) { super(orig); } @Override public Kind getKind() { return Kind.AND_OP_NODE; } @Override public AndOpNode clone() { return new AndOpNode(this); } } /** * Node representing the 'or' operator. */ public static class OrOpNode extends AbstractOperatorNode { public OrOpNode() { super(Operator.OR); } protected OrOpNode(OrOpNode orig) { super(orig); } @Override public Kind getKind() { return Kind.OR_OP_NODE; } @Override public OrOpNode clone() { return new OrOpNode(this); } } /** * Node representing the ternary '? :' (conditional) operator. */ public static class ConditionalOpNode extends AbstractOperatorNode { public ConditionalOpNode() { super(Operator.CONDITIONAL); } protected ConditionalOpNode(ConditionalOpNode orig) { super(orig); } @Override public Kind getKind() { return Kind.CONDITIONAL_OP_NODE; } @Override public ConditionalOpNode clone() { return new ConditionalOpNode(this); } } }
Digaku/closure-template
java/src/com/google/template/soy/exprtree/OperatorNodes.java
Java
apache-2.0
7,510
/* * Copyright 2022 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.thoughtworks.go.remote.work.artifact; import com.thoughtworks.go.plugin.access.artifact.ArtifactExtensionConstants; import com.thoughtworks.go.plugin.api.request.GoApiRequest; import com.thoughtworks.go.plugin.api.response.DefaultGoApiResponse; import com.thoughtworks.go.plugin.api.response.GoApiResponse; import com.thoughtworks.go.plugin.infra.GoPluginApiRequestProcessor; import com.thoughtworks.go.plugin.infra.plugininfo.GoPluginDescriptor; import com.thoughtworks.go.remote.work.artifact.ConsoleLogMessage.LogLevel; import com.thoughtworks.go.util.command.*; import com.thoughtworks.go.work.GoPublisher; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import static java.lang.String.format; public class ArtifactRequestProcessor implements GoPluginApiRequestProcessor { private static final List<String> goSupportedVersions = ArtifactExtensionConstants.SUPPORTED_VERSIONS; private final SafeOutputStreamConsumer safeOutputStreamConsumer; private final ProcessType processType; private enum ProcessType { FETCH, PUBLISH } private static final Map<LogLevel, String> FETCH_ARTIFACT_LOG_LEVEL_TAG = new HashMap<LogLevel, String>() {{ put(LogLevel.INFO, TaggedStreamConsumer.OUT); put(LogLevel.ERROR, TaggedStreamConsumer.ERR); }}; private static final Map<LogLevel, String> PUBLISH_ARTIFACT_LOG_LEVEL_TAG = new HashMap<LogLevel, String>() {{ 
put(LogLevel.INFO, TaggedStreamConsumer.PUBLISH); put(LogLevel.ERROR, TaggedStreamConsumer.PUBLISH_ERR); }}; private ArtifactRequestProcessor(GoPublisher publisher, ProcessType processType, EnvironmentVariableContext environmentVariableContext) { CompositeConsumer errorStreamConsumer = new CompositeConsumer(CompositeConsumer.ERR, publisher); CompositeConsumer outputStreamConsumer = new CompositeConsumer(CompositeConsumer.OUT, publisher); this.safeOutputStreamConsumer = new SafeOutputStreamConsumer(new ProcessOutputStreamConsumer(errorStreamConsumer, outputStreamConsumer)); safeOutputStreamConsumer.addSecrets(environmentVariableContext.secrets()); this.processType = processType; } public static ArtifactRequestProcessor forFetchArtifact(GoPublisher goPublisher, EnvironmentVariableContext environmentVariableContext) { return new ArtifactRequestProcessor(goPublisher, ProcessType.FETCH, environmentVariableContext); } public static ArtifactRequestProcessor forPublishArtifact(GoPublisher goPublisher, EnvironmentVariableContext environmentVariableContext) { return new ArtifactRequestProcessor(goPublisher, ProcessType.PUBLISH, environmentVariableContext); } @Override public GoApiResponse process(GoPluginDescriptor pluginDescriptor, GoApiRequest request) { validatePluginRequest(request); switch (Request.fromString(request.api())) { case CONSOLE_LOG: return processConsoleLogRequest(pluginDescriptor, request); default: return DefaultGoApiResponse.error("Illegal api request"); } } private GoApiResponse processConsoleLogRequest(GoPluginDescriptor pluginDescriptor, GoApiRequest request) { final ConsoleLogMessage consoleLogMessage = ConsoleLogMessage.fromJSON(request.requestBody()); final String message = format("[%s] %s", pluginDescriptor.id(), consoleLogMessage.getMessage()); Optional<String> parsedTag = parseTag(processType, consoleLogMessage.getLogLevel()); if (parsedTag.isPresent()) { safeOutputStreamConsumer.taggedStdOutput(parsedTag.get(), message); return 
DefaultGoApiResponse.success(null); } return DefaultGoApiResponse.error(format("Unsupported log level `%s`.", consoleLogMessage.getLogLevel())); } private Optional<String> parseTag(ProcessType requestType, LogLevel logLevel) { switch (requestType) { case FETCH: return Optional.ofNullable(FETCH_ARTIFACT_LOG_LEVEL_TAG.get(logLevel)); case PUBLISH: return Optional.ofNullable(PUBLISH_ARTIFACT_LOG_LEVEL_TAG.get(logLevel)); } return Optional.empty(); } private void validatePluginRequest(GoApiRequest goPluginApiRequest) { if (!goSupportedVersions.contains(goPluginApiRequest.apiVersion())) { throw new RuntimeException(format("Unsupported '%s' API version: %s. Supported versions: %s", goPluginApiRequest.api(), goPluginApiRequest.apiVersion(), goSupportedVersions)); } } public enum Request { CONSOLE_LOG("go.processor.artifact.console-log"); private final String requestName; Request(String requestName) { this.requestName = requestName; } public static Request fromString(String requestName) { if (requestName != null) { for (Request request : Request.values()) { if (requestName.equalsIgnoreCase(request.requestName)) { return request; } } } return null; } public String requestName() { return requestName; } } }
gocd/gocd
common/src/main/java/com/thoughtworks/go/remote/work/artifact/ArtifactRequestProcessor.java
Java
apache-2.0
5,906
package com.senseidb.search.node.impl; import org.json.JSONObject; import com.senseidb.search.node.SenseiQueryBuilder; import com.senseidb.search.node.SenseiQueryBuilderFactory; import com.senseidb.search.req.SenseiQuery; import com.senseidb.util.JSONUtil.FastJSONObject; public abstract class AbstractJsonQueryBuilderFactory implements SenseiQueryBuilderFactory { @Override public SenseiQueryBuilder getQueryBuilder(SenseiQuery query) throws Exception { JSONObject jsonQuery = null; if (query != null) { byte[] bytes = query.toBytes(); jsonQuery = new FastJSONObject(new String(bytes, SenseiQuery.utf8Charset)); } return buildQueryBuilder(jsonQuery); } public abstract SenseiQueryBuilder buildQueryBuilder(JSONObject jsonQuery); }
javasoze/sensei
sensei-core/src/main/java/com/senseidb/search/node/impl/AbstractJsonQueryBuilderFactory.java
Java
apache-2.0
774
var searchData= [ ['value',['value',['../structguac__pool__int.html#af76ff5f21c6e0f69d95cdd1385ea24a4',1,'guac_pool_int']]], ['vguac_5fclient_5fabort',['vguac_client_abort',['../client_8h.html#a4c0eccd7d0ed3dbf3e7941ce297e0224',1,'client.h']]], ['vguac_5fclient_5flog',['vguac_client_log',['../client_8h.html#a37a0fa9cfc4c02236085e3852972f494',1,'client.h']]], ['vguac_5fprotocol_5fsend_5flog',['vguac_protocol_send_log',['../protocol_8h.html#a3a783d771e1727ba2a82b2298acf4ee4',1,'protocol.h']]], ['video_5fmimetypes',['video_mimetypes',['../structguac__client__info.html#aa58dc4ee1e3b8801e9b0abbf9135d8b6',1,'guac_client_info']]] ];
mike-jumper/incubator-guacamole-website
doc/0.9.9/libguac/search/all_12.js
JavaScript
apache-2.0
644
package org.vertexium.util; import org.vertexium.Authorizations; import org.vertexium.Direction; import org.vertexium.Vertex; import java.util.Iterator; public class VerticesToEdgeIdsIterable implements Iterable<String> { private final Iterable<? extends Vertex> vertices; private final Authorizations authorizations; public VerticesToEdgeIdsIterable(Iterable<? extends Vertex> vertices, Authorizations authorizations) { this.vertices = vertices; this.authorizations = authorizations; } @Override public Iterator<String> iterator() { return new SelectManyIterable<Vertex, String>(this.vertices) { @Override public Iterable<String> getIterable(Vertex vertex) { return vertex.getEdgeIds(Direction.BOTH, authorizations); } }.iterator(); } }
visallo/vertexium
core/src/main/java/org/vertexium/util/VerticesToEdgeIdsIterable.java
Java
apache-2.0
855
package com.kit.db; public class Obj { }
BigAppOS/BigApp_Discuz_Android
libs/ZUtils/src/com/kit/db/Obj.java
Java
apache-2.0
43
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.jobmanager; import akka.actor.ActorSystem; import org.apache.flink.configuration.ConfigConstants; import org.apache.flink.configuration.Configuration; import org.apache.flink.runtime.akka.AkkaUtils; import org.apache.flink.runtime.akka.ListeningBehaviour; import org.apache.flink.runtime.blob.BlobClient; import org.apache.flink.runtime.blob.BlobKey; import org.apache.flink.runtime.client.JobExecutionException; import org.apache.flink.runtime.instance.ActorGateway; import org.apache.flink.runtime.jobgraph.JobGraph; import org.apache.flink.runtime.jobgraph.JobVertex; import org.apache.flink.runtime.jobgraph.JobVertexID; import org.apache.flink.runtime.jobgraph.SavepointRestoreSettings; import org.apache.flink.runtime.jobgraph.tasks.ExternalizedCheckpointSettings; import org.apache.flink.runtime.jobgraph.tasks.JobCheckpointingSettings; import org.apache.flink.runtime.leaderretrieval.LeaderRetrievalService; import org.apache.flink.runtime.messages.JobManagerMessages; import org.apache.flink.runtime.testingUtils.TestingUtils; import org.apache.flink.runtime.testtasks.NoOpInvokable; import org.apache.flink.runtime.util.LeaderRetrievalUtils; import 
org.apache.flink.util.NetUtils; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import scala.Tuple2; import scala.concurrent.Await; import scala.concurrent.Future; import scala.concurrent.duration.FiniteDuration; import java.io.IOException; import java.net.InetSocketAddress; import java.util.Collections; import java.util.List; import java.util.concurrent.TimeUnit; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; /** * Tests that the JobManager handles Jobs correctly that fail in * the initialization during the submit phase. */ public class JobSubmitTest { private static final FiniteDuration timeout = new FiniteDuration(60000, TimeUnit.MILLISECONDS); private static ActorSystem jobManagerSystem; private static ActorGateway jmGateway; private static Configuration jmConfig; @BeforeClass public static void setupJobManager() { jmConfig = new Configuration(); int port = NetUtils.getAvailablePort(); jmConfig.setString(ConfigConstants.JOB_MANAGER_IPC_ADDRESS_KEY, "localhost"); jmConfig.setInteger(ConfigConstants.JOB_MANAGER_IPC_PORT_KEY, port); scala.Option<Tuple2<String, Object>> listeningAddress = scala.Option.apply(new Tuple2<String, Object>("localhost", port)); jobManagerSystem = AkkaUtils.createActorSystem(jmConfig, listeningAddress); // only start JobManager (no ResourceManager) JobManager.startJobManagerActors( jmConfig, jobManagerSystem, TestingUtils.defaultExecutor(), TestingUtils.defaultExecutor(), JobManager.class, MemoryArchivist.class)._1(); try { LeaderRetrievalService lrs = LeaderRetrievalUtils.createLeaderRetrievalService(jmConfig); jmGateway = LeaderRetrievalUtils.retrieveLeaderGateway( lrs, jobManagerSystem, timeout ); } catch (Exception e) { fail("Could not retrieve the JobManager gateway. 
" + e.getMessage()); } } @AfterClass public static void teardownJobmanager() { if (jobManagerSystem != null) { jobManagerSystem.shutdown(); } } @Test public void testFailureWhenJarBlobsMissing() { try { // create a simple job graph JobVertex jobVertex = new JobVertex("Test Vertex"); jobVertex.setInvokableClass(NoOpInvokable.class); JobGraph jg = new JobGraph("test job", jobVertex); // request the blob port from the job manager Future<Object> future = jmGateway.ask(JobManagerMessages.getRequestBlobManagerPort(), timeout); int blobPort = (Integer) Await.result(future, timeout); // upload two dummy bytes and add their keys to the job graph as dependencies BlobKey key1, key2; BlobClient bc = new BlobClient(new InetSocketAddress("localhost", blobPort), jmConfig); try { key1 = bc.put(new byte[10]); key2 = bc.put(new byte[10]); // delete one of the blobs to make sure that the startup failed bc.delete(key2); } finally { bc.close(); } jg.addBlob(key1); jg.addBlob(key2); // submit the job Future<Object> submitFuture = jmGateway.ask( new JobManagerMessages.SubmitJob( jg, ListeningBehaviour.EXECUTION_RESULT), timeout); try { Await.result(submitFuture, timeout); } catch (JobExecutionException e) { // that is what we expect assertTrue(e.getCause() instanceof IOException); } catch (Exception e) { fail("Wrong exception type"); } } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } /** * Verifies a correct error message when vertices with master initialization * (input formats / output formats) fail. 
*/ @Test public void testFailureWhenInitializeOnMasterFails() { try { // create a simple job graph JobVertex jobVertex = new JobVertex("Vertex that fails in initializeOnMaster") { private static final long serialVersionUID = -3540303593784587652L; @Override public void initializeOnMaster(ClassLoader loader) throws Exception { throw new RuntimeException("test exception"); } }; jobVertex.setInvokableClass(NoOpInvokable.class); JobGraph jg = new JobGraph("test job", jobVertex); // submit the job Future<Object> submitFuture = jmGateway.ask( new JobManagerMessages.SubmitJob( jg, ListeningBehaviour.EXECUTION_RESULT), timeout); try { Await.result(submitFuture, timeout); } catch (JobExecutionException e) { // that is what we expect // test that the exception nesting is not too deep assertTrue(e.getCause() instanceof RuntimeException); } catch (Exception e) { fail("Wrong exception type"); } } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @Test public void testAnswerFailureWhenSavepointReadFails() throws Exception { // create a simple job graph JobGraph jg = createSimpleJobGraph(); jg.setSavepointRestoreSettings(SavepointRestoreSettings.forPath("pathThatReallyDoesNotExist...")); // submit the job Future<Object> submitFuture = jmGateway.ask( new JobManagerMessages.SubmitJob(jg, ListeningBehaviour.DETACHED), timeout); Object result = Await.result(submitFuture, timeout); assertEquals(JobManagerMessages.JobResultFailure.class, result.getClass()); } private JobGraph createSimpleJobGraph() { JobVertex jobVertex = new JobVertex("Vertex"); jobVertex.setInvokableClass(NoOpInvokable.class); List<JobVertexID> vertexIdList = Collections.singletonList(jobVertex.getID()); JobGraph jg = new JobGraph("test job", jobVertex); jg.setSnapshotSettings(new JobCheckpointingSettings(vertexIdList, vertexIdList, vertexIdList, 5000, 5000, 0L, 10, ExternalizedCheckpointSettings.none(), null, true)); return jg; } }
hwstreaming/flink
flink-runtime/src/test/java/org/apache/flink/runtime/jobmanager/JobSubmitTest.java
Java
apache-2.0
7,730
/* * Copyright 2010 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.drools.core.base.accumulators; import java.io.Externalizable; import java.io.IOException; import java.io.ObjectInput; import java.io.ObjectOutput; import java.io.Serializable; /** * An implementation of an accumulator capable of counting occurences */ public class CountAccumulateFunction extends AbstractAccumulateFunction<CountAccumulateFunction.CountData> { public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { } public void writeExternal(ObjectOutput out) throws IOException { } protected static class CountData implements Externalizable { public long count = 0; public CountData() {} public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { count = in.readLong(); } public void writeExternal(ObjectOutput out) throws IOException { out.writeLong(count); } } /* (non-Javadoc) * @see org.kie.base.accumulators.AccumulateFunction#createContext() */ public CountData createContext() { return new CountData(); } /* (non-Javadoc) * @see org.kie.base.accumulators.AccumulateFunction#init(java.lang.Object) */ public void init(CountData data) { data.count = 0; } /* (non-Javadoc) * @see org.kie.base.accumulators.AccumulateFunction#accumulate(java.lang.Object, java.lang.Object) */ public void accumulate(CountData data, Object value) { data.count++; } /* (non-Javadoc) * @see 
org.kie.base.accumulators.AccumulateFunction#reverse(java.lang.Object, java.lang.Object) */ public void reverse(CountData data, Object value) { data.count--; } /* (non-Javadoc) * @see org.kie.base.accumulators.AccumulateFunction#getResult(java.lang.Object) */ public Object getResult(CountData data) { return new Long( data.count ); } /* (non-Javadoc) * @see org.kie.base.accumulators.AccumulateFunction#supportsReverse() */ public boolean supportsReverse() { return true; } /** * {@inheritDoc} */ public Class< ? > getResultType() { return Long.class; } }
ngs-mtech/drools
drools-core/src/main/java/org/drools/core/base/accumulators/CountAccumulateFunction.java
Java
apache-2.0
2,915
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.type; import com.facebook.presto.operator.scalar.AbstractTestFunctions; import com.facebook.presto.spi.block.Block; import com.facebook.presto.spi.block.BlockBuilder; import com.google.common.net.InetAddresses; import io.airlift.slice.Slices; import org.testng.annotations.Test; import static com.facebook.presto.spi.function.OperatorType.HASH_CODE; import static com.facebook.presto.spi.function.OperatorType.INDETERMINATE; import static com.facebook.presto.spi.type.BigintType.BIGINT; import static com.facebook.presto.spi.type.BooleanType.BOOLEAN; import static com.facebook.presto.spi.type.VarcharType.VARCHAR; import static com.facebook.presto.type.IpAddressType.IPADDRESS; import static com.facebook.presto.type.IpPrefixType.IPPREFIX; import static java.lang.System.arraycopy; public class TestIpPrefixOperators extends AbstractTestFunctions { @Test public void testVarcharToIpPrefixCast() { assertFunction("CAST('::ffff:1.2.3.4/24' AS IPPREFIX)", IPPREFIX, "1.2.3.0/24"); assertFunction("CAST('192.168.0.0/24' AS IPPREFIX)", IPPREFIX, "192.168.0.0/24"); assertFunction("CAST('255.2.3.4/0' AS IPPREFIX)", IPPREFIX, "0.0.0.0/0"); assertFunction("CAST('255.2.3.4/1' AS IPPREFIX)", IPPREFIX, "128.0.0.0/1"); assertFunction("CAST('255.2.3.4/2' AS IPPREFIX)", IPPREFIX, "192.0.0.0/2"); assertFunction("CAST('255.2.3.4/4' AS IPPREFIX)", IPPREFIX, "240.0.0.0/4"); assertFunction("CAST('1.2.3.4/8' AS IPPREFIX)", IPPREFIX, 
"1.0.0.0/8"); assertFunction("CAST('1.2.3.4/16' AS IPPREFIX)", IPPREFIX, "1.2.0.0/16"); assertFunction("CAST('1.2.3.4/24' AS IPPREFIX)", IPPREFIX, "1.2.3.0/24"); assertFunction("CAST('1.2.3.255/25' AS IPPREFIX)", IPPREFIX, "1.2.3.128/25"); assertFunction("CAST('1.2.3.255/26' AS IPPREFIX)", IPPREFIX, "1.2.3.192/26"); assertFunction("CAST('1.2.3.255/28' AS IPPREFIX)", IPPREFIX, "1.2.3.240/28"); assertFunction("CAST('1.2.3.255/30' AS IPPREFIX)", IPPREFIX, "1.2.3.252/30"); assertFunction("CAST('1.2.3.255/32' AS IPPREFIX)", IPPREFIX, "1.2.3.255/32"); assertFunction("CAST('2001:0db8:0000:0000:0000:ff00:0042:8329/128' AS IPPREFIX)", IPPREFIX, "2001:db8::ff00:42:8329/128"); assertFunction("CAST('2001:db8::ff00:42:8329/128' AS IPPREFIX)", IPPREFIX, "2001:db8::ff00:42:8329/128"); assertFunction("CAST('2001:db8:0:0:1:0:0:1/128' AS IPPREFIX)", IPPREFIX, "2001:db8::1:0:0:1/128"); assertFunction("CAST('2001:db8:0:0:1::1/128' AS IPPREFIX)", IPPREFIX, "2001:db8::1:0:0:1/128"); assertFunction("CAST('2001:db8::1:0:0:1/128' AS IPPREFIX)", IPPREFIX, "2001:db8::1:0:0:1/128"); assertFunction("CAST('2001:DB8::FF00:ABCD:12EF/128' AS IPPREFIX)", IPPREFIX, "2001:db8::ff00:abcd:12ef/128"); assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/0' AS IPPREFIX)", IPPREFIX, "::/0"); assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/1' AS IPPREFIX)", IPPREFIX, "8000::/1"); assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/2' AS IPPREFIX)", IPPREFIX, "c000::/2"); assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/4' AS IPPREFIX)", IPPREFIX, "f000::/4"); assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/8' AS IPPREFIX)", IPPREFIX, "ff00::/8"); assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/16' AS IPPREFIX)", IPPREFIX, "ffff::/16"); assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/32' AS IPPREFIX)", IPPREFIX, "ffff:ffff::/32"); assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/48' AS IPPREFIX)", IPPREFIX, 
"ffff:ffff:ffff::/48"); assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/64' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff::/64"); assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/80' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff::/80"); assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/96' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff::/96"); assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/112' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:0/112"); assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/120' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ff00/120"); assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/124' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:fff0/124"); assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/126' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:fffc/126"); assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/127' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:fffe/127"); assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128"); assertFunction("IPPREFIX '10.0.0.0/32'", IPPREFIX, "10.0.0.0/32"); assertFunction("IPPREFIX '64:ff9b::10.0.0.0/128'", IPPREFIX, "64:ff9b::a00:0/128"); assertInvalidCast("CAST('facebook.com/32' AS IPPREFIX)", "Cannot cast value to IPPREFIX: facebook.com/32"); assertInvalidCast("CAST('localhost/32' AS IPPREFIX)", "Cannot cast value to IPPREFIX: localhost/32"); assertInvalidCast("CAST('2001:db8::1::1/128' AS IPPREFIX)", "Cannot cast value to IPPREFIX: 2001:db8::1::1/128"); assertInvalidCast("CAST('2001:zxy::1::1/128' AS IPPREFIX)", "Cannot cast value to IPPREFIX: 2001:zxy::1::1/128"); assertInvalidCast("CAST('789.1.1.1/32' AS IPPREFIX)", "Cannot cast value to IPPREFIX: 789.1.1.1/32"); assertInvalidCast("CAST('192.1.1.1' AS IPPREFIX)", "Cannot cast value to 
IPPREFIX: 192.1.1.1"); assertInvalidCast("CAST('192.1.1.1/128' AS IPPREFIX)", "Cannot cast value to IPPREFIX: 192.1.1.1/128"); } @Test public void testIpPrefixToVarcharCast() { assertFunction("CAST(IPPREFIX '::ffff:1.2.3.4/32' AS VARCHAR)", VARCHAR, "1.2.3.4/32"); assertFunction("CAST(IPPREFIX '::ffff:102:304/32' AS VARCHAR)", VARCHAR, "1.2.3.4/32"); assertFunction("CAST(IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' AS VARCHAR)", VARCHAR, "2001:db8::ff00:42:8329/128"); assertFunction("CAST(IPPREFIX '2001:db8::ff00:42:8329/128' AS VARCHAR)", VARCHAR, "2001:db8::ff00:42:8329/128"); assertFunction("CAST(IPPREFIX '2001:db8:0:0:1:0:0:1/128' AS VARCHAR)", VARCHAR, "2001:db8::1:0:0:1/128"); assertFunction("CAST(CAST('1.2.3.4/32' AS IPPREFIX) AS VARCHAR)", VARCHAR, "1.2.3.4/32"); assertFunction("CAST(CAST('2001:db8:0:0:1::1/128' AS IPPREFIX) AS VARCHAR)", VARCHAR, "2001:db8::1:0:0:1/128"); assertFunction("CAST(CAST('64:ff9b::10.0.0.0/128' AS IPPREFIX) AS VARCHAR)", VARCHAR, "64:ff9b::a00:0/128"); } @Test public void testIpPrefixToIpAddressCast() { assertFunction("CAST(IPPREFIX '1.2.3.4/32' AS IPADDRESS)", IPADDRESS, "1.2.3.4"); assertFunction("CAST(IPPREFIX '1.2.3.4/24' AS IPADDRESS)", IPADDRESS, "1.2.3.0"); assertFunction("CAST(IPPREFIX '::1/128' AS IPADDRESS)", IPADDRESS, "::1"); assertFunction("CAST(IPPREFIX '2001:db8::ff00:42:8329/128' AS IPADDRESS)", IPADDRESS, "2001:db8::ff00:42:8329"); assertFunction("CAST(IPPREFIX '2001:db8::ff00:42:8329/64' AS IPADDRESS)", IPADDRESS, "2001:db8::"); } @Test public void testIpAddressToIpPrefixCast() { assertFunction("CAST(IPADDRESS '1.2.3.4' AS IPPREFIX)", IPPREFIX, "1.2.3.4/32"); assertFunction("CAST(IPADDRESS '::ffff:102:304' AS IPPREFIX)", IPPREFIX, "1.2.3.4/32"); assertFunction("CAST(IPADDRESS '::1' AS IPPREFIX)", IPPREFIX, "::1/128"); assertFunction("CAST(IPADDRESS '2001:db8::ff00:42:8329' AS IPPREFIX)", IPPREFIX, "2001:db8::ff00:42:8329/128"); } @Test public void testEquals() { assertFunction("IPPREFIX 
'2001:0db8:0000:0000:0000:ff00:0042:8329/128' = IPPREFIX '2001:db8::ff00:42:8329/128'", BOOLEAN, true); assertFunction("CAST('1.2.3.4/32' AS IPPREFIX) = CAST('::ffff:1.2.3.4/32' AS IPPREFIX)", BOOLEAN, true); assertFunction("IPPREFIX '192.168.0.0/32' = IPPREFIX '::ffff:192.168.0.0/32'", BOOLEAN, true); assertFunction("IPPREFIX '10.0.0.0/32' = IPPREFIX '::ffff:a00:0/32'", BOOLEAN, true); assertFunction("CAST('1.2.3.4/24' AS IPPREFIX) = IPPREFIX '1.2.3.5/24'", BOOLEAN, true); assertFunction("IPPREFIX '2001:db8::ff00:42:8329/128' = IPPREFIX '2001:db8::ff00:42:8300/128'", BOOLEAN, false); assertFunction("CAST('1.2.3.4/32' AS IPPREFIX) = IPPREFIX '1.2.3.5/32'", BOOLEAN, false); assertFunction("CAST('1.2.0.0/24' AS IPPREFIX) = IPPREFIX '1.2.0.0/25'", BOOLEAN, false); } @Test public void testDistinctFrom() { assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' IS DISTINCT FROM IPPREFIX '2001:db8::ff00:42:8329/128'", BOOLEAN, false); assertFunction("CAST(NULL AS IPPREFIX) IS DISTINCT FROM CAST(NULL AS IPPREFIX)", BOOLEAN, false); assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' IS DISTINCT FROM IPPREFIX '2001:db8::ff00:42:8328/128'", BOOLEAN, true); assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' IS DISTINCT FROM CAST(NULL AS IPPREFIX)", BOOLEAN, true); assertFunction("CAST(NULL AS IPPREFIX) IS DISTINCT FROM IPPREFIX '2001:db8::ff00:42:8328/128'", BOOLEAN, true); } @Test public void testNotEquals() { assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' != IPPREFIX '1.2.3.4/32'", BOOLEAN, true); assertFunction("CAST('1.2.3.4/32' AS IPPREFIX) <> CAST('1.2.3.5/32' AS IPPREFIX)", BOOLEAN, true); assertFunction("CAST('1.2.3.4/32' AS IPPREFIX) != IPPREFIX '1.2.3.4/32'", BOOLEAN, false); assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' <> IPPREFIX '2001:db8::ff00:42:8329/128'", BOOLEAN, false); assertFunction("CAST('1.2.3.4/32' AS IPPREFIX) <> CAST('::ffff:1.2.3.4/32' AS 
IPPREFIX)", BOOLEAN, false); } @Test public void testOrderOperators() { assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' > IPPREFIX '1.2.3.4/32'", BOOLEAN, true); assertFunction("IPPREFIX '1.2.3.4/32' > IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128'", BOOLEAN, false); assertFunction("CAST('1.2.3.4/32' AS IPPREFIX) < CAST('1.2.3.5/32' AS IPPREFIX)", BOOLEAN, true); assertFunction("CAST('1.2.3.5/32' AS IPPREFIX) < CAST('1.2.3.4/32' AS IPPREFIX)", BOOLEAN, false); assertFunction("CAST('1.2.0.0/24' AS IPPREFIX) < CAST('1.2.0.0/25' AS IPPREFIX)", BOOLEAN, true); assertFunction("IPPREFIX '::1/128' <= CAST('1.2.3.5/32' AS IPPREFIX)", BOOLEAN, true); assertFunction("IPPREFIX '1.2.3.5/32' <= CAST('1.2.3.5/32' AS IPPREFIX)", BOOLEAN, true); assertFunction("IPPREFIX '1.2.3.6/32' <= CAST('1.2.3.5/32' AS IPPREFIX)", BOOLEAN, false); assertFunction("IPPREFIX '::1/128' >= IPPREFIX '::/128'", BOOLEAN, true); assertFunction("IPPREFIX '::1/128' >= IPPREFIX '::1/128'", BOOLEAN, true); assertFunction("IPPREFIX '::/128' >= IPPREFIX '::1/128'", BOOLEAN, false); assertFunction("IPPREFIX '::1/128' BETWEEN IPPREFIX '::/128' AND IPPREFIX '::1234/128'", BOOLEAN, true); assertFunction("IPPREFIX '::2222/128' BETWEEN IPPREFIX '::/128' AND IPPREFIX '::1234/128'", BOOLEAN, false); } @Test public void testIndeterminate() { assertOperator(INDETERMINATE, "CAST(null AS IPPREFIX)", BOOLEAN, true); assertOperator(INDETERMINATE, "IPPREFIX '::2222/128'", BOOLEAN, false); } @Test public void testHash() { assertOperator(HASH_CODE, "CAST(null AS IPPREFIX)", BIGINT, null); assertOperator(HASH_CODE, "IPPREFIX '::2222/128'", BIGINT, hashFromType("::2222/128")); } private static long hashFromType(String address) { BlockBuilder blockBuilder = IPPREFIX.createBlockBuilder(null, 1); String[] parts = address.split("/"); byte[] bytes = new byte[IPPREFIX.getFixedSize()]; byte[] addressBytes = InetAddresses.forString(parts[0]).getAddress(); arraycopy(addressBytes, 0, bytes, 0, 16); 
bytes[IPPREFIX.getFixedSize() - 1] = (byte) Integer.parseInt(parts[1]); IPPREFIX.writeSlice(blockBuilder, Slices.wrappedBuffer(bytes)); Block block = blockBuilder.build(); return IPPREFIX.hash(block, 0); } }
ptkool/presto
presto-main/src/test/java/com/facebook/presto/type/TestIpPrefixOperators.java
Java
apache-2.0
13,240
package org.plasma.provisioning.rdb.mysql.v5_5.query; import org.plasma.provisioning.rdb.mysql.v5_5.TableColumnConstraint; import org.plasma.query.DataProperty; import org.plasma.query.Expression; import org.plasma.query.dsl.DataNode; import org.plasma.query.dsl.DomainRoot; import org.plasma.query.dsl.PathNode; import org.plasma.sdo.helper.PlasmaTypeHelper; /** * Generated Domain Specific Language (DSL) implementation class representing * the domain model entity <b>TableColumnConstraint</b>. * * <p> * </p> * <b>Data Store Mapping:</b> Corresponds to the physical data store entity * <b>REFERENTIAL_CONSTRAINTS</b>. * */ public class QTableColumnConstraint extends DomainRoot { private QTableColumnConstraint() { super(PlasmaTypeHelper.INSTANCE.getType(TableColumnConstraint.class)); } /** * Constructor which instantiates a domain query path node. A path may span * multiple namespaces and therefore Java inplementation packages based on the * <a href= * "http://docs.plasma-sdo.org/api/org/plasma/config/PlasmaConfiguration.html" * >Condiguration</a>. Note: while this constructor is public, it is not for * application use! * * @param source * the source path node * @param sourceProperty * the source property logical name */ public QTableColumnConstraint(PathNode source, String sourceProperty) { super(source, sourceProperty); } /** * Constructor which instantiates a domain query path node. A path may span * multiple namespaces and therefore Java inplementation packages based on the * <a href= * "http://docs.plasma-sdo.org/api/org/plasma/config/PlasmaConfiguration.html" * >Condiguration</a>. Note: while this constructor is public, it is not for * application use! 
* * @param source * the source path node * @param sourceProperty * the source property logical name * @param expr * the path predicate expression */ public QTableColumnConstraint(PathNode source, String sourceProperty, Expression expr) { super(source, sourceProperty, expr); } /** * Returns a new DSL query for <a * href="http://docs.plasma-sdo.org/api/org/plasma/sdo/PlasmaType.html" * >Type</a> <b>TableColumnConstraint</b> which can be used either as a query * root or as the start (entry point) for a new path predicate expression. * * @return a new DSL query */ public static QTableColumnConstraint newQuery() { return new QTableColumnConstraint(); } /** * Returns a DSL data element for property, <b>name</b>. * * @return a DSL data element for property, <b>name</b>. */ public DataProperty name() { return new DataNode(this, TableColumnConstraint.PROPERTY.name.name()); } /** * Returns a DSL data element for property, <b>owner</b>. * * @return a DSL data element for property, <b>owner</b>. */ public DataProperty owner() { return new DataNode(this, TableColumnConstraint.PROPERTY.owner.name()); } /** * Returns a DSL query element for reference property, <b>table</b>. * * @return a DSL query element for reference property, <b>table</b>. */ public QTable table() { return new QTable(this, TableColumnConstraint.PROPERTY.table.name()); } }
plasma-framework/plasma
plasma-provisioning/src/main/java/org/plasma/provisioning/rdb/mysql/v5_5/query/QTableColumnConstraint.java
Java
apache-2.0
3,301
// Code generated by protoc-gen-go. DO NOT EDIT. // source: hapi/chart/metadata.proto package chart import proto "github.com/golang/protobuf/proto" import fmt "fmt" import math "math" // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package type Metadata_Engine int32 const ( Metadata_UNKNOWN Metadata_Engine = 0 Metadata_GOTPL Metadata_Engine = 1 ) var Metadata_Engine_name = map[int32]string{ 0: "UNKNOWN", 1: "GOTPL", } var Metadata_Engine_value = map[string]int32{ "UNKNOWN": 0, "GOTPL": 1, } func (x Metadata_Engine) String() string { return proto.EnumName(Metadata_Engine_name, int32(x)) } func (Metadata_Engine) EnumDescriptor() ([]byte, []int) { return fileDescriptor_metadata_d6c714c73a051dcb, []int{1, 0} } // Maintainer describes a Chart maintainer. 
type Maintainer struct { // Name is a user name or organization name Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` // Email is an optional email address to contact the named maintainer Email string `protobuf:"bytes,2,opt,name=email,proto3" json:"email,omitempty"` // Url is an optional URL to an address for the named maintainer Url string `protobuf:"bytes,3,opt,name=url,proto3" json:"url,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Maintainer) Reset() { *m = Maintainer{} } func (m *Maintainer) String() string { return proto.CompactTextString(m) } func (*Maintainer) ProtoMessage() {} func (*Maintainer) Descriptor() ([]byte, []int) { return fileDescriptor_metadata_d6c714c73a051dcb, []int{0} } func (m *Maintainer) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Maintainer.Unmarshal(m, b) } func (m *Maintainer) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Maintainer.Marshal(b, m, deterministic) } func (dst *Maintainer) XXX_Merge(src proto.Message) { xxx_messageInfo_Maintainer.Merge(dst, src) } func (m *Maintainer) XXX_Size() int { return xxx_messageInfo_Maintainer.Size(m) } func (m *Maintainer) XXX_DiscardUnknown() { xxx_messageInfo_Maintainer.DiscardUnknown(m) } var xxx_messageInfo_Maintainer proto.InternalMessageInfo func (m *Maintainer) GetName() string { if m != nil { return m.Name } return "" } func (m *Maintainer) GetEmail() string { if m != nil { return m.Email } return "" } func (m *Maintainer) GetUrl() string { if m != nil { return m.Url } return "" } // Metadata for a Chart file. This models the structure of a Chart.yaml file. 
// // Spec: https://k8s.io/helm/blob/master/docs/design/chart_format.md#the-chart-file type Metadata struct { // The name of the chart Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` // The URL to a relevant project page, git repo, or contact person Home string `protobuf:"bytes,2,opt,name=home,proto3" json:"home,omitempty"` // Source is the URL to the source code of this chart Sources []string `protobuf:"bytes,3,rep,name=sources,proto3" json:"sources,omitempty"` // A SemVer 2 conformant version string of the chart Version string `protobuf:"bytes,4,opt,name=version,proto3" json:"version,omitempty"` // A one-sentence description of the chart Description string `protobuf:"bytes,5,opt,name=description,proto3" json:"description,omitempty"` // A list of string keywords Keywords []string `protobuf:"bytes,6,rep,name=keywords,proto3" json:"keywords,omitempty"` // A list of name and URL/email address combinations for the maintainer(s) Maintainers []*Maintainer `protobuf:"bytes,7,rep,name=maintainers,proto3" json:"maintainers,omitempty"` // The name of the template engine to use. Defaults to 'gotpl'. Engine string `protobuf:"bytes,8,opt,name=engine,proto3" json:"engine,omitempty"` // The URL to an icon file. Icon string `protobuf:"bytes,9,opt,name=icon,proto3" json:"icon,omitempty"` // The API Version of this chart. ApiVersion string `protobuf:"bytes,10,opt,name=apiVersion,proto3" json:"apiVersion,omitempty"` // The condition to check to enable chart Condition string `protobuf:"bytes,11,opt,name=condition,proto3" json:"condition,omitempty"` // The tags to check to enable chart Tags string `protobuf:"bytes,12,opt,name=tags,proto3" json:"tags,omitempty"` // The version of the application enclosed inside of this chart. 
AppVersion string `protobuf:"bytes,13,opt,name=appVersion,proto3" json:"appVersion,omitempty"` // Whether or not this chart is deprecated Deprecated bool `protobuf:"varint,14,opt,name=deprecated,proto3" json:"deprecated,omitempty"` // TillerVersion is a SemVer constraints on what version of Tiller is required. // See SemVer ranges here: https://github.com/Masterminds/semver#basic-comparisons TillerVersion string `protobuf:"bytes,15,opt,name=tillerVersion,proto3" json:"tillerVersion,omitempty"` // Annotations are additional mappings uninterpreted by Tiller, // made available for inspection by other applications. Annotations map[string]string `protobuf:"bytes,16,rep,name=annotations,proto3" json:"annotations,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` // KubeVersion is a SemVer constraint specifying the version of Kubernetes required. KubeVersion string `protobuf:"bytes,17,opt,name=kubeVersion,proto3" json:"kubeVersion,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Metadata) Reset() { *m = Metadata{} } func (m *Metadata) String() string { return proto.CompactTextString(m) } func (*Metadata) ProtoMessage() {} func (*Metadata) Descriptor() ([]byte, []int) { return fileDescriptor_metadata_d6c714c73a051dcb, []int{1} } func (m *Metadata) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Metadata.Unmarshal(m, b) } func (m *Metadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Metadata.Marshal(b, m, deterministic) } func (dst *Metadata) XXX_Merge(src proto.Message) { xxx_messageInfo_Metadata.Merge(dst, src) } func (m *Metadata) XXX_Size() int { return xxx_messageInfo_Metadata.Size(m) } func (m *Metadata) XXX_DiscardUnknown() { xxx_messageInfo_Metadata.DiscardUnknown(m) } var xxx_messageInfo_Metadata proto.InternalMessageInfo func (m *Metadata) GetName() string { if m != nil { return m.Name } 
return "" } func (m *Metadata) GetHome() string { if m != nil { return m.Home } return "" } func (m *Metadata) GetSources() []string { if m != nil { return m.Sources } return nil } func (m *Metadata) GetVersion() string { if m != nil { return m.Version } return "" } func (m *Metadata) GetDescription() string { if m != nil { return m.Description } return "" } func (m *Metadata) GetKeywords() []string { if m != nil { return m.Keywords } return nil } func (m *Metadata) GetMaintainers() []*Maintainer { if m != nil { return m.Maintainers } return nil } func (m *Metadata) GetEngine() string { if m != nil { return m.Engine } return "" } func (m *Metadata) GetIcon() string { if m != nil { return m.Icon } return "" } func (m *Metadata) GetApiVersion() string { if m != nil { return m.ApiVersion } return "" } func (m *Metadata) GetCondition() string { if m != nil { return m.Condition } return "" } func (m *Metadata) GetTags() string { if m != nil { return m.Tags } return "" } func (m *Metadata) GetAppVersion() string { if m != nil { return m.AppVersion } return "" } func (m *Metadata) GetDeprecated() bool { if m != nil { return m.Deprecated } return false } func (m *Metadata) GetTillerVersion() string { if m != nil { return m.TillerVersion } return "" } func (m *Metadata) GetAnnotations() map[string]string { if m != nil { return m.Annotations } return nil } func (m *Metadata) GetKubeVersion() string { if m != nil { return m.KubeVersion } return "" } func init() { proto.RegisterType((*Maintainer)(nil), "hapi.chart.Maintainer") proto.RegisterType((*Metadata)(nil), "hapi.chart.Metadata") proto.RegisterMapType((map[string]string)(nil), "hapi.chart.Metadata.AnnotationsEntry") proto.RegisterEnum("hapi.chart.Metadata_Engine", Metadata_Engine_name, Metadata_Engine_value) } func init() { proto.RegisterFile("hapi/chart/metadata.proto", fileDescriptor_metadata_d6c714c73a051dcb) } var fileDescriptor_metadata_d6c714c73a051dcb = []byte{ // 435 bytes of a gzipped FileDescriptorProto 0x1f, 
0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x52, 0x5d, 0x6b, 0xd4, 0x40, 0x14, 0x35, 0xcd, 0x66, 0x77, 0x73, 0x63, 0x35, 0x0e, 0x52, 0xc6, 0x22, 0x12, 0x16, 0x85, 0x7d, 0xda, 0x82, 0xbe, 0x14, 0x1f, 0x04, 0x85, 0x52, 0x41, 0xbb, 0x95, 0xe0, 0x07, 0xf8, 0x36, 0x4d, 0x2e, 0xdd, 0x61, 0x93, 0x99, 0x30, 0x99, 0xad, 0xec, 0xaf, 0xf0, 0x2f, 0xcb, 0xdc, 0x64, 0x9a, 0xac, 0xf4, 0xed, 0x9e, 0x73, 0x66, 0xce, 0xcc, 0xbd, 0xf7, 0xc0, 0x8b, 0x8d, 0x68, 0xe4, 0x59, 0xb1, 0x11, 0xc6, 0x9e, 0xd5, 0x68, 0x45, 0x29, 0xac, 0x58, 0x35, 0x46, 0x5b, 0xcd, 0xc0, 0x49, 0x2b, 0x92, 0x16, 0x9f, 0x01, 0xae, 0x84, 0x54, 0x56, 0x48, 0x85, 0x86, 0x31, 0x98, 0x28, 0x51, 0x23, 0x0f, 0xb2, 0x60, 0x19, 0xe7, 0x54, 0xb3, 0xe7, 0x10, 0x61, 0x2d, 0x64, 0xc5, 0x8f, 0x88, 0xec, 0x00, 0x4b, 0x21, 0xdc, 0x99, 0x8a, 0x87, 0xc4, 0xb9, 0x72, 0xf1, 0x37, 0x82, 0xf9, 0x55, 0xff, 0xd0, 0x83, 0x46, 0x0c, 0x26, 0x1b, 0x5d, 0x63, 0xef, 0x43, 0x35, 0xe3, 0x30, 0x6b, 0xf5, 0xce, 0x14, 0xd8, 0xf2, 0x30, 0x0b, 0x97, 0x71, 0xee, 0xa1, 0x53, 0xee, 0xd0, 0xb4, 0x52, 0x2b, 0x3e, 0xa1, 0x0b, 0x1e, 0xb2, 0x0c, 0x92, 0x12, 0xdb, 0xc2, 0xc8, 0xc6, 0x3a, 0x35, 0x22, 0x75, 0x4c, 0xb1, 0x53, 0x98, 0x6f, 0x71, 0xff, 0x47, 0x9b, 0xb2, 0xe5, 0x53, 0xb2, 0xbd, 0xc7, 0xec, 0x1c, 0x92, 0xfa, 0xbe, 0xe1, 0x96, 0xcf, 0xb2, 0x70, 0x99, 0xbc, 0x3d, 0x59, 0x0d, 0x23, 0x59, 0x0d, 0xf3, 0xc8, 0xc7, 0x47, 0xd9, 0x09, 0x4c, 0x51, 0xdd, 0x4a, 0x85, 0x7c, 0x4e, 0x4f, 0xf6, 0xc8, 0xf5, 0x25, 0x0b, 0xad, 0x78, 0xdc, 0xf5, 0xe5, 0x6a, 0xf6, 0x0a, 0x40, 0x34, 0xf2, 0x67, 0xdf, 0x00, 0x90, 0x32, 0x62, 0xd8, 0x4b, 0x88, 0x0b, 0xad, 0x4a, 0x49, 0x1d, 0x24, 0x24, 0x0f, 0x84, 0x73, 0xb4, 0xe2, 0xb6, 0xe5, 0x8f, 0x3b, 0x47, 0x57, 0x77, 0x8e, 0x8d, 0x77, 0x3c, 0xf6, 0x8e, 0x9e, 0x71, 0x7a, 0x89, 0x8d, 0xc1, 0x42, 0x58, 0x2c, 0xf9, 0x93, 0x2c, 0x58, 0xce, 0xf3, 0x11, 0xc3, 0x5e, 0xc3, 0xb1, 0x95, 0x55, 0x85, 0xc6, 0x5b, 0x3c, 0x25, 0x8b, 0x43, 0x92, 0x5d, 0x42, 0x22, 0x94, 0xd2, 0x56, 0xb8, 0x7f, 0xb4, 0x3c, 0xa5, 0xe9, 0xbc, 0x39, 0x98, 
0x8e, 0xcf, 0xd2, 0xc7, 0xe1, 0xdc, 0x85, 0xb2, 0x66, 0x9f, 0x8f, 0x6f, 0xba, 0x25, 0x6d, 0x77, 0x37, 0xe8, 0x1f, 0x7b, 0xd6, 0x2d, 0x69, 0x44, 0x9d, 0x7e, 0x80, 0xf4, 0x7f, 0x0b, 0x97, 0xaa, 0x2d, 0xee, 0xfb, 0xd4, 0xb8, 0xd2, 0xa5, 0xef, 0x4e, 0x54, 0x3b, 0x9f, 0x9a, 0x0e, 0xbc, 0x3f, 0x3a, 0x0f, 0x16, 0x19, 0x4c, 0x2f, 0xba, 0x05, 0x24, 0x30, 0xfb, 0xb1, 0xfe, 0xb2, 0xbe, 0xfe, 0xb5, 0x4e, 0x1f, 0xb1, 0x18, 0xa2, 0xcb, 0xeb, 0xef, 0xdf, 0xbe, 0xa6, 0xc1, 0xa7, 0xd9, 0xef, 0x88, 0xfe, 0x7c, 0x33, 0xa5, 0xdc, 0xbf, 0xfb, 0x17, 0x00, 0x00, 0xff, 0xff, 0x36, 0xf9, 0x0d, 0xa6, 0x14, 0x03, 0x00, 0x00, }
appscode/helm
pkg/proto/hapi/chart/metadata.pb.go
GO
apache-2.0
11,564
/* * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is * regenerated. */ 'use strict'; /** * @class * Initializes a new instance of the DataSourceListResult class. * @constructor * The list data source by workspace operation response. * * @member {string} [nextLink] The link (url) to the next page of datasources. * */ class DataSourceListResult extends Array { constructor() { super(); } /** * Defines the metadata of DataSourceListResult * * @returns {object} metadata of DataSourceListResult * */ mapper() { return { required: false, serializedName: 'DataSourceListResult', type: { name: 'Composite', className: 'DataSourceListResult', modelProperties: { value: { required: false, serializedName: '', type: { name: 'Sequence', element: { required: false, serializedName: 'DataSourceElementType', type: { name: 'Composite', className: 'DataSource' } } } }, nextLink: { required: false, serializedName: 'nextLink', type: { name: 'String' } } } } }; } } module.exports = DataSourceListResult;
AuxMon/azure-sdk-for-node
lib/services/operationalInsightsManagement/lib/models/dataSourceListResult.js
JavaScript
apache-2.0
1,648
/* * * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.flex.compiler.problems; import org.apache.flex.compiler.common.ISourceLocation; /** * This problem gets created when a variable has two Embed meta data tags * associated with it. */ public final class EmbedMultipleMetaTagsProblem extends CompilerProblem { public static final String DESCRIPTION = "A variable can only only have one [${EMBED}] metadata tag"; public static final int errorCode = 1344; public EmbedMultipleMetaTagsProblem(ISourceLocation site) { super(site); } // Prevent these from being localized. public final String EMBED = "Embed"; }
adufilie/flex-falcon
compiler/src/org/apache/flex/compiler/problems/EmbedMultipleMetaTagsProblem.java
Java
apache-2.0
1,453
/* * Copyright 2022 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.thoughtworks.go.server.materials; import com.thoughtworks.go.config.CruiseConfig; import com.thoughtworks.go.config.PipelineConfig; import com.thoughtworks.go.config.materials.dependency.DependencyMaterial; import com.thoughtworks.go.config.remote.ConfigRepoConfig; import com.thoughtworks.go.domain.materials.Material; import com.thoughtworks.go.domain.packagerepository.PackageDefinition; import com.thoughtworks.go.domain.packagerepository.PackageRepository; import com.thoughtworks.go.domain.scm.SCM; import com.thoughtworks.go.listener.ConfigChangedListener; import com.thoughtworks.go.listener.EntityConfigChangedListener; import com.thoughtworks.go.server.service.GoConfigService; import com.thoughtworks.go.server.service.MaterialConfigConverter; import com.thoughtworks.go.util.SystemEnvironment; import org.slf4j.Logger; import org.joda.time.DateTimeUtils; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; /** * Provides a list of unique SCMMaterials to be updated which will be consumed by MaterialUpdateService */ @Component public class SCMMaterialSource extends EntityConfigChangedListener<ConfigRepoConfig> implements ConfigChangedListener, MaterialSource, 
MaterialUpdateCompleteListener { private static final Logger LOGGER = LoggerFactory.getLogger(SCMMaterialSource.class); private final GoConfigService goConfigService; private ConcurrentMap<Material, Long> materialLastUpdateTimeMap = new ConcurrentHashMap<>(); private final MaterialConfigConverter materialConfigConverter; private final MaterialUpdateService materialUpdateService; private final long materialUpdateInterval; private Set<Material> schedulableMaterials; @Autowired public SCMMaterialSource(GoConfigService goConfigService, SystemEnvironment systemEnvironment, MaterialConfigConverter materialConfigConverter, MaterialUpdateService materialUpdateService) { this.goConfigService = goConfigService; this.materialConfigConverter = materialConfigConverter; this.materialUpdateService = materialUpdateService; this.materialUpdateInterval = systemEnvironment.getMaterialUpdateIdleInterval(); } public void initialize() { goConfigService.register(this); goConfigService.register(new InternalConfigChangeListener() { @Override public void onEntityConfigChange(Object entity) { updateSchedulableMaterials(true); } }); materialUpdateService.registerMaterialSources(this); materialUpdateService.registerMaterialUpdateCompleteListener(this); } @Override public Set<Material> materialsForUpdate() { updateSchedulableMaterials(false); return materialsWithUpdateIntervalElapsed(); } @Override public void onMaterialUpdate(Material material) { if (!(material instanceof DependencyMaterial)) { updateLastUpdateTimeForScmMaterial(material); } } @Override public void onConfigChange(CruiseConfig newCruiseConfig) { updateSchedulableMaterials(true); } @Override public void onEntityConfigChange(ConfigRepoConfig entity) { updateSchedulableMaterials(true); } protected EntityConfigChangedListener<PipelineConfig> pipelineConfigChangedListener() { final SCMMaterialSource self = this; return new EntityConfigChangedListener<PipelineConfig>() { @Override public void onEntityConfigChange(PipelineConfig 
pipelineConfig) { self.onConfigChange(null); } }; } private Set<Material> materialsWithUpdateIntervalElapsed() { Set<Material> materialsForUpdate = new HashSet<>(); for (Material material : schedulableMaterials) { if (hasUpdateIntervalElapsedForScmMaterial(material)) { materialsForUpdate.add(material); } } return materialsForUpdate; } boolean hasUpdateIntervalElapsedForScmMaterial(Material material) { Long lastMaterialUpdateTime = materialLastUpdateTimeMap.get(material); if (lastMaterialUpdateTime != null) { boolean shouldUpdateMaterial = (DateTimeUtils.currentTimeMillis() - lastMaterialUpdateTime) >= materialUpdateInterval; if (LOGGER.isDebugEnabled() && !shouldUpdateMaterial) { LOGGER.debug("[Material Update] Skipping update of material {} which has been last updated at {}", material, new Date(lastMaterialUpdateTime)); } return shouldUpdateMaterial; } return true; } private void updateLastUpdateTimeForScmMaterial(Material material) { materialLastUpdateTimeMap.put(material, DateTimeUtils.currentTimeMillis()); } private void updateSchedulableMaterials(boolean forceLoad) { if (forceLoad || schedulableMaterials == null) { schedulableMaterials = materialConfigConverter.toMaterials(goConfigService.getSchedulableSCMMaterials()); } } private abstract class InternalConfigChangeListener extends EntityConfigChangedListener<Object> { private final List<Class<?>> securityConfigClasses = Arrays.asList( PipelineConfig.class, PackageDefinition.class, PackageRepository.class, SCM.class ); @Override public boolean shouldCareAbout(Object entity) { return securityConfigClasses.stream().anyMatch(aClass -> aClass.isAssignableFrom(entity.getClass())); } } }
Skarlso/gocd
server/src/main/java/com/thoughtworks/go/server/materials/SCMMaterialSource.java
Java
apache-2.0
6,425
/* * Copyright 2020 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.optaplanner.core.impl.score.stream.drools.quad; import java.util.function.Function; import java.util.function.Supplier; import org.optaplanner.core.api.function.PentaFunction; import org.optaplanner.core.api.function.QuadFunction; import org.optaplanner.core.api.score.stream.quad.QuadConstraintCollector; import org.optaplanner.core.impl.score.stream.drools.common.BiTuple; import org.optaplanner.core.impl.score.stream.drools.common.DroolsAbstractUniCollectingGroupByCollectorProcessor; import org.optaplanner.core.impl.score.stream.drools.common.QuadTuple; import org.optaplanner.core.impl.score.stream.drools.common.TriTuple; final class DroolsQuadToTriGroupByCollectorProcessor<A, B, C, D, ResultContainer, NewA, NewB, NewC> extends DroolsAbstractUniCollectingGroupByCollectorProcessor<ResultContainer, QuadTuple<A, B, C, D>, BiTuple<NewA, NewB>, TriTuple<NewA, NewB, NewC>> { private final QuadFunction<A, B, C, D, NewA> groupKeyAMapping; private final QuadFunction<A, B, C, D, NewB> groupKeyBMapping; private final Supplier<ResultContainer> supplier; private final PentaFunction<ResultContainer, A, B, C, D, Runnable> accumulator; private final Function<ResultContainer, NewC> finisher; public DroolsQuadToTriGroupByCollectorProcessor(QuadFunction<A, B, C, D, NewA> groupKeyAMapping, QuadFunction<A, B, C, D, NewB> groupKeyBMapping, QuadConstraintCollector<A, B, C, D, 
ResultContainer, NewC> collector) { this.groupKeyAMapping = groupKeyAMapping; this.groupKeyBMapping = groupKeyBMapping; this.supplier = collector.supplier(); this.accumulator = collector.accumulator(); this.finisher = collector.finisher(); } @Override protected BiTuple<NewA, NewB> toKey(QuadTuple<A, B, C, D> abcdQuadTuple) { return new BiTuple<>(groupKeyAMapping.apply(abcdQuadTuple.a, abcdQuadTuple.b, abcdQuadTuple.c, abcdQuadTuple.d), groupKeyBMapping.apply(abcdQuadTuple.a, abcdQuadTuple.b, abcdQuadTuple.c, abcdQuadTuple.d)); } @Override protected ResultContainer newContainer() { return supplier.get(); } @Override protected Runnable process(QuadTuple<A, B, C, D> abcdQuadTuple, ResultContainer container) { return accumulator.apply(container, abcdQuadTuple.a, abcdQuadTuple.b, abcdQuadTuple.c, abcdQuadTuple.d); } @Override protected TriTuple<NewA, NewB, NewC> toResult(BiTuple<NewA, NewB> key, ResultContainer container) { return new TriTuple<>(key.a, key.b, finisher.apply(container)); } }
ge0ffrey/optaplanner
optaplanner-core/src/main/java/org/optaplanner/core/impl/score/stream/drools/quad/DroolsQuadToTriGroupByCollectorProcessor.java
Java
apache-2.0
3,217
# # Author:: Daniel DeLeo (<dan@opscode.com>) # Copyright:: Copyright (c) 2013 Opscode, Inc. # License:: Apache License, Version 2.0 # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. require "support/shared/integration/integration_helper" require "chef/mixin/shell_out" describe "chef-client" do include IntegrationSupport include Chef::Mixin::ShellOut let(:chef_zero_opts) { {:host => "::1"} } let(:validation_pem) do <<-END_VALIDATION_PEM -----BEGIN RSA PRIVATE KEY----- MIIEogIBAAKCAQEApubutqtYYQ5UiA9QhWP7UvSmsfHsAoPKEVVPdVW/e8Svwpyf 0Xef6OFWVmBE+W442ZjLOe2y6p2nSnaq4y7dg99NFz6X+16mcKiCbj0RCiGqCvCk NftHhTgO9/RFvCbmKZ1RKNob1YzLrFpxBHaSh9po+DGWhApcd+I+op+ZzvDgXhNn 0nauZu3rZmApI/r7EEAOjFedAXs7VPNXhhtZAiLSAVIrwU3ZajtSzgXOxbNzgj5O AAAMmThK+71qPdffAdO4J198H6/MY04qgtFo7vumzCq0UCaGZfmeI1UNE4+xQWwP HJ3pDAP61C6Ebx2snI2kAd9QMx9Y78nIedRHPwIDAQABAoIBAHssRtPM1GacWsom 8zfeN6ZbI4KDlbetZz0vhnqDk9NVrpijWlcOP5dwZXVNitnB/HaqCqFvyPDY9JNB zI/pEFW4QH59FVDP42mVEt0keCTP/1wfiDDGh1vLqVBYl/ZphscDcNgDTzNkuxMx k+LFVxKnn3w7rGc59lALSkpeGvbbIDjp3LUMlUeCF8CIFyYZh9ZvXe4OCxYdyjxb i8tnMLKvJ4Psbh5jMapsu3rHQkfPdqzztQUz8vs0NYwP5vWge46FUyk+WNm/IhbJ G3YM22nwUS8Eu2bmTtADSJolATbCSkOwQ1D+Fybz/4obfYeGaCdOqB05ttubhenV ShsAb7ECgYEA20ecRVxw2S7qA7sqJ4NuYOg9TpfGooptYNA1IP971eB6SaGAelEL awYkGNuu2URmm5ElZpwJFFTDLGA7t2zB2xI1FeySPPIVPvJGSiZoFQOVlIg9WQzK 7jTtFQ/tOMrF+bigEUJh5bP1/7HzqSpuOsPjEUb2aoCTp+tpiRGL7TUCgYEAwtns g3ysrSEcTzpSv7fQRJRk1lkBhatgNd0oc+ikzf74DaVLhBg1jvSThDhiDCdB59mr 
Jh41cnR1XqE8jmdQbCDRiFrI1Pq6TPaDZFcovDVE1gue9x86v3FOH2ukPG4d2/Xy HevXjThtpMMsWFi0JYXuzXuV5HOvLZiP8sN3lSMCgYANpdxdGM7RRbE9ADY0dWK2 V14ReTLcxP7fyrWz0xLzEeCqmomzkz3BsIUoouu0DCTSw+rvAwExqcDoDylIVlWO fAifz7SeZHbcDxo+3TsXK7zwnLYsx7YNs2+aIv6hzUUbMNmNmXMcZ+IEwx+mRMTN lYmZdrA5mr0V83oDFPt/jQKBgC74RVE03pMlZiObFZNtheDiPKSG9Bz6wMh7NWMr c37MtZLkg52mEFMTlfPLe6ceV37CM8WOhqe+dwSGrYhOU06dYqUR7VOZ1Qr0aZvo fsNPu/Y0+u7rMkgv0fs1AXQnvz7kvKaF0YITVirfeXMafuKEtJoH7owRbur42cpV YCAtAoGAP1rHOc+w0RUcBK3sY7aErrih0OPh9U5bvJsrw1C0FIZhCEoDVA+fNIQL syHLXYFNy0OxMtH/bBAXBGNHd9gf5uOnqh0pYcbe/uRAxumC7Rl0cL509eURiA2T +vFmf54y9YdnLXaqv+FhJT6B6V7WX7IpU9BMqJY1cJYXHuHG2KA= -----END RSA PRIVATE KEY----- END_VALIDATION_PEM end let(:cache_path) do Dir.mktmpdir end let(:basic_config_file) do <<-END_CLIENT_RB chef_server_url "http://[::1]:8900" validation_key '#{path_to('config/validator.pem')}' cache_path '#{cache_path}' client_key '#{cache_path}/client.pem' END_CLIENT_RB end let(:client_rb_content) do basic_config_file end let(:chef_dir) { File.join(File.dirname(__FILE__), "..", "..", "..", "bin") } let(:chef_client_cmd) { %Q{ruby '#{chef_dir}/chef-client' --minimal-ohai -c "#{path_to('config/client.rb')}" -lwarn} } after do FileUtils.rm_rf(cache_path) end # Some Solaris test platforms are too old for IPv6. These tests should not # otherwise be platform dependent, so exclude solaris when_the_chef_server "is running on IPv6", :not_supported_on_solaris, :not_supported_on_gce do when_the_repository "has a cookbook with a no-op recipe" do before do cookbook "noop", "1.0.0", { }, "recipes" => {"default.rb" => "#raise 'foo'"} file "config/client.rb", client_rb_content file "config/validator.pem", validation_pem end it "should complete with success" do result = shell_out("#{chef_client_cmd} -o 'noop::default'", :cwd => chef_dir) result.error! 
end end when_the_repository "has a cookbook that hits server APIs" do before do recipe=<<-END_RECIPE actual_item = data_bag_item("expect_bag", "expect_item") if actual_item.key?("expect_key") and actual_item["expect_key"] == "expect_value" Chef::Log.info "lookin good" else Chef::Log.error("!" * 80) raise "unexpected data bag item content \#{actual_item.inspect}" Chef::Log.error("!" * 80) end END_RECIPE data_bag("expect_bag", { "expect_item" => {"expect_key" => "expect_value"} }) cookbook "api-smoke-test", "1.0.0", { }, "recipes" => {"default.rb" => recipe} end before do file "config/client.rb", client_rb_content file "config/validator.pem", validation_pem end it "should complete with success" do result = shell_out("#{chef_client_cmd} -o 'api-smoke-test::default'", :cwd => chef_dir) result.error! end end end end
BackSlasher/chef
spec/integration/client/ipv6_spec.rb
Ruby
apache-2.0
4,937
/** * Copyright (C) 2013-2016 The Rythm Engine project * for LICENSE and other details see: * https://github.com/rythmengine/rythmengine */ package org.rythmengine.cache; /*- * #%L * Rythm Template Engine * %% * Copyright (C) 2017 - 2021 OSGL (Open Source General Library) * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import org.rythmengine.extension.ICacheService; import org.rythmengine.extension.ICacheServiceFactory; /** * Created with IntelliJ IDEA. * User: luog * Date: 2/12/13 * Time: 8:45 AM * To change this template use File | Settings | File Templates. */ class EhCacheServiceFactory implements ICacheServiceFactory { @Override public ICacheService get() { return EhCacheService.INSTANCE; } }
rythmengine/rythmengine
src/main/java/org/rythmengine/cache/EhCacheServiceFactory.java
Java
apache-2.0
1,271
/** * Licensed to the Austrian Association for Software Tool Integration (AASTI) * under one or more contributor license agreements. See the NOTICE file * distributed with this work for additional information regarding copyright * ownership. The AASTI licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.openengsb.core.api.security; /** * Serves as baseclass for all Security-related Exceptions in the OpenEngSB (similar to * {@link java.security.GeneralSecurityException} * */ public abstract class OpenEngSBSecurityException extends Exception { private static final long serialVersionUID = -2939758040088724227L; public OpenEngSBSecurityException() { } public OpenEngSBSecurityException(String message, Throwable cause) { super(message, cause); } public OpenEngSBSecurityException(String message) { super(message); } public OpenEngSBSecurityException(Throwable cause) { super(cause); } }
openengsb-attic/openengsb-api
src/main/java/org/openengsb/core/api/security/OpenEngSBSecurityException.java
Java
apache-2.0
1,499
package org.andidev.applicationname.format.custom; import java.util.Locale; import org.andidev.applicationname.format.annotation.CustomFormat; import org.apache.commons.lang3.StringUtils; import org.springframework.expression.EvaluationContext; import org.springframework.expression.ExpressionParser; import org.springframework.expression.spel.SpelParseException; import org.springframework.expression.spel.standard.SpelExpressionParser; import org.springframework.format.Printer; public class CustomPrinter implements Printer<Object> { private final String spelExpression; private final EvaluationContext evaluationContext; public CustomPrinter(String spelExpression, EvaluationContext evaluationContext) { this.spelExpression = StringUtils.defaultIfBlank(spelExpression, null); this.evaluationContext = evaluationContext; } @Override public String print(Object object, Locale locale) { if (spelExpression == null) { return null; } ExpressionParser parser = new SpelExpressionParser(); try { Object result = parser.parseExpression(spelExpression).getValue(evaluationContext, object); return result.toString(); } catch (SpelParseException e) { throw new CustomFormatException("Could not parse spel expression = \"" + spelExpression + "\" in " + CustomFormat.class.getSimpleName() + " annotation: " + e.getMessage()); } } }
andidev/spring-bootstrap-enterprise
src/main/java/org/andidev/applicationname/format/custom/CustomPrinter.java
Java
apache-2.0
1,468
/* * Copyright 2015 - 2017 Atlarge Research Team, * operating at Technische Universiteit Delft * and Vrije Universiteit Amsterdam, the Netherlands. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package science.atlarge.granula.modeller.rule.derivation.time; import science.atlarge.granula.modeller.platform.info.BasicInfo; import science.atlarge.granula.modeller.platform.info.InfoSource; import science.atlarge.granula.modeller.platform.operation.Operation; import science.atlarge.granula.modeller.rule.derivation.DerivationRule; import science.atlarge.granula.modeller.platform.info.Info; import science.atlarge.granula.modeller.platform.info.Source; import java.util.ArrayList; import java.util.List; public class ParentalEndTimeDerivation extends DerivationRule { public ParentalEndTimeDerivation(int level) { super(level); } @Override public boolean execute() { Operation operation = (Operation) entity; Operation parent = operation.getParent(); Info sourceInfo = parent.getInfo("EndTime"); long endTime = Long.parseLong(sourceInfo.getValue()); BasicInfo info = new BasicInfo("EndTime"); List<Source> sources = new ArrayList<>(); sources.add(new InfoSource("ParentalEndTime", sourceInfo)); info.setDescription("The [EndTime] of an (abstract) operation is derived from the largest value of [FilialEndTimes], which are [EndTime]s of all child operations."); info.addInfo(String.valueOf(endTime), sources); operation.addInfo(info); return true; } }
tudelft-atlarge/granula
granula-modeller/src/main/java/science/atlarge/granula/modeller/rule/derivation/time/ParentalEndTimeDerivation.java
Java
apache-2.0
2,098
/* * Copyright (C) 2015 Apptik Project * Copyright (C) 2014 Kalin Maldzhanski * Copyright (C) 2011 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.apptik.comm.jus.error; import io.apptik.comm.jus.NetworkResponse; /** * Error indicating that there was an authentication failure when performing a Request. */ @SuppressWarnings("serial") public class AuthError extends RequestError { public AuthError(NetworkResponse response) { super(response); } public AuthError(NetworkResponse response, String exceptionMessage) { super(response, exceptionMessage); } public AuthError(NetworkResponse response, String exceptionMessage, Throwable reason) { super(response, exceptionMessage, reason); } public AuthError(NetworkResponse response, Throwable reason) { super(response, reason); } }
djodjoni/jus
jus-java/src/main/java/io/apptik/comm/jus/error/AuthError.java
Java
apache-2.0
1,412
/*! * UI development toolkit for HTML5 (OpenUI5) * (c) Copyright 2009-2016 SAP SE or an SAP affiliate company. * Licensed under the Apache License, Version 2.0 - see LICENSE.txt. */ // Provides control sap.m.PageAccessibleLandmarkInfo. sap.ui.define(['sap/ui/core/Element', './library'], function(Element, library) { "use strict"; /** * Constructor for a new <code>sap.m.PageAccessibleLandmarkInfo</code> element. * * @param {string} [sId] Id for the new element, generated automatically if no id is given * @param {object} [mSettings] Initial settings for the new element * * @class * Settings for accessible landmarks which can be applied to the container elements of a <code>sap.m.Page</code> control. * These landmarks are e.g. used by assistive technologies (like screenreaders) to provide a meaningful page overview. * @extends sap.ui.core.Element * * @author SAP SE * @version 1.42.8 * * @constructor * @public * @alias sap.m.PageAccessibleLandmarkInfo * @ui5-metamodel This control/element also will be described in the UI5 (legacy) designtime metamodel */ var PageAccessibleLandmarkInfo = Element.extend("sap.m.PageAccessibleLandmarkInfo", /** @lends sap.m.PageAccessibleLandmarkInfo.prototype */ { metadata : { library : "sap.m", properties : { /** * Landmark role of the root container of the corresponding <code>sap.m.Page</code> control. * * If set to <code>sap.ui.core.AccessibleLandmarkRole.None</code>, no landmark will be added to the container. */ rootRole : {type : "sap.ui.core.AccessibleLandmarkRole", defaultValue : "Region"}, /** * Texts which describes the landmark of the root container of the corresponding <code>sap.m.Page</code> control. * * If not set (and a landmark different than <code>sap.ui.core.AccessibleLandmarkRole.None</code> is defined), a predefined text * is used. */ rootLabel : {type : "string", defaultValue : null}, /** * Landmark role of the content container of the corresponding <code>sap.m.Page</code> control. 
* * If set to <code>sap.ui.core.AccessibleLandmarkRole.None</code>, no landmark will be added to the container. */ contentRole : {type : "sap.ui.core.AccessibleLandmarkRole", defaultValue : "Main"}, /** * Texts which describes the landmark of the content container of the corresponding <code>sap.m.Page</code> control. * * If not set (and a landmark different than <code>sap.ui.core.AccessibleLandmarkRole.None</code> is defined), a predefined text * is used. */ contentLabel : {type : "string", defaultValue : null}, /** * Landmark role of the header container of the corresponding <code>sap.m.Page</code> control. * * If set to <code>sap.ui.core.AccessibleLandmarkRole.None</code>, no landmark will be added to the container. */ headerRole : {type : "sap.ui.core.AccessibleLandmarkRole", defaultValue : "Region"}, /** * Texts which describes the landmark of the header container of the corresponding <code>sap.m.Page</code> control. * * If not set (and a landmark different than <code>sap.ui.core.AccessibleLandmarkRole.None</code> is defined), a predefined text * is used. */ headerLabel : {type : "string", defaultValue : null}, /** * Landmark role of the subheader container of the corresponding <code>sap.m.Page</code> control. * * If set to <code>sap.ui.core.AccessibleLandmarkRole.None</code>, no landmark will be added to the container. */ subHeaderRole : {type : "sap.ui.core.AccessibleLandmarkRole", defaultValue : null}, /** * Texts which describes the landmark of the subheader container of the corresponding <code>sap.m.Page</code> control. * * If not set (and a landmark different than <code>sap.ui.core.AccessibleLandmarkRole.None</code> is defined), a predefined text * is used. */ subHeaderLabel : {type : "string", defaultValue : null}, /** * Landmark role of the footer container of the corresponding <code>sap.m.Page</code> control. * * If set to <code>sap.ui.core.AccessibleLandmarkRole.None</code>, no landmark will be added to the container. 
*/ footerRole : {type : "sap.ui.core.AccessibleLandmarkRole", defaultValue : "Region"}, /** * Texts which describes the landmark of the header container of the corresponding <code>sap.m.Page</code> control. * * If not set (and a landmark different than <code>sap.ui.core.AccessibleLandmarkRole.None</code> is defined), a predefined text * is used. */ footerLabel : {type : "string", defaultValue : null} } }}); /** * Returns the landmark information of the given <code>sap.m.PageAccessibleLandmarkInfo</code> instance * of the given area (e.g. <code>"root"</code>). * * Must only be used with the <code>sap.m.Page</code> control! * * @private */ PageAccessibleLandmarkInfo._getLandmarkInfo = function(oInstance, sArea) { if (!oInstance) { return null; } var sRole = null; var sText = null; var oPropertyInfo = oInstance.getMetadata().getProperty(sArea + "Role"); if (oPropertyInfo) { sRole = oInstance[oPropertyInfo._sGetter](); } if (!sRole) { return null; } oPropertyInfo = oInstance.getMetadata().getProperty(sArea + "Label"); if (oPropertyInfo) { sText = oInstance[oPropertyInfo._sGetter](); } return [sRole.toLowerCase(), sText]; }; /** * Writes the landmark information of the given page and area (e.g. <code>"root"</code>). * * Must only be used with the <code>sap.m.Page</code> control! * * @private */ PageAccessibleLandmarkInfo._writeLandmarkInfo = function(oRm, oPage, sArea) { if (!sap.ui.getCore().getConfiguration().getAccessibility()) { return; } var oInfo = PageAccessibleLandmarkInfo._getLandmarkInfo(oPage.getLandmarkInfo(), sArea); if (!oInfo) { return; } var oLandMarks = { role: oInfo[0] }; if (oInfo[1]) { oLandMarks["label"] = oInfo[1]; } oRm.writeAccessibilityState(oPage, oLandMarks); }; return PageAccessibleLandmarkInfo; });
icgretethe/AwarenessApp
resources/sap/m/PageAccessibleLandmarkInfo-dbg.js
JavaScript
apache-2.0
6,098
/** * File: app/project/ProjOpen.js * Author: liusha */ Ext.define('xdfn.project.ProjOpen', { extend: 'xdfn.project.ui.ProjOpen', grid: null, initComponent: function() { var me = this; me.openStore = Ext.create('xdfn.project.store.ProjOpenJsonStore'); me.rowEditing = Ext.create('Ext.grid.plugin.RowEditing', { errorSummary: false }); me.callParent(arguments); me.down('button[text="增加记录"]').on('click', me.OnAddProjOpenBtnClick, me); me.down('button[text="删除记录"]').on('click', me.OnDeleteProjOpenBtnClick, me); me.down('button[text="导出"]').on('click', me.OnExportProjOpenBtnClick, me); me.rowEditing.on('edit', me.OnGridEdit, me); me.rowEditing.on('beforeedit', me.OnGridBeforeEdit, me); }, OnGridBeforeEdit: function(editor, e, epts) { xdfn.user.Rights.noRights('XMGL-XMZL-31', function() { editor.cancelEdit(); }); }, OnGridEdit: function(editor, e) { var me = this; if (!e.record.dirty) return; var url = './proExec.do?method=modifyKbjl'; if (Ext.isEmpty(e.record.get('ID_VIEW'))) { var rows = me.grid.getSelectionModel().getSelection(); e.record.set('ID_VIEW', rows[0].get('ID_VIEW')); url = './proExec.do?method=addProKbjl'; } e.record.commit(); Ext.Ajax.request({ url: url, method: 'post', params: { ID: e.record.get('ID_VIEW'), V_MANU: e.record.get('V_MANU_VIEW'), V_MACHINE: e.record.get('V_MACHINE_VIEW'), N_CAP: e.record.get('N_CAP_VIEW'), N_SUM_NUM: e.record.get('N_SUM_NUM_VIEW'), N_SUM_MONEY: e.record.get('N_SUM_MONEY_VIEW'), V_MEMO: e.record.get('V_MEMO_VIEW') }, success: function(response, opts) { var result = Ext.JSON.decode(response.responseText); //服务端返回新建ID e.record.set(result.data); e.record.commit(); }, failure: function(response, opts) { Ext.Msg.alert('提示','提交失败!'); } }); }, OnAddProjOpenBtnClick: function(self, e, options) { var me = this, sm = me.grid.getSelectionModel(), rows = sm.getSelection(); xdfn.user.Rights.hasRights('XMGL-XMZL-30', function() { if (rows.length > 0) { me.rowEditing.cancelEdit(); me.openStore.insert(0, {}); me.rowEditing.startEdit(0, 0); } 
else { Ext.Msg.alert('提示','请先选择相应的项目!'); } }); }, OnDeleteProjOpenBtnClick: function(self, e, options) { var me = this, grid = self.up('gridpanel'), store = grid.getStore(), sm = grid.getSelectionModel(), rows = sm.getSelection(); xdfn.user.Rights.hasRights('XMGL-XMZL-32', function() { if (rows.length > 0) { if (Ext.isEmpty(rows[0].get('ID_VIEW'))) { me.rowEditing.cancelEdit(); var i = store.indexOf(rows[0]); store.remove(rows); var count = store.getCount(); if (count > 0) { sm.select((i == count)? --i : i); } return; } Ext.MessageBox.confirm('提示', '确定删除该记录吗?', function(id) { if (id == 'yes') { //TODO 删除记录 Ext.Ajax.request({ url: './proExec.do?method=deleteKbjl', //改为实际的删除请求url method: 'get', params: { ID: rows[0].get('ID_VIEW') }, success: function(response, opts) { me.rowEditing.cancelEdit(); var i = store.indexOf(rows[0]); store.remove(rows); var count = store.getCount(); if (count > 0) { sm.select((i == count)? --i : i); } }, failure: function(response, opts) { Ext.Msg.alert('提示','删除失败!'); } }); } }); } else { Ext.Msg.alert('提示','请选择要删除的记录!'); } }); }, OnExportProjOpenBtnClick: function(self, e, options) { var me = this; //导出为excel文件 xdfn.user.Rights.hasRights('XMGL-XMZL-33', function() { me.openStore.load({ limit: me.openStore.getTotalCount(), scope: this, callback: function(records, operation, success) { var excelXml = Ext.ux.exporter.Exporter.exportGrid(self.up('gridpanel'), 'excel', {title: '项目开标记录'}); document.location = 'data:application/vnd.ms-excel;base64,' + Ext.ux.exporter.Base64.encode(excelXml); } }); }); } });
flowsha/zhwh
web_root/app/static/app/project/ProjOpen.js
JavaScript
apache-2.0
4,986
/* * Copyright 2006-2008 Kazuyuki Shudo. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package dhtaccess.tools; import java.io.UnsupportedEncodingException; import java.net.MalformedURLException; import java.util.Properties; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.commons.cli.PosixParser; import dhtaccess.core.DHTAccessor; public class Remove { private static final String COMMAND = "rm"; private static final String ENCODE = "UTF-8"; private static final String DEFAULT_GATEWAY = "http://opendht.nyuld.net:5851/"; private static void usage(String command) { System.out.println("usage: " + command + " [-h] [-g <gateway>] [-t <ttl (sec)>] <key> <value> <secret>"); } public static void main(String[] args) { int ttl = 3600; // parse properties Properties prop = System.getProperties(); String gateway = prop.getProperty("dhtaccess.gateway"); if (gateway == null || gateway.length() <= 0) { gateway = DEFAULT_GATEWAY; } // parse options Options options = new Options(); options.addOption("h", "help", false, "print help"); options.addOption("g", "gateway", true, "gateway URI, list at http://opendht.org/servers.txt"); options.addOption("t", "ttl", true, "how long (in seconds) to store the value"); CommandLineParser parser = new PosixParser(); CommandLine cmd = null; try { cmd = parser.parse(options, args); } catch (ParseException e) { 
System.out.println("There is an invalid option."); e.printStackTrace(); System.exit(1); } String optVal; if (cmd.hasOption('h')) { usage(COMMAND); System.exit(1); } optVal = cmd.getOptionValue('g'); if (optVal != null) { gateway = optVal; } optVal = cmd.getOptionValue('t'); if (optVal != null) { ttl = Integer.parseInt(optVal); } args = cmd.getArgs(); // parse arguments if (args.length < 3) { usage(COMMAND); System.exit(1); } byte[] key = null, value = null, secret = null; try { key = args[0].getBytes(ENCODE); value = args[1].getBytes(ENCODE); secret = args[2].getBytes(ENCODE); } catch (UnsupportedEncodingException e1) { // NOTREACHED } // prepare for RPC DHTAccessor accessor = null; try { accessor = new DHTAccessor(gateway); } catch (MalformedURLException e) { e.printStackTrace(); System.exit(1); } // RPC int res = accessor.remove(key, value, ttl, secret); String resultString; switch (res) { case 0: resultString = "Success"; break; case 1: resultString = "Capacity"; break; case 2: resultString = "Again"; break; default: resultString = "???"; } System.out.println(resultString); } }
shudo/dht-access
src/dhtaccess/tools/Remove.java
Java
apache-2.0
3,313
/* * * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.flex.compiler.problems; import org.apache.flex.compiler.tree.as.IASNode; /** * Diagnostic emitted when the code generator detects * a definition that conflicts with an inherited definition * from a superclass. */ public final class ConflictingInheritedNameInNamespaceProblem extends SemanticProblem { public static final String DESCRIPTION = "A conflict exists with inherited definition ${declName} in namespace ${nsName}."; public static final int errorCode = 1152; public ConflictingInheritedNameInNamespaceProblem(IASNode site, String declName, String nsName) { super(site); this.declName = declName; this.nsName = nsName; } public final String declName; public final String nsName; }
adufilie/flex-falcon
compiler/src/org/apache/flex/compiler/problems/ConflictingInheritedNameInNamespaceProblem.java
Java
apache-2.0
1,604
// jQueryTemplate.cs // Script#/Libraries/jQuery/Templating // This source code is subject to terms and conditions of the Apache License, Version 2.0. // using System; using System.Collections; using System.Html; using System.Net; using System.Runtime.CompilerServices; using jQueryApi; namespace jQueryApi.Templating { /// <summary> /// Represents a jQuery template that has been parsed and can /// be used to generate HTML. /// </summary> [ScriptIgnoreNamespace] [ScriptImport] public sealed class jQueryTemplate { private jQueryTemplate() { } } }
nikhilk/scriptsharp
src/Libraries/jQuery/jQuery.Templating/jQueryTemplate.cs
C#
apache-2.0
602
/* * Copyright 2018 The Kubeflow Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import * as React from 'react'; import Hr from './Hr'; import { create } from 'react-test-renderer'; describe('Hr', () => { it('renders with the right styles', () => { const tree = create(<Hr fields={[]} />); expect(tree).toMatchSnapshot(); }); });
kubeflow/pipelines
frontend/src/atoms/Hr.test.tsx
TypeScript
apache-2.0
864
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ /* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ /* * Content policy implementation that prevents all loads of images, * subframes, etc from protocols that don't return data but rather open * applications (such as mailto). */ #include "nsNoDataProtocolContentPolicy.h" #include "nsIDOMWindow.h" #include "nsString.h" #include "nsIProtocolHandler.h" #include "nsIIOService.h" #include "nsIExternalProtocolHandler.h" #include "nsNetUtil.h" NS_IMPL_ISUPPORTS1(nsNoDataProtocolContentPolicy, nsIContentPolicy) NS_IMETHODIMP nsNoDataProtocolContentPolicy::ShouldLoad(uint32_t aContentType, nsIURI *aContentLocation, nsIURI *aRequestingLocation, nsISupports *aRequestingContext, const nsACString &aMimeGuess, nsISupports *aExtra, nsIPrincipal *aRequestPrincipal, int16_t *aDecision) { *aDecision = nsIContentPolicy::ACCEPT; // Don't block for TYPE_OBJECT since such URIs are sometimes loaded by the // plugin, so they don't necessarily open external apps // TYPE_WEBSOCKET loads can only go to ws:// or wss://, so we don't need to // concern ourselves with them. if (aContentType != TYPE_DOCUMENT && aContentType != TYPE_SUBDOCUMENT && aContentType != TYPE_OBJECT && aContentType != TYPE_WEBSOCKET) { // The following are just quick-escapes for the most common cases // where we would allow the content to be loaded anyway. 
nsAutoCString scheme; aContentLocation->GetScheme(scheme); if (scheme.EqualsLiteral("http") || scheme.EqualsLiteral("https") || scheme.EqualsLiteral("ftp") || scheme.EqualsLiteral("file") || scheme.EqualsLiteral("chrome")) { return NS_OK; } bool shouldBlock; nsresult rv = NS_URIChainHasFlags(aContentLocation, nsIProtocolHandler::URI_DOES_NOT_RETURN_DATA, &shouldBlock); if (NS_SUCCEEDED(rv) && shouldBlock) { *aDecision = nsIContentPolicy::REJECT_REQUEST; } } return NS_OK; } NS_IMETHODIMP nsNoDataProtocolContentPolicy::ShouldProcess(uint32_t aContentType, nsIURI *aContentLocation, nsIURI *aRequestingLocation, nsISupports *aRequestingContext, const nsACString &aMimeGuess, nsISupports *aExtra, nsIPrincipal *aRequestPrincipal, int16_t *aDecision) { return ShouldLoad(aContentType, aContentLocation, aRequestingLocation, aRequestingContext, aMimeGuess, aExtra, aRequestPrincipal, aDecision); }
sergecodd/FireFox-OS
B2G/gecko/content/base/src/nsNoDataProtocolContentPolicy.cpp
C++
apache-2.0
3,310
/** * JBoss, Home of Professional Open Source. * Copyright 2014 Red Hat, Inc., and individual contributors * as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jboss.pnc.rest.provider; import org.jboss.pnc.model.ProductMilestone; import org.jboss.pnc.model.ProductMilestoneRelease; import org.jboss.pnc.rest.restmodel.ProductMilestoneReleaseRest; import org.jboss.pnc.spi.datastore.repositories.PageInfoProducer; import org.jboss.pnc.spi.datastore.repositories.ProductMilestoneReleaseRepository; import org.jboss.pnc.spi.datastore.repositories.ProductMilestoneRepository; import org.jboss.pnc.spi.datastore.repositories.SortInfoProducer; import org.jboss.pnc.spi.datastore.repositories.api.RSQLPredicateProducer; import javax.ejb.Stateless; import javax.inject.Inject; import java.util.function.Function; @Stateless public class ProductMilestoneReleaseProvider extends AbstractProvider<ProductMilestoneRelease, ProductMilestoneReleaseRest> { private ProductMilestoneRepository milestoneRepository; private ProductMilestoneReleaseRepository releaseRepository; @Inject public ProductMilestoneReleaseProvider(ProductMilestoneReleaseRepository releaseRepository, ProductMilestoneRepository milestoneRepository, RSQLPredicateProducer rsqlPredicateProducer, SortInfoProducer sortInfoProducer, PageInfoProducer pageInfoProducer) { super(releaseRepository, rsqlPredicateProducer, sortInfoProducer, pageInfoProducer); this.releaseRepository = releaseRepository; 
this.milestoneRepository = milestoneRepository; } // needed for EJB/CDI @Deprecated public ProductMilestoneReleaseProvider() { } @Override protected Function<? super ProductMilestoneRelease, ? extends ProductMilestoneReleaseRest> toRESTModel() { return ProductMilestoneReleaseRest::new; } @Override protected Function<? super ProductMilestoneReleaseRest, ? extends ProductMilestoneRelease> toDBModel() { throw new IllegalStateException("ProductMilestoneRelease entity is not to be created via REST"); } public ProductMilestoneReleaseRest latestForMilestone(Integer milestoneId) { ProductMilestone milestone = milestoneRepository.queryById(milestoneId); ProductMilestoneRelease release = milestone == null ? null : releaseRepository.findLatestByMilestone(milestone); return release == null ? null : toRESTModel().apply(release); } }
dans123456/pnc
rest/src/main/java/org/jboss/pnc/rest/provider/ProductMilestoneReleaseProvider.java
Java
apache-2.0
3,113
/** * Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ define(['lodash', 'log', 'event_channel', './abstract-source-gen-visitor', './connector-action-visitor', './variable-declaration-visitor', './connector-declaration-visitor', './statement-visitor-factory'], function(_, log, EventChannel, AbstractSourceGenVisitor, ConnectorActionVisitor, VariableDeclarationVisitor, ConnectorDeclarationVisitor, StatementVisitorFactory) { /** * @param {ASTVisitor} parent - parent visitor * @constructor */ var ConnectorDefinitionVisitor = function (parent) { AbstractSourceGenVisitor.call(this, parent); }; ConnectorDefinitionVisitor.prototype = Object.create(AbstractSourceGenVisitor.prototype); ConnectorDefinitionVisitor.prototype.constructor = ConnectorDefinitionVisitor; ConnectorDefinitionVisitor.prototype.canVisitConnectorDefinition = function(connectorDefinition){ return true; }; /** * Begin the visit and generate the source * @param {ConnectorDefinition} connectorDefinition - Connector Definition */ ConnectorDefinitionVisitor.prototype.beginVisitConnectorDefinition = function(connectorDefinition){ /** * set the configuration start for the connector definition language construct * If we need to add additional parameters which are dynamically added to the configuration start * that particular source generation has to be constructed here */ var self = this; var argumentsSrc = ""; 
_.forEach(connectorDefinition.getAnnotations(), function(annotation) { if (!_.isEmpty(annotation.value)) { var constructedPathAnnotation; if (annotation.key.indexOf(":") === -1) { constructedPathAnnotation = '@' + annotation.key + '("' + annotation.value + '")\n'; } else { constructedPathAnnotation = '@' + annotation.key.split(":")[0] + '(' + annotation.key.split(":")[1] + ' = "' + annotation.value + '")\n'; } self.appendSource(constructedPathAnnotation); } }); _.forEach(connectorDefinition.getArguments(), function(argument, index){ argumentsSrc += argument.type + " "; argumentsSrc += argument.identifier; if (connectorDefinition.getArguments().length - 1 != index) { argumentsSrc += ", "; } }); var constructedSourceSegment = 'connector ' + connectorDefinition.getConnectorName() + ' (' + argumentsSrc + ')' + ' {\n'; this.appendSource(constructedSourceSegment); log.debug('Begin Visit Connector Definition'); }; ConnectorDefinitionVisitor.prototype.visitConnectorDefinition = function(connectorDefinition){ log.debug('Visit Connector Definition'); }; /** * End visiting the connector definition * @param {ConnectorDefinition} connectorDefinition - Connector Definition */ ConnectorDefinitionVisitor.prototype.endVisitConnectorDefinition = function(connectorDefinition){ this.appendSource("}\n"); this.getParent().appendSource(this.getGeneratedSource()); log.debug('End Visit Connector Definition'); }; /** * Visit Connector Action * @param {ConnectorAction} connectorAction */ ConnectorDefinitionVisitor.prototype.visitConnectorAction = function(connectorAction){ var connectorActionVisitor = new ConnectorActionVisitor(this); connectorAction.accept(connectorActionVisitor); }; /** * Visit Connector Declaration * @param {ConnectorDeclaration} connectorDeclaration */ ConnectorDefinitionVisitor.prototype.visitConnectorDeclaration = function(connectorDeclaration){ var connectorDeclarationVisitor = new ConnectorDeclarationVisitor(this); 
connectorDeclaration.accept(connectorDeclarationVisitor); }; /** * Visit Variable Declaration * @param {VariableDeclaration} variableDeclaration */ ConnectorDefinitionVisitor.prototype.visitVariableDeclaration = function(variableDeclaration){ var variableDeclarationVisitor = new VariableDeclarationVisitor(this); variableDeclaration.accept(variableDeclarationVisitor); }; /** * Visit Statements * @param {Statement} statement */ ConnectorDefinitionVisitor.prototype.visitStatement = function (statement) { var statementVisitorFactory = new StatementVisitorFactory(); var statementVisitor = statementVisitorFactory.getStatementVisitor(statement, this); statement.accept(statementVisitor); }; return ConnectorDefinitionVisitor; });
nu1silva/product-apim
product/carbon-home/resources/composer/web/js/ballerina/visitors/source-gen/connector-definition-visitor.js
JavaScript
apache-2.0
5,798
/* -*- Mode: C++; indent-tabs-mode: nil; c-basic-offset: 4 -*- * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ /* This program reads an ELF file and computes information about * redundancies. */ #include <algorithm> #include <fstream> #include <string> #include <vector> #include <map> #include <elf.h> #include <sys/mman.h> #include <sys/stat.h> #include <fcntl.h> #include <unistd.h> #include <errno.h> #include <getopt.h> //---------------------------------------------------------------------- char* opt_type = "func"; char* opt_section = ".text"; //---------------------------------------------------------------------- static void hexdump(ostream& out, const char* bytes, size_t count) { hex(out); size_t off = 0; while (off < count) { out.form("%08lx: ", off); const char* p = bytes + off; int j = 0; while (j < 16) { out.form("%02x", p[j++] & 0xff); if (j + off >= count) break; out.form("%02x ", p[j++] & 0xff); if (j + off >= count) break; } // Pad for (; j < 16; ++j) out << ((j%2) ? " " : " "); for (j = 0; j < 16; ++j) { if (j + off < count) out.put(isprint(p[j]) ? 
p[j] : '.'); } out << endl; off += 16; } } //---------------------------------------------------------------------- int verify_elf_header(const Elf32_Ehdr* hdr) { if (hdr->e_ident[EI_MAG0] != ELFMAG0 || hdr->e_ident[EI_MAG1] != ELFMAG1 || hdr->e_ident[EI_MAG2] != ELFMAG2 || hdr->e_ident[EI_MAG3] != ELFMAG3) { cerr << "not an elf file" << endl; return -1; } if (hdr->e_ident[EI_CLASS] != ELFCLASS32) { cerr << "not a 32-bit elf file" << endl; return -1; } if (hdr->e_ident[EI_DATA] != ELFDATA2LSB) { cerr << "not a little endian elf file" << endl; return -1; } if (hdr->e_ident[EI_VERSION] != EV_CURRENT) { cerr << "incompatible version" << endl; return -1; } return 0; } //---------------------------------------------------------------------- class elf_symbol : public Elf32_Sym { public: elf_symbol(const Elf32_Sym& sym) { ::memcpy(static_cast<Elf32_Sym*>(this), &sym, sizeof(Elf32_Sym)); } friend bool operator==(const elf_symbol& lhs, const elf_symbol& rhs) { return 0 == ::memcmp(static_cast<const Elf32_Sym*>(&lhs), static_cast<const Elf32_Sym*>(&rhs), sizeof(Elf32_Sym)); } }; //---------------------------------------------------------------------- static const char* st_bind(unsigned char info) { switch (ELF32_ST_BIND(info)) { case STB_LOCAL: return "local"; case STB_GLOBAL: return "global"; case STB_WEAK: return "weak"; default: return "unknown"; } } static const char* st_type(unsigned char info) { switch (ELF32_ST_TYPE(info)) { case STT_NOTYPE: return "none"; case STT_OBJECT: return "object"; case STT_FUNC: return "func"; case STT_SECTION: return "section"; case STT_FILE: return "file"; default: return "unknown"; } } static unsigned char st_type(const char* type) { if (strcmp(type, "none") == 0) { return STT_NOTYPE; } else if (strcmp(type, "object") == 0) { return STT_OBJECT; } else if (strcmp(type, "func") == 0) { return STT_FUNC; } else { return 0; } } //---------------------------------------------------------------------- typedef vector<elf_symbol> elf_symbol_table; 
typedef map< basic_string<char>, elf_symbol_table > elf_text_map; void process_mapping(char* mapping, size_t size) { const Elf32_Ehdr* ehdr = reinterpret_cast<Elf32_Ehdr*>(mapping); if (verify_elf_header(ehdr) < 0) return; // find the section headers const Elf32_Shdr* shdrs = reinterpret_cast<Elf32_Shdr*>(mapping + ehdr->e_shoff); // find the section header string table, .shstrtab const Elf32_Shdr* shstrtabsh = shdrs + ehdr->e_shstrndx; const char* shstrtab = mapping + shstrtabsh->sh_offset; // find the sections we care about const Elf32_Shdr *symtabsh, *strtabsh, *textsh; int textndx; for (int i = 0; i < ehdr->e_shnum; ++i) { basic_string<char> name(shstrtab + shdrs[i].sh_name); if (name == opt_section) { textsh = shdrs + i; textndx = i; } else if (name == ".symtab") { symtabsh = shdrs + i; } else if (name == ".strtab") { strtabsh = shdrs + i; } } // find the .strtab char* strtab = mapping + strtabsh->sh_offset; // find the .text char* text = mapping + textsh->sh_offset; int textaddr = textsh->sh_addr; // find the symbol table int nentries = symtabsh->sh_size / sizeof(Elf32_Sym); Elf32_Sym* symtab = reinterpret_cast<Elf32_Sym*>(mapping + symtabsh->sh_offset); // look for symbols in the .text section elf_text_map textmap; for (int i = 0; i < nentries; ++i) { const Elf32_Sym* sym = symtab + i; if (sym->st_shndx == textndx && ELF32_ST_TYPE(sym->st_info) == st_type(opt_type) && sym->st_size) { basic_string<char> functext(text + sym->st_value - textaddr, sym->st_size); elf_symbol_table& syms = textmap[functext]; if (syms.end() == find(syms.begin(), syms.end(), elf_symbol(*sym))) syms.insert(syms.end(), *sym); } } int uniquebytes = 0, totalbytes = 0; int uniquecount = 0, totalcount = 0; for (elf_text_map::const_iterator entry = textmap.begin(); entry != textmap.end(); ++entry) { const elf_symbol_table& syms = entry->second; if (syms.size() <= 1) continue; int sz = syms.begin()->st_size; uniquebytes += sz; totalbytes += sz * syms.size(); uniquecount += 1; totalcount += 
syms.size(); for (elf_symbol_table::const_iterator sym = syms.begin(); sym != syms.end(); ++sym) cout << strtab + sym->st_name << endl; dec(cout); cout << syms.size() << " copies of " << sz << " bytes"; cout << " (" << ((syms.size() - 1) * sz) << " redundant bytes)" << endl; hexdump(cout, entry->first.data(), entry->first.size()); cout << endl; } dec(cout); cout << "bytes unique=" << uniquebytes << ", total=" << totalbytes << endl; cout << "entries unique=" << uniquecount << ", total=" << totalcount << endl; } void process_file(const char* name) { int fd = open(name, O_RDWR); if (fd >= 0) { struct stat statbuf; if (fstat(fd, &statbuf) >= 0) { size_t size = statbuf.st_size; void* mapping = mmap(0, size, PROT_READ, MAP_SHARED, fd, 0); if (mapping != MAP_FAILED) { process_mapping(static_cast<char*>(mapping), size); munmap(mapping, size); } } close(fd); } } static void usage() { cerr << "foldelf [--section=<section>] [--type=<type>] [file ...]\n\ --section, -s the section of the ELF file to scan; defaults\n\ to ``.text''. Valid values include any section\n\ of the ELF file.\n\ --type, -t the type of object to examine in the section;\n\ defaults to ``func''. Valid values include\n\ ``none'', ``func'', or ``object''.\n"; } static struct option opts[] = { { "type", required_argument, 0, 't' }, { "section", required_argument, 0, 's' }, { "help", no_argument, 0, '?' }, { 0, 0, 0, 0 } }; int main(int argc, char* argv[]) { while (1) { int option_index = 0; int c = getopt_long(argc, argv, "t:s:", opts, &option_index); if (c < 0) break; switch (c) { case 't': opt_type = optarg; break; case 's': opt_section = optarg; break; case '?': usage(); break; } } for (int i = optind; i < argc; ++i) process_file(argv[i]); return 0; }
sergecodd/FireFox-OS
B2G/gecko/tools/footprint/foldelf.cpp
C++
apache-2.0
8,544
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.java.util.common; import com.google.common.base.Preconditions; import com.google.common.collect.Iterators; import org.apache.druid.java.util.common.io.Closer; import org.apache.druid.java.util.common.parsers.CloseableIterator; import javax.annotation.Nullable; import java.io.Closeable; import java.io.IOException; import java.util.Comparator; import java.util.Iterator; import java.util.List; public class CloseableIterators { public static <T> CloseableIterator<T> concat(List<? extends CloseableIterator<? extends T>> iterators) { final Closer closer = Closer.create(); iterators.forEach(closer::register); final Iterator<T> innerIterator = Iterators.concat(iterators.iterator()); return wrap(innerIterator, closer); } public static <T> CloseableIterator<T> mergeSorted( List<? extends CloseableIterator<? 
extends T>> iterators, Comparator<T> comparator ) { Preconditions.checkNotNull(comparator); final Closer closer = Closer.create(); iterators.forEach(closer::register); final Iterator<T> innerIterator = Iterators.mergeSorted(iterators, comparator); return wrap(innerIterator, closer); } public static <T> CloseableIterator<T> wrap(Iterator<T> innerIterator, @Nullable Closeable closeable) { return new CloseableIterator<T>() { private boolean closed; @Override public boolean hasNext() { return innerIterator.hasNext(); } @Override public T next() { return innerIterator.next(); } @Override public void close() throws IOException { if (!closed) { if (closeable != null) { closeable.close(); } closed = true; } } }; } public static <T> CloseableIterator<T> withEmptyBaggage(Iterator<T> innerIterator) { return wrap(innerIterator, null); } private CloseableIterators() {} }
dkhwangbo/druid
java-util/src/main/java/org/apache/druid/java/util/common/CloseableIterators.java
Java
apache-2.0
2,771
//======= Copyright (c) Valve Corporation, All rights reserved. =============== using UnityEngine; using System.Collections; using System.Collections.Generic; using System.Linq; using UnityEngine.Serialization; namespace Valve.VR { public class SteamVR_Settings : ScriptableObject { private static SteamVR_Settings _instance; public static SteamVR_Settings instance { get { LoadInstance(); return _instance; } } public bool pauseGameWhenDashboardVisible = true; public bool lockPhysicsUpdateRateToRenderFrequency = true; public ETrackingUniverseOrigin trackingSpace { get { return trackingSpaceOrigin; } set { trackingSpaceOrigin = value; if (SteamVR_Behaviour.isPlaying) SteamVR_Action_Pose.SetTrackingUniverseOrigin(trackingSpaceOrigin); } } [SerializeField] [FormerlySerializedAsAttribute("trackingSpace")] private ETrackingUniverseOrigin trackingSpaceOrigin = ETrackingUniverseOrigin.TrackingUniverseStanding; [Tooltip("Filename local to the project root (or executable, in a build)")] public string actionsFilePath = "actions.json"; [Tooltip("Path local to the Assets folder")] public string steamVRInputPath = "SteamVR_Input"; public SteamVR_UpdateModes inputUpdateMode = SteamVR_UpdateModes.OnUpdate; public SteamVR_UpdateModes poseUpdateMode = SteamVR_UpdateModes.OnPreCull; public bool activateFirstActionSetOnStart = true; [Tooltip("This is the app key the unity editor will use to identify your application. 
(can be \"steam.app.[appid]\" to persist bindings between editor steam)")] public string editorAppKey; [Tooltip("The SteamVR Plugin can automatically make sure VR is enabled in your player settings and if not, enable it.")] public bool autoEnableVR = true; [Space()] [Tooltip("This determines if we use legacy mixed reality mode (3rd controller/tracker device connected) or the new input system mode (pose / input source)")] public bool legacyMixedRealityCamera = true; [Tooltip("[NON-LEGACY] This is the pose action that will be used for positioning a mixed reality camera if connected")] public SteamVR_Action_Pose mixedRealityCameraPose = SteamVR_Input.GetPoseAction("ExternalCamera"); [Tooltip("[NON-LEGACY] This is the input source to check on the pose for the mixed reality camera")] public SteamVR_Input_Sources mixedRealityCameraInputSource = SteamVR_Input_Sources.Camera; [Tooltip("[NON-LEGACY] Auto enable mixed reality action set if file exists")] public bool mixedRealityActionSetAutoEnable = true; public bool IsInputUpdateMode(SteamVR_UpdateModes tocheck) { return (inputUpdateMode & tocheck) == tocheck; } public bool IsPoseUpdateMode(SteamVR_UpdateModes tocheck) { return (poseUpdateMode & tocheck) == tocheck; } public static void VerifyScriptableObject() { LoadInstance(); } private static void LoadInstance() { if (_instance == null) { _instance = Resources.Load<SteamVR_Settings>("SteamVR_Settings"); if (_instance == null) { _instance = SteamVR_Settings.CreateInstance<SteamVR_Settings>(); #if UNITY_EDITOR string folderPath = SteamVR.GetResourcesFolderPath(true); string assetPath = System.IO.Path.Combine(folderPath, "SteamVR_Settings.asset"); UnityEditor.AssetDatabase.CreateAsset(_instance, assetPath); UnityEditor.AssetDatabase.SaveAssets(); #endif } if (string.IsNullOrEmpty(_instance.editorAppKey)) { _instance.editorAppKey = SteamVR.GenerateAppKey(); Debug.Log("<b>[SteamVR Setup]</b> Generated you an editor app key of: " + _instance.editorAppKey + ". 
This lets the editor tell SteamVR what project this is. Has no effect on builds. This can be changed in Assets/SteamVR/Resources/SteamVR_Settings"); #if UNITY_EDITOR UnityEditor.EditorUtility.SetDirty(_instance); UnityEditor.AssetDatabase.SaveAssets(); #endif } } } } }
googlevr/tilt-brush
Assets/ThirdParty/SteamVR/Scripts/SteamVR_Settings.cs
C#
apache-2.0
4,631
using System.Collections.Generic; namespace DocGenerator.Documentation.Blocks { /// <summary> /// Used to keep a line of code (could be multiple e.g fluent syntax) and its annotations in one logical unit. /// So they do not suffer from reordering based on line number when writing out the documentation /// </summary> public class CombinedBlock : IDocumentationBlock { public string Value { get; } public IEnumerable<IDocumentationBlock> Blocks { get; } public int LineNumber { get; } public CombinedBlock(IEnumerable<IDocumentationBlock> blocks, int lineNumber) { Blocks = blocks; LineNumber = lineNumber; Value = null; } } }
adam-mccoy/elasticsearch-net
src/CodeGeneration/DocGenerator/Documentation/Blocks/CombinedBlock.cs
C#
apache-2.0
661
package com.code.constant; /** * Created by niu on 2017/8/17. */ public class StringEvent { //网络状态改变 public static String NET_STATE_CHANGE = "net_state_change"; }
niuzhijun66/NiuStudyDemo
app/src/main/java/com/code/constant/StringEvent.java
Java
apache-2.0
187
import numpy as np from math import sin, pi, cos from banti.glyph import Glyph halfsize = 40 size = 2*halfsize + 1 picture = np.zeros((size, size)) for t in range(-135, 135): x = round(halfsize + halfsize * cos(pi * t / 180)) y = round(halfsize + halfsize * sin(pi * t / 180)) picture[x][y] = 1 zoomsz = 1 * halfsize b = Glyph(['O', 0, 0, size, size, 0, 0, 0, 0, None]) b.set_pix(picture) c = Glyph() for t in range(0, 360, 15): x = round(zoomsz + zoomsz * cos(pi * t / 180)) y = round(zoomsz + zoomsz * sin(pi * t / 180)) b.set_xy_wh((x, y, size, size)) c = c + b print(b) print(c)
rakeshvar/telugu_ocr_banti
tests/glyph_test.py
Python
apache-2.0
613
/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ #include "tensorflow/contrib/lite/testing/tflite_driver.h" #include <iostream> #include "tensorflow/contrib/lite/builtin_op_data.h" #include "tensorflow/contrib/lite/testing/split.h" namespace tflite { namespace testing { namespace { // Returns the value in the given position in a tensor. template <typename T> T Value(const TfLitePtrUnion& data, int index); template <> float Value(const TfLitePtrUnion& data, int index) { return data.f[index]; } template <> int32_t Value(const TfLitePtrUnion& data, int index) { return data.i32[index]; } template <> int64_t Value(const TfLitePtrUnion& data, int index) { return data.i64[index]; } template <> uint8_t Value(const TfLitePtrUnion& data, int index) { return data.uint8[index]; } template <> bool Value(const TfLitePtrUnion& data, int index) { return data.b[index]; } template <typename T> void SetTensorData(const std::vector<T>& values, TfLitePtrUnion* data) { T* input_ptr = reinterpret_cast<T*>(data->raw); for (const T& v : values) { *input_ptr = v; ++input_ptr; } } } // namespace class TfLiteDriver::Expectation { public: Expectation() { data_.raw = nullptr; num_elements_ = 0; } ~Expectation() { delete[] data_.raw; } template <typename T> void SetData(const string& csv_values) { const auto& values = testing::Split<T>(csv_values, ","); num_elements_ = values.size(); data_.raw 
= new char[num_elements_ * sizeof(T)]; SetTensorData(values, &data_); } bool Check(bool verbose, const TfLiteTensor& tensor) { switch (tensor.type) { case kTfLiteFloat32: return TypedCheck<float>(verbose, tensor); case kTfLiteInt32: return TypedCheck<int32_t>(verbose, tensor); case kTfLiteInt64: return TypedCheck<int64_t>(verbose, tensor); case kTfLiteUInt8: return TypedCheck<uint8_t>(verbose, tensor); case kTfLiteBool: return TypedCheck<bool>(verbose, tensor); default: fprintf(stderr, "Unsupported type %d in Check\n", tensor.type); return false; } } private: template <typename T> bool TypedCheck(bool verbose, const TfLiteTensor& tensor) { // TODO(ahentz): must find a way to configure the tolerance. constexpr double kRelativeThreshold = 1e-2f; constexpr double kAbsoluteThreshold = 1e-4f; size_t tensor_size = tensor.bytes / sizeof(T); if (tensor_size != num_elements_) { std::cerr << "Expected a tensor with " << num_elements_ << " elements, got " << tensor_size << std::endl; return false; } bool good_output = true; for (int i = 0; i < tensor_size; ++i) { float computed = Value<T>(tensor.data, i); float reference = Value<T>(data_, i); float diff = std::abs(computed - reference); bool error_is_large = false; // For very small numbers, try absolute error, otherwise go with // relative. 
if (std::abs(reference) < kRelativeThreshold) { error_is_large = (diff > kAbsoluteThreshold); } else { error_is_large = (diff > kRelativeThreshold * std::abs(reference)); } if (error_is_large) { good_output = false; if (verbose) { std::cerr << " index " << i << ": got " << computed << ", but expected " << reference << std::endl; } } } return good_output; } TfLitePtrUnion data_; size_t num_elements_; }; TfLiteDriver::TfLiteDriver(bool use_nnapi) : use_nnapi_(use_nnapi) {} TfLiteDriver::~TfLiteDriver() {} void TfLiteDriver::AllocateTensors() { if (must_allocate_tensors_) { if (interpreter_->AllocateTensors() != kTfLiteOk) { Invalidate("Failed to allocate tensors"); return; } ResetLSTMStateTensors(); must_allocate_tensors_ = false; } } void TfLiteDriver::LoadModel(const string& bin_file_path) { if (!IsValid()) return; model_ = FlatBufferModel::BuildFromFile(GetFullPath(bin_file_path).c_str()); if (!model_) { Invalidate("Failed to mmap model " + bin_file_path); return; } ops::builtin::BuiltinOpResolver builtins; InterpreterBuilder(*model_, builtins)(&interpreter_); if (!interpreter_) { Invalidate("Failed build interpreter"); return; } must_allocate_tensors_ = true; } void TfLiteDriver::ResetTensor(int id) { if (!IsValid()) return; auto* tensor = interpreter_->tensor(id); memset(tensor->data.raw, 0, tensor->bytes); } void TfLiteDriver::ReshapeTensor(int id, const string& csv_values) { if (!IsValid()) return; if (interpreter_->ResizeInputTensor( id, testing::Split<int>(csv_values, ",")) != kTfLiteOk) { Invalidate("Failed to resize input tensor " + std::to_string(id)); return; } must_allocate_tensors_ = true; } void TfLiteDriver::SetInput(int id, const string& csv_values) { if (!IsValid()) return; auto* tensor = interpreter_->tensor(id); switch (tensor->type) { case kTfLiteFloat32: { const auto& values = testing::Split<float>(csv_values, ","); if (!CheckSizes<float>(tensor->bytes, values.size())) return; SetTensorData(values, &tensor->data); break; } case kTfLiteInt32: { 
const auto& values = testing::Split<int32_t>(csv_values, ","); if (!CheckSizes<int32_t>(tensor->bytes, values.size())) return; SetTensorData(values, &tensor->data); break; } case kTfLiteInt64: { const auto& values = testing::Split<int64_t>(csv_values, ","); if (!CheckSizes<int64_t>(tensor->bytes, values.size())) return; SetTensorData(values, &tensor->data); break; } case kTfLiteUInt8: { const auto& values = testing::Split<uint8_t>(csv_values, ","); if (!CheckSizes<uint8_t>(tensor->bytes, values.size())) return; SetTensorData(values, &tensor->data); break; } case kTfLiteBool: { const auto& values = testing::Split<bool>(csv_values, ","); if (!CheckSizes<bool>(tensor->bytes, values.size())) return; SetTensorData(values, &tensor->data); break; } default: fprintf(stderr, "Unsupported type %d in SetInput\n", tensor->type); Invalidate("Unsupported tensor data type"); return; } } void TfLiteDriver::SetExpectation(int id, const string& csv_values) { if (!IsValid()) return; auto* tensor = interpreter_->tensor(id); if (expected_output_.count(id) != 0) { fprintf(stderr, "Overridden expectation for tensor %d\n", id); Invalidate("Overridden expectation"); } expected_output_[id].reset(new Expectation); switch (tensor->type) { case kTfLiteFloat32: expected_output_[id]->SetData<float>(csv_values); break; case kTfLiteInt32: expected_output_[id]->SetData<int32_t>(csv_values); break; case kTfLiteInt64: expected_output_[id]->SetData<int64_t>(csv_values); break; case kTfLiteUInt8: expected_output_[id]->SetData<uint8_t>(csv_values); break; case kTfLiteBool: expected_output_[id]->SetData<bool>(csv_values); break; default: fprintf(stderr, "Unsupported type %d in SetExpectation\n", tensor->type); Invalidate("Unsupported tensor data type"); return; } } void TfLiteDriver::Invoke() { if (!IsValid()) return; if (interpreter_->Invoke() != kTfLiteOk) { Invalidate("Failed to invoke interpreter"); } } bool TfLiteDriver::CheckResults() { if (!IsValid()) return false; bool success = true; for (const 
auto& p : expected_output_) { int id = p.first; auto* tensor = interpreter_->tensor(id); if (!p.second->Check(/*verbose=*/false, *tensor)) { // Do not invalidate anything here. Instead, simply output the // differences and return false. Invalidating would prevent all // subsequent invocations from running.. std::cerr << "There were errors in invocation '" << GetInvocationId() << "', output tensor '" << id << "':" << std::endl; p.second->Check(/*verbose=*/true, *tensor); success = false; SetOverallSuccess(false); } } expected_output_.clear(); return success; } void TfLiteDriver::ResetLSTMStateTensors() { // This is a workaround for initializing state tensors for LSTM. // TODO(ycling): Refactoring and find a better way to initialize state // tensors. Maybe write the reset instructions into the test data. for (auto node_index : interpreter_->execution_plan()) { const auto& node_and_reg = interpreter_->node_and_registration(node_index); const auto& node = node_and_reg->first; const auto& registration = node_and_reg->second; if (registration.builtin_code == tflite::BuiltinOperator_LSTM) { const auto* params = reinterpret_cast<const TfLiteLSTMParams*>(node.builtin_data); if (params->kernel_type == kTfLiteLSTMFullKernel && node.outputs->size >= 2) { // The first 2 outputs of LSTM are state tensors. for (int i = 0; i < 2; ++i) { int node_index = node.outputs->data[i]; ResetTensor(node_index); } } else if (params->kernel_type == kTfLiteLSTMBasicKernel && node.inputs->size == 5) { // The 2th and 5th inputs are state tensors. for (int i : {1, 4}) { int node_index = node.inputs->data[i]; ResetTensor(node_index); } } } } } } // namespace testing } // namespace tflite
yanchen036/tensorflow
tensorflow/contrib/lite/testing/tflite_driver.cc
C++
apache-2.0
10,088
/*
 * Copyright 2013-2017 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.cloudfoundry.util;

import org.cloudfoundry.client.v2.ClientV2Exception;
import reactor.core.publisher.Mono;

import java.util.Arrays;
import java.util.function.Predicate;

/**
 * Utilities for dealing with {@link Exception}s
 */
public final class ExceptionUtils {

    private ExceptionUtils() {
    }

    /**
     * Returns a {@link Mono} that terminates with an {@link IllegalArgumentException} whose message is
     * built from the supplied format string and arguments.
     *
     * @param format a {@link java.util.Formatter format string}
     * @param args   the arguments referenced by the format specifiers in {@code format}; extra arguments
     *               are ignored
     * @param <T>    the element type of the returned {@link Mono}
     * @return a {@link Mono} containing the error
     */
    public static <T> Mono<T> illegalArgument(String format, Object... args) {
        return Mono.error(new IllegalArgumentException(String.format(format, args)));
    }

    /**
     * Returns a {@link Mono} that terminates with an {@link IllegalStateException} whose message is
     * built from the supplied format string and arguments.
     *
     * @param format a {@link java.util.Formatter format string}
     * @param args   the arguments referenced by the format specifiers in {@code format}; extra arguments
     *               are ignored
     * @param <T>    the element type of the returned {@link Mono}
     * @return a {@link Mono} containing the error
     */
    public static <T> Mono<T> illegalState(String format, Object... args) {
        return Mono.error(new IllegalStateException(String.format(format, args)));
    }

    /**
     * A predicate that returns {@code true} if the {@link Throwable} is a {@link ClientV2Exception}
     * whose code matches one of the expected codes.
     *
     * @param codes the codes to match
     * @return {@code true} if the throwable is a {@link ClientV2Exception} and its code matches
     */
    public static Predicate<? super Throwable> statusCode(int... codes) {
        return throwable -> {
            if (!(throwable instanceof ClientV2Exception)) {
                return false;
            }

            ClientV2Exception exception = (ClientV2Exception) throwable;
            return Arrays.stream(codes).anyMatch(candidate -> exception.getCode().equals(candidate));
        };
    }

}
Orange-OpenSource/cf-java-client
cloudfoundry-util/src/main/java/org/cloudfoundry/util/ExceptionUtils.java
Java
apache-2.0
3,609
/*
 * Copyright 2012-2014 MarkLogic Corporation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.marklogic.samplestack.web.security;

import java.io.IOException;
import java.io.Writer;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpServletResponseWrapper;

import org.apache.http.HttpStatus;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.core.AuthenticationException;
import org.springframework.security.web.AuthenticationEntryPoint;
import org.springframework.stereotype.Component;

import com.marklogic.samplestack.web.JsonHttpResponse;

/**
 * Customizes the default login handling. Rather than redirecting to a
 * login form, Samplestack simply denies access (where authentication is
 * required).
 */
@Component
public class SamplestackAuthenticationEntryPoint implements
		AuthenticationEntryPoint {

	@Autowired
	private JsonHttpResponse errors;

	/**
	 * Answers any unauthenticated request to a secured endpoint with a
	 * 401 status and a JSON error body.
	 */
	@Override
	public void commence(HttpServletRequest request,
			HttpServletResponse response, AuthenticationException authException)
			throws IOException {
		HttpServletResponseWrapper wrapped =
				new HttpServletResponseWrapper(response);
		wrapped.setStatus(HttpStatus.SC_UNAUTHORIZED);
		Writer writer = wrapped.getWriter();
		errors.writeJsonResponse(writer, HttpStatus.SC_UNAUTHORIZED,
				"Unauthorized");
		writer.close();
	}
}
laurelnaiad/marklogic-samplestack-old
appserver/java-spring/src/main/java/com/marklogic/samplestack/web/security/SamplestackAuthenticationEntryPoint.java
Java
apache-2.0
2,075
// Alloy-compiled controller for the "contact" view: a three-tab header
// (Our Offices / Care Center / XOX Dealers) driving a ScrollableView.

// Pops `key` out of `obj` (if present) and returns its value, or null.
function __processArg(obj, key) {
    var arg = null;
    if (obj) {
        arg = obj[key] || null;
        delete obj[key];
    }
    return arg;
}

function Controller() {
    // Tab handler: highlights the tapped label and scrolls to its slide.
    function goSlide(event) {
        // `mod` carries the target slide index ("0" | "1" | "2") as a string.
        var index = event.source.mod;
        var arrViews = $.scrollableView.getViews();
        var tabs = [$.lbl1, $.lbl2, $.lbl3];
        var selected = Number(index);
        if (selected >= 0 && selected < tabs.length) {
            for (var i = 0; i < tabs.length; i++) {
                if (i === selected) {
                    // active tab colors
                    tabs[i].backgroundColor = "#453363";
                    tabs[i].color = "#AA9DB6";
                } else {
                    // inactive tab colors
                    tabs[i].backgroundColor = "#E6E7E9";
                    tabs[i].color = "#4CC4D2";
                }
            }
        }
        $.scrollableView.scrollToView(arrViews[index]);
    }

    require("alloy/controllers/BaseController").apply(this, Array.prototype.slice.call(arguments));
    this.__controllerPath = "contact";
    if (arguments[0]) {
        var __parentSymbol = __processArg(arguments[0], "__parentSymbol");
        {
            __processArg(arguments[0], "$model");
        }
        {
            __processArg(arguments[0], "__itemTemplate");
        }
    }
    var $ = this;
    var exports = {};
    var __defers = {};

    // Root window container.
    $.__views.win = Ti.UI.createView({
        layout: "vertical",
        id: "win",
        backgroundColor: "white"
    });
    $.__views.win && $.addTopLevelView($.__views.win);

    // Shared header controller.
    $.__views.__alloyId87 = Alloy.createController("_header", {
        id: "__alloyId87",
        __parentSymbol: $.__views.win
    });
    $.__views.__alloyId87.setParent($.__views.win);

    // Purple title banner.
    $.__views.__alloyId88 = Ti.UI.createView({
        height: "20%",
        backgroundColor: "#836EAF",
        id: "__alloyId88"
    });
    $.__views.win.add($.__views.__alloyId88);
    $.__views.__alloyId89 = Ti.UI.createLabel({
        text: "Contact us",
        left: "10",
        top: "10",
        color: "white",
        id: "__alloyId89"
    });
    $.__views.__alloyId88.add($.__views.__alloyId89);

    // Tab strip with three labels separated by thin dividers.
    $.__views.menu = Ti.UI.createView({
        id: "menu",
        layout: "horizontal",
        height: "50",
        width: "100%"
    });
    $.__views.win.add($.__views.menu);

    $.__views.lbl1 = Ti.UI.createLabel({
        text: "Our Offices",
        id: "lbl1",
        mod: "0",
        height: "100%",
        width: "33%",
        textAlign: "center",
        backgroundColor: "#453363",
        color: "#AA9DB6"
    });
    $.__views.menu.add($.__views.lbl1);
    goSlide ? $.__views.lbl1.addEventListener("touchend", goSlide) : __defers["$.__views.lbl1!touchend!goSlide"] = true;

    $.__views.__alloyId90 = Ti.UI.createView({
        backgroundColor: "#4CC4D2",
        height: "100%",
        width: "0.45%",
        id: "__alloyId90"
    });
    $.__views.menu.add($.__views.__alloyId90);

    $.__views.lbl2 = Ti.UI.createLabel({
        text: "Care Center",
        id: "lbl2",
        mod: "1",
        height: "100%",
        width: "33%",
        textAlign: "center",
        backgroundColor: "#E6E7E9",
        color: "#4CC4D2"
    });
    $.__views.menu.add($.__views.lbl2);
    goSlide ? $.__views.lbl2.addEventListener("touchend", goSlide) : __defers["$.__views.lbl2!touchend!goSlide"] = true;

    $.__views.__alloyId91 = Ti.UI.createView({
        backgroundColor: "#4CC4D2",
        height: "100%",
        width: "0.45%",
        id: "__alloyId91"
    });
    $.__views.menu.add($.__views.__alloyId91);

    $.__views.lbl3 = Ti.UI.createLabel({
        text: "XOX Dealers",
        id: "lbl3",
        mod: "2",
        height: "100%",
        width: "33%",
        textAlign: "center",
        backgroundColor: "#E6E7E9",
        color: "#4CC4D2"
    });
    $.__views.menu.add($.__views.lbl3);
    goSlide ? $.__views.lbl3.addEventListener("touchend", goSlide) : __defers["$.__views.lbl3!touchend!goSlide"] = true;

    // The three slides backing the tabs.
    var __alloyId92 = [];
    $.__views.__alloyId93 = Alloy.createController("contact1", {
        id: "__alloyId93",
        __parentSymbol: __parentSymbol
    });
    __alloyId92.push($.__views.__alloyId93.getViewEx({
        recurse: true
    }));
    $.__views.__alloyId94 = Alloy.createController("contact2", {
        id: "__alloyId94",
        __parentSymbol: __parentSymbol
    });
    __alloyId92.push($.__views.__alloyId94.getViewEx({
        recurse: true
    }));
    $.__views.__alloyId95 = Alloy.createController("contact3", {
        id: "__alloyId95",
        __parentSymbol: __parentSymbol
    });
    __alloyId92.push($.__views.__alloyId95.getViewEx({
        recurse: true
    }));
    $.__views.scrollableView = Ti.UI.createScrollableView({
        views: __alloyId92,
        id: "scrollableView",
        showPagingControl: "false",
        scrollingEnabled: "false"
    });
    $.__views.win.add($.__views.scrollableView);

    exports.destroy = function() {};
    _.extend($, $.__views);

    // Controller-specific logic: fetch and dump the store list.
    var storeModel = Alloy.createCollection("storeLocator");
    var details = storeModel.getStoreList();
    console.log(details);

    // Attach any listeners that were deferred until goSlide existed.
    __defers["$.__views.lbl1!touchend!goSlide"] && $.__views.lbl1.addEventListener("touchend", goSlide);
    __defers["$.__views.lbl2!touchend!goSlide"] && $.__views.lbl2.addEventListener("touchend", goSlide);
    __defers["$.__views.lbl3!touchend!goSlide"] && $.__views.lbl3.addEventListener("touchend", goSlide);

    _.extend($, exports);
}

var Alloy = require("alloy"), Backbone = Alloy.Backbone, _ = Alloy._;

module.exports = Controller;
hardikamal/xox
Resources/iphone/alloy/controllers/contact.js
JavaScript
apache-2.0
5,935
/*
 * Copyright 2016 LINE Corporation
 *
 * LINE Corporation licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *   https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.linecorp.armeria.common;

import static java.util.Objects.requireNonNull;

import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Formatter;
import java.util.Locale;

import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufUtil;

import it.unimi.dsi.fastutil.io.FastByteArrayInputStream;

/**
 * HTTP/2 data. Helpers in this class create {@link HttpData} objects that leave the stream open.
 * To create a {@link HttpData} that closes the stream, directly instantiate {@link DefaultHttpData}.
 *
 * <p>Implementations should generally extend {@link AbstractHttpData} to interact with other {@link HttpData}
 * implementations.
 */
public interface HttpData extends HttpObject {

    /**
     * Empty HTTP/2 data.
     */
    HttpData EMPTY_DATA = new DefaultHttpData(new byte[0], 0, 0, false);

    /**
     * Creates a new instance from the specified byte array. The array is not copied; any changes made in the
     * array later will be visible to {@link HttpData}.
     *
     * @return a new {@link HttpData}. {@link #EMPTY_DATA} if the length of the specified array is 0.
     */
    static HttpData of(byte[] data) {
        requireNonNull(data, "data");
        if (data.length == 0) {
            return EMPTY_DATA;
        }
        return new DefaultHttpData(data, 0, data.length, false);
    }

    /**
     * Creates a new instance from the specified byte array, {@code offset} and {@code length}.
     * The array is not copied; any changes made in the array later will be visible to {@link HttpData}.
     *
     * @return a new {@link HttpData}. {@link #EMPTY_DATA} if {@code length} is 0.
     *
     * @throws ArrayIndexOutOfBoundsException if {@code offset} and {@code length} are out of bounds
     */
    static HttpData of(byte[] data, int offset, int length) {
        // FIX: pass the parameter name, consistent with every other null check in this file.
        requireNonNull(data, "data");
        if (offset < 0 || length < 0 || offset > data.length - length) {
            throw new ArrayIndexOutOfBoundsException(
                    "offset: " + offset + ", length: " + length + ", data.length: " + data.length);
        }
        if (length == 0) {
            return EMPTY_DATA;
        }
        return new DefaultHttpData(data, offset, length, false);
    }

    /**
     * Converts the specified {@code text} into an {@link HttpData}.
     *
     * @param charset the {@link Charset} to use for encoding {@code text}
     * @param text the {@link String} to convert
     *
     * @return a new {@link HttpData}. {@link #EMPTY_DATA} if the length of {@code text} is 0.
     */
    static HttpData of(Charset charset, String text) {
        requireNonNull(charset, "charset");
        requireNonNull(text, "text");
        if (text.isEmpty()) {
            return EMPTY_DATA;
        }
        return of(text.getBytes(charset));
    }

    /**
     * Converts the specified Netty {@link ByteBuf} into an {@link HttpData}. Unlike {@link #of(byte[])}, this
     * method makes a copy of the {@link ByteBuf}.
     *
     * @return a new {@link HttpData}. {@link #EMPTY_DATA} if the readable bytes of {@code buf} is 0.
     */
    static HttpData of(ByteBuf buf) {
        requireNonNull(buf, "buf");
        if (!buf.isReadable()) {
            return EMPTY_DATA;
        }
        return of(ByteBufUtil.getBytes(buf));
    }

    /**
     * Converts the specified formatted string into an {@link HttpData}. The string is formatted by
     * {@link String#format(Locale, String, Object...)} with {@linkplain Locale#ENGLISH English locale}.
     *
     * @param charset the {@link Charset} to use for encoding string
     * @param format {@linkplain Formatter the format string} of the response content
     * @param args the arguments referenced by the format specifiers in the format string
     *
     * @return a new {@link HttpData}. {@link #EMPTY_DATA} if {@code format} is empty.
     */
    static HttpData of(Charset charset, String format, Object... args) {
        requireNonNull(charset, "charset");
        requireNonNull(format, "format");
        requireNonNull(args, "args");
        if (format.isEmpty()) {
            return EMPTY_DATA;
        }
        return of(String.format(Locale.ENGLISH, format, args).getBytes(charset));
    }

    /**
     * Converts the specified {@code text} into a UTF-8 {@link HttpData}.
     *
     * @param text the {@link String} to convert
     *
     * @return a new {@link HttpData}. {@link #EMPTY_DATA} if the length of {@code text} is 0.
     */
    static HttpData ofUtf8(String text) {
        return of(StandardCharsets.UTF_8, text);
    }

    /**
     * Converts the specified formatted string into a UTF-8 {@link HttpData}. The string is formatted by
     * {@link String#format(Locale, String, Object...)} with {@linkplain Locale#ENGLISH English locale}.
     *
     * @param format {@linkplain Formatter the format string} of the response content
     * @param args the arguments referenced by the format specifiers in the format string
     *
     * @return a new {@link HttpData}. {@link #EMPTY_DATA} if {@code format} is empty.
     */
    static HttpData ofUtf8(String format, Object... args) {
        return of(StandardCharsets.UTF_8, format, args);
    }

    /**
     * Converts the specified {@code text} into a US-ASCII {@link HttpData}.
     *
     * @param text the {@link String} to convert
     *
     * @return a new {@link HttpData}. {@link #EMPTY_DATA} if the length of {@code text} is 0.
     */
    static HttpData ofAscii(String text) {
        return of(StandardCharsets.US_ASCII, text);
    }

    /**
     * Converts the specified formatted string into a US-ASCII {@link HttpData}. The string is formatted by
     * {@link String#format(Locale, String, Object...)} with {@linkplain Locale#ENGLISH English locale}.
     *
     * @param format {@linkplain Formatter the format string} of the response content
     * @param args the arguments referenced by the format specifiers in the format string
     *
     * @return a new {@link HttpData}. {@link #EMPTY_DATA} if {@code format} is empty.
     */
    static HttpData ofAscii(String format, Object... args) {
        return of(StandardCharsets.US_ASCII, format, args);
    }

    /**
     * Returns the underlying byte array of this data.
     */
    byte[] array();

    /**
     * Returns the start offset of the {@link #array()}.
     */
    int offset();

    /**
     * Returns the length of this data.
     */
    int length();

    /**
     * Returns whether the {@link #length()} is 0.
     */
    default boolean isEmpty() {
        return length() == 0;
    }

    /**
     * Decodes this data into a {@link String}.
     *
     * @param charset the {@link Charset} to use for decoding this data
     *
     * @return the decoded {@link String}
     */
    default String toString(Charset charset) {
        requireNonNull(charset, "charset");
        return new String(array(), offset(), length(), charset);
    }

    /**
     * Decodes this data into a {@link String} using UTF-8 encoding.
     *
     * @return the decoded {@link String}
     */
    default String toStringUtf8() {
        return toString(StandardCharsets.UTF_8);
    }

    /**
     * Decodes this data into a {@link String} using US-ASCII encoding.
     *
     * @return the decoded {@link String}
     */
    default String toStringAscii() {
        return toString(StandardCharsets.US_ASCII);
    }

    /**
     * Returns a new {@link InputStream} that is sourced from this data.
     */
    default InputStream toInputStream() {
        return new FastByteArrayInputStream(array(), offset(), length());
    }

    /**
     * Returns a new {@link Reader} that is sourced from this data and decoded using the specified
     * {@link Charset}.
     */
    default Reader toReader(Charset charset) {
        requireNonNull(charset, "charset");
        return new InputStreamReader(toInputStream(), charset);
    }

    /**
     * Returns a new {@link Reader} that is sourced from this data and decoded using
     * {@link StandardCharsets#UTF_8}.
     */
    default Reader toReaderUtf8() {
        return toReader(StandardCharsets.UTF_8);
    }

    /**
     * Returns a new {@link Reader} that is sourced from this data and decoded using
     * {@link StandardCharsets#US_ASCII}.
     */
    default Reader toReaderAscii() {
        return toReader(StandardCharsets.US_ASCII);
    }
}
jmostella/armeria
core/src/main/java/com/linecorp/armeria/common/HttpData.java
Java
apache-2.0
9,114
extern alias SSmDsClient;

using System;
using System.Collections.Generic;
using System.Linq;
using OpenRiaServices.DomainServices.Client;
using Cities;
using Microsoft.Silverlight.Testing;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using DataTests.Northwind.LTS;
using System.ComponentModel.DataAnnotations;
using OpenRiaServices.Silverlight.Testing;

namespace OpenRiaServices.DomainServices.Client.Test
{
    using Resource = SSmDsClient::OpenRiaServices.DomainServices.Client.Resource;
    using Resources = SSmDsClient::OpenRiaServices.DomainServices.Client.Resources;

    #region Test Classes
    /// <summary>
    /// Minimal concrete <see cref="OperationBase"/> exposing the protected
    /// Complete/Cancel members so tests can drive an operation directly.
    /// </summary>
    public class TestOperation : OperationBase
    {
        private Action<TestOperation> _completeAction;

        public TestOperation(Action<TestOperation> completeAction, object userState)
            : base(userState)
        {
            this._completeAction = completeAction;
        }

        protected override bool SupportsCancellation
        {
            get { return true; }
        }

        protected override void CancelCore()
        {
            base.CancelCore();
        }

        public new void Complete(Exception error)
        {
            base.Complete(error);
        }

        public new void Complete(object result)
        {
            base.Complete(result);
        }

        public new void Cancel()
        {
            base.Cancel();
        }

        /// <summary>
        /// Invoke the completion callback.
        /// </summary>
        protected override void InvokeCompleteAction()
        {
            if (this._completeAction != null)
            {
                this._completeAction(this);
            }
        }
    }
    #endregion

    /// <summary>
    /// Targeted tests for OperationBase and derived classes
    /// </summary>
    [TestClass]
    public class OperationTests : UnitTestBase
    {
        /// <summary>
        /// Verify MarkErrorAsHandled semantics: repeated calls are a noop and
        /// calling it on an operation not in error throws.
        /// </summary>
        [TestMethod] // FIX: attribute was missing, so this test never ran
        public void Operation_MarkAsHandled()
        {
            TestDataContext ctxt = new TestDataContext(new Uri(TestURIs.RootURI, "TestDomainServices-TestCatalog1.svc"));
            var query = ctxt.CreateQuery<Product>("ThrowGeneralException", null, false, true);
            LoadOperation lo = new LoadOperation<Product>(query, LoadBehavior.KeepCurrent, null, null, null);

            EventHandler action = (o, e) =>
            {
                LoadOperation loadOperation = (LoadOperation)o;
                if (loadOperation.HasError)
                {
                    loadOperation.MarkErrorAsHandled();
                }
            };
            lo.Completed += action;

            DomainOperationException ex = new DomainOperationException("Operation Failed!", OperationErrorStatus.ServerError, 42, "StackTrace");
            lo.Complete(ex);

            // verify that calling MarkAsHandled again is a noop
            lo.MarkErrorAsHandled();
            lo.MarkErrorAsHandled();

            // verify that calling MarkAsHandled on an operation not in error
            // results in an exception
            lo = new LoadOperation<Product>(query, LoadBehavior.KeepCurrent, null, null, null);
            Assert.IsFalse(lo.HasError);
            Assert.IsTrue(lo.IsErrorHandled);
            ExceptionHelper.ExpectInvalidOperationException(delegate
            {
                lo.MarkErrorAsHandled();
            }, Resource.Operation_HasErrorMustBeTrue);
        }

        /// <summary>
        /// Verify that Load operations that don't specify a callback to handle
        /// errors and don't specify throwOnError = false result in an exception.
        /// </summary>
        [TestMethod]
        public void UnhandledLoadOperationError()
        {
            TestDataContext ctxt = new TestDataContext(new Uri(TestURIs.RootURI, "TestDomainServices-TestCatalog1.svc"));
            var query = ctxt.CreateQuery<Product>("ThrowGeneralException", null, false, true);
            LoadOperation lo = new LoadOperation<Product>(query, LoadBehavior.KeepCurrent, null, null, null);

            DomainOperationException expectedException = null;
            DomainOperationException ex = new DomainOperationException("Operation Failed!", OperationErrorStatus.ServerError, 42, "StackTrace");
            try
            {
                lo.Complete(ex);
            }
            catch (DomainOperationException e)
            {
                expectedException = e;
            }

            // verify the exception properties
            Assert.IsNotNull(expectedException);
            Assert.AreEqual(string.Format(Resource.DomainContext_LoadOperationFailed, "ThrowGeneralException", ex.Message), expectedException.Message);
            Assert.AreEqual(ex.StackTrace, expectedException.StackTrace);
            Assert.AreEqual(ex.Status, expectedException.Status);
            Assert.AreEqual(ex.ErrorCode, expectedException.ErrorCode);
            Assert.AreEqual(false, lo.IsErrorHandled);

            // now test again with validation errors
            expectedException = null;
            ValidationResult[] validationErrors = new ValidationResult[] { new ValidationResult("Foo", new string[] { "Bar" }) };
            lo = new LoadOperation<Product>(query, LoadBehavior.KeepCurrent, null, null, null);
            try
            {
                lo.Complete(validationErrors);
            }
            catch (DomainOperationException e)
            {
                expectedException = e;
            }

            // verify the exception properties
            Assert.IsNotNull(expectedException);
            Assert.AreEqual(string.Format(Resource.DomainContext_LoadOperationFailed_Validation, "ThrowGeneralException"), expectedException.Message);
        }

        /// <summary>
        /// Verify that Invoke operations that don't specify a callback to handle
        /// errors and don't specify throwOnError = false result in an exception.
        /// (FIX: doc was copy-pasted from the Load test.)
        /// </summary>
        [TestMethod]
        public void UnhandledInvokeOperationError()
        {
            CityDomainContext cities = new CityDomainContext(TestURIs.Cities);
            InvokeOperation invoke = new InvokeOperation("Echo", null, null, null, null);

            DomainOperationException expectedException = null;
            DomainOperationException ex = new DomainOperationException("Operation Failed!", OperationErrorStatus.ServerError, 42, "StackTrace");
            try
            {
                invoke.Complete(ex);
            }
            catch (DomainOperationException e)
            {
                expectedException = e;
            }

            // verify the exception properties
            Assert.IsNotNull(expectedException);
            Assert.AreEqual(string.Format(Resource.DomainContext_InvokeOperationFailed, "Echo", ex.Message), expectedException.Message);
            Assert.AreEqual(ex.StackTrace, expectedException.StackTrace);
            Assert.AreEqual(ex.Status, expectedException.Status);
            Assert.AreEqual(ex.ErrorCode, expectedException.ErrorCode);
            Assert.AreEqual(false, invoke.IsErrorHandled);

            // now test again with validation errors
            expectedException = null;
            ValidationResult[] validationErrors = new ValidationResult[] { new ValidationResult("Foo", new string[] { "Bar" }) };
            invoke = new InvokeOperation("Echo", null, null, null, null);
            try
            {
                invoke.Complete(validationErrors);
            }
            catch (DomainOperationException e)
            {
                expectedException = e;
            }

            // verify the exception properties
            Assert.IsNotNull(expectedException);
            Assert.AreEqual(string.Format(Resource.DomainContext_InvokeOperationFailed_Validation, "Echo"), expectedException.Message);
        }

        /// <summary>
        /// Verify that Submit operations that don't specify a callback to handle
        /// errors and don't specify throwOnError = false result in an exception.
        /// (FIX: doc was copy-pasted from the Load test.)
        /// </summary>
        [TestMethod]
        public void UnhandledSubmitOperationError()
        {
            CityDomainContext cities = new CityDomainContext(TestURIs.Cities);
            CityData data = new CityData();
            cities.Cities.LoadEntities(data.Cities.ToArray());
            City city = cities.Cities.First();
            city.ZoneID = 1;
            Assert.IsTrue(cities.EntityContainer.HasChanges);

            SubmitOperation submit = new SubmitOperation(cities.EntityContainer.GetChanges(), null, null, null);

            DomainOperationException expectedException = null;
            DomainOperationException ex = new DomainOperationException("Submit Failed!", OperationErrorStatus.ServerError, 42, "StackTrace");
            try
            {
                submit.Complete(ex);
            }
            catch (DomainOperationException e)
            {
                expectedException = e;
            }

            // verify the exception properties
            Assert.IsNotNull(expectedException);
            Assert.AreEqual(string.Format(Resource.DomainContext_SubmitOperationFailed, ex.Message), expectedException.Message);
            Assert.AreEqual(ex.StackTrace, expectedException.StackTrace);
            Assert.AreEqual(ex.Status, expectedException.Status);
            Assert.AreEqual(ex.ErrorCode, expectedException.ErrorCode);
            Assert.AreEqual(false, submit.IsErrorHandled);

            // now test again with conflicts
            // FIX: the conflicts pass previously populated ValidationErrors and the
            // validation pass populated ConflictMembers — the fixture data for the
            // two scenarios was swapped.
            expectedException = null;
            IEnumerable<ChangeSetEntry> entries = ChangeSetBuilder.Build(cities.EntityContainer.GetChanges());
            ChangeSetEntry entry = entries.First();
            entry.ConflictMembers = new string[] { "ZoneID" };
            submit = new SubmitOperation(cities.EntityContainer.GetChanges(), null, null, null);
            try
            {
                submit.Complete(OperationErrorStatus.Conflicts);
            }
            catch (DomainOperationException e)
            {
                expectedException = e;
            }

            // verify the exception properties
            Assert.IsNotNull(expectedException);
            Assert.AreEqual(string.Format(Resource.DomainContext_SubmitOperationFailed_Conflicts), expectedException.Message);

            // now test again with validation errors
            expectedException = null;
            entries = ChangeSetBuilder.Build(cities.EntityContainer.GetChanges());
            entry = entries.First();
            entry.ValidationErrors = new ValidationResultInfo[] { new ValidationResultInfo("Foo", new string[] { "Bar" }) };
            submit = new SubmitOperation(cities.EntityContainer.GetChanges(), null, null, null);
            try
            {
                submit.Complete(OperationErrorStatus.ValidationFailed);
            }
            catch (DomainOperationException e)
            {
                expectedException = e;
            }

            // verify the exception properties
            Assert.IsNotNull(expectedException);
            Assert.AreEqual(string.Format(Resource.DomainContext_SubmitOperationFailed_Validation, ex.Message), expectedException.Message);
        }

        [TestMethod]
        [Asynchronous]
        [Description("Verifies that cached LoadOperation Entity results are valid when accessed from the complete callback.")]
        public void Bug706034_AccessCachedEntityResultsInCallback()
        {
            Cities.CityDomainContext cities = new CityDomainContext(TestURIs.Cities);

            bool callbackCalled = false;
            Exception callbackException = null;
            Action<LoadOperation<City>> callback = (op) =>
            {
                if (op.HasError)
                {
                    op.MarkErrorAsHandled();
                }
                try
                {
                    Assert.AreEqual(11, op.AllEntities.Count());
                    Assert.AreEqual(11, op.Entities.Count());
                }
                catch (Exception e)
                {
                    callbackException = e;
                }
                finally
                {
                    callbackCalled = true;
                }
            };

            var q = cities.GetCitiesQuery();
            LoadOperation<City> lo = cities.Load(q, callback, null);

            // KEY to bug : access Entity collections to force them to cache
            IEnumerable<City> entities = lo.Entities;
            IEnumerable<Entity> allEntities = lo.AllEntities;

            EnqueueConditional(() => lo.IsComplete && callbackCalled);
            EnqueueCallback(delegate
            {
                Assert.IsNull(callbackException);
                Assert.IsNull(lo.Error);
                Assert.AreEqual(11, lo.AllEntities.Count());
                Assert.AreEqual(11, lo.Entities.Count());
            });
            EnqueueTestComplete();
        }

        [TestMethod]
        [Description("Verifies that exceptions are thrown and callstacks are preserved.")]
        public void Exceptions()
        {
            Cities.CityDomainContext cities = new CityDomainContext(TestURIs.Cities);

            Action<LoadOperation<City>> loCallback = (op) =>
            {
                if (op.HasError)
                {
                    op.MarkErrorAsHandled();
                }
                throw new InvalidOperationException("Fnord!");
            };
            Action<SubmitOperation> soCallback = (op) =>
            {
                if (op.HasError)
                {
                    op.MarkErrorAsHandled();
                }
                throw new InvalidOperationException("Fnord!");
            };
            Action<InvokeOperation> ioCallback = (op) =>
            {
                if (op.HasError)
                {
                    op.MarkErrorAsHandled();
                }
                throw new InvalidOperationException("Fnord!");
            };

            LoadOperation lo = new LoadOperation<City>(cities.GetCitiesQuery(), LoadBehavior.MergeIntoCurrent, loCallback, null, loCallback);

            // verify completion callbacks that throw
            ExceptionHelper.ExpectInvalidOperationException(delegate
            {
                try
                {
                    lo.Complete(DomainClientResult.CreateQueryResult(new Entity[0], new Entity[0], 0, new ValidationResult[0]));
                }
                catch (Exception ex)
                {
                    Assert.IsTrue(ex.StackTrace.Contains("at OpenRiaServices.DomainServices.Client.Test.OperationTests"), "Stacktrace not preserved.");
                    throw;
                }
            }, "Fnord!");

            // verify cancellation callbacks for all fx operation types
            lo = new LoadOperation<City>(cities.GetCitiesQuery(), LoadBehavior.MergeIntoCurrent, null, null, loCallback);
            ExceptionHelper.ExpectInvalidOperationException(delegate
            {
                lo.Cancel();
            }, "Fnord!");

            SubmitOperation so = new SubmitOperation(cities.EntityContainer.GetChanges(), soCallback, null, soCallback);
            ExceptionHelper.ExpectInvalidOperationException(delegate
            {
                so.Cancel();
            }, "Fnord!");

            InvokeOperation io = new InvokeOperation("Fnord", null, null, null, ioCallback);
            ExceptionHelper.ExpectInvalidOperationException(delegate
            {
                io.Cancel();
            }, "Fnord!");
        }

        /// <summary>
        /// Attempt to call cancel from the completion callback. Expect
        /// an exception since the operation is already complete.
        /// </summary>
        [TestMethod]
        [Asynchronous]
        public void Bug706066_CancelInCallback()
        {
            Cities.CityDomainContext cities = new CityDomainContext(TestURIs.Cities);

            bool callbackCalled = false;
            InvalidOperationException expectedException = null;
            Action<LoadOperation<City>> callback = (op) =>
            {
                if (op.HasError)
                {
                    op.MarkErrorAsHandled();
                }

                // verify that CanCancel is false even though we'll
                // ignore this and try below
                Assert.IsFalse(op.CanCancel);

                try
                {
                    op.Cancel();
                }
                catch (InvalidOperationException io)
                {
                    expectedException = io;
                }
                callbackCalled = true;
            };

            var q = cities.GetCitiesQuery().Take(1);
            LoadOperation lo = cities.Load(q, callback, null);

            EnqueueConditional(() => lo.IsComplete && callbackCalled);
            EnqueueCallback(delegate
            {
                Assert.IsFalse(lo.IsCanceled);
                Assert.AreEqual(Resources.AsyncOperation_AlreadyCompleted, expectedException.Message);
            });
            EnqueueTestComplete();
        }
    }
}
jeffhandley/OpenRiaServices
OpenRiaServices.DomainServices.Client/Test/Silverlight/Data/OperationTests.cs
C#
apache-2.0
17,162
require 'spec_helper' describe 'source install' do let(:chef_run) do runner = ChefSpec::Runner.new(platform: 'ubuntu', version: '12.04') runner.node.set['nrpe']['install_method'] = 'source' runner.converge 'nrpe::default' end it 'includes the nrpe source recipes' do expect(chef_run).to include_recipe('nrpe::_source_install') expect(chef_run).to include_recipe('nrpe::_source_nrpe') expect(chef_run).to include_recipe('nrpe::_source_plugins') end it 'includes the build-essential recipe' do expect(chef_run).to include_recipe('build-essential') end it 'installs the correct packages' do expect(chef_run).to install_package('libssl-dev') expect(chef_run).to install_package('make') expect(chef_run).to install_package('tar') end it 'creates the nrpe user' do expect(chef_run).to create_user(chef_run.node['nrpe']['user']) end it 'creates the nrpe group' do expect(chef_run).to create_group(chef_run.node['nrpe']['group']) end it 'creates config dir' do expect(chef_run).to create_directory(chef_run.node['nrpe']['conf_dir']) end it 'templates init script' do expect(chef_run).to render_file("/etc/init.d/#{chef_run.node['nrpe']['service_name']}").with_content('processname: nrpe') end it 'starts service called nrpe not nagios-nrpe-server' do expect(chef_run).to start_service('nrpe') end end
alexanderkiel/chef-nrpe
spec/source_spec.rb
Ruby
apache-2.0
1,398
<?php namespace Fungku\NetSuite\Classes; class BudgetCategory extends Record { public $name; public $budgetType; public $isInactive; public $internalId; static $paramtypesmap = array( "name" => "string", "budgetType" => "boolean", "isInactive" => "boolean", "internalId" => "string", ); }
bitclaw/netsuite-php
src/Classes/BudgetCategory.php
PHP
apache-2.0
305
/* * Copyright 2014 Open Connectome Project (http://openconnecto.me) * Written by Da Zheng (zhengda1936@gmail.com) * * This file is part of FlashMatrix. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "matrix_store.h" #include "local_matrix_store.h" #include "mem_matrix_store.h" #include "EM_dense_matrix.h" #include "EM_object.h" #include "matrix_config.h" #include "local_mem_buffer.h" namespace fm { namespace detail { std::atomic<size_t> matrix_store::mat_counter; matrix_store::ptr matrix_store::create(size_t nrow, size_t ncol, matrix_layout_t layout, const scalar_type &type, int num_nodes, bool in_mem, safs::safs_file_group::ptr group) { if (in_mem) return mem_matrix_store::create(nrow, ncol, layout, type, num_nodes); else return EM_matrix_store::create(nrow, ncol, layout, type, group); } matrix_store::matrix_store(size_t nrow, size_t ncol, bool in_mem, const scalar_type &_type): type(_type) { this->nrow = nrow; this->ncol = ncol; this->in_mem = in_mem; this->entry_size = type.get_size(); this->cache_portion = true; } size_t matrix_store::get_num_portions() const { std::pair<size_t, size_t> chunk_size = get_portion_size(); if (is_wide()) return ceil(((double) get_num_cols()) / chunk_size.second); else return ceil(((double) get_num_rows()) / chunk_size.first); } local_matrix_store::ptr matrix_store::get_portion(size_t id) { size_t start_row; size_t start_col; size_t num_rows; size_t num_cols; std::pair<size_t, size_t> chunk_size = get_portion_size(); if (is_wide()) { 
start_row = 0; start_col = chunk_size.second * id; num_rows = get_num_rows(); num_cols = std::min(chunk_size.second, get_num_cols() - start_col); } else { start_row = chunk_size.first * id; start_col = 0; num_rows = std::min(chunk_size.first, get_num_rows() - start_row); num_cols = get_num_cols(); } return get_portion(start_row, start_col, num_rows, num_cols); } local_matrix_store::const_ptr matrix_store::get_portion(size_t id) const { size_t start_row; size_t start_col; size_t num_rows; size_t num_cols; std::pair<size_t, size_t> chunk_size = get_portion_size(); if (is_wide()) { start_row = 0; start_col = chunk_size.second * id; num_rows = get_num_rows(); num_cols = std::min(chunk_size.second, get_num_cols() - start_col); } else { start_row = chunk_size.first * id; start_col = 0; num_rows = std::min(chunk_size.first, get_num_rows() - start_row); num_cols = get_num_cols(); } return get_portion(start_row, start_col, num_rows, num_cols); } namespace { class reset_op: public set_operate { const scalar_type &type; size_t entry_size; public: reset_op(const scalar_type &_type): type(_type) { this->entry_size = type.get_size(); } virtual void set(void *arr, size_t num_eles, off_t row_idx, off_t col_idx) const { memset(arr, 0, num_eles * entry_size); } virtual const scalar_type &get_type() const { return type; } virtual set_operate::const_ptr transpose() const { return set_operate::const_ptr(); } }; class set_task: public thread_task { detail::local_matrix_store::ptr local_store; const set_operate &op; public: set_task(detail::local_matrix_store::ptr local_store, const set_operate &_op): op(_op) { this->local_store = local_store; } void run() { local_store->set_data(op); } }; /* * These two functions define the length and portion size for 1D partitioning * on a matrix. */ static inline size_t get_tot_len(const matrix_store &mat) { return mat.is_wide() ? 
mat.get_num_cols() : mat.get_num_rows(); } static inline size_t get_portion_size(const matrix_store &mat) { return mat.is_wide() ? mat.get_portion_size().second : mat.get_portion_size().first; } class EM_mat_setdata_dispatcher: public EM_portion_dispatcher { const set_operate &op; matrix_store &to_mat; public: EM_mat_setdata_dispatcher(matrix_store &store, const set_operate &_op); virtual void create_task(off_t global_start, size_t length); }; EM_mat_setdata_dispatcher::EM_mat_setdata_dispatcher(matrix_store &store, const set_operate &_op): EM_portion_dispatcher(get_tot_len(store), fm::detail::get_portion_size(store)), op(_op), to_mat(store) { } void EM_mat_setdata_dispatcher::create_task(off_t global_start, size_t length) { size_t global_start_row, global_start_col; size_t num_rows, num_cols; if (to_mat.is_wide()) { global_start_row = 0; global_start_col = global_start; num_rows = to_mat.get_num_rows(); num_cols = length; } else { global_start_row = global_start; global_start_col = 0; num_rows = length; num_cols = to_mat.get_num_cols(); } local_matrix_store::ptr buf; if (to_mat.store_layout() == matrix_layout_t::L_COL) buf = local_matrix_store::ptr(new local_buf_col_matrix_store( global_start_row, global_start_col, num_rows, num_cols, to_mat.get_type(), -1)); else buf = local_matrix_store::ptr(new local_buf_row_matrix_store( global_start_row, global_start_col, num_rows, num_cols, to_mat.get_type(), -1)); buf->set_data(op); to_mat.write_portion_async(buf, global_start_row, global_start_col); } } void matrix_store::reset_data() { set_data(reset_op(get_type())); } void matrix_store::set_data(const set_operate &op) { size_t num_chunks = get_num_portions(); if (is_in_mem() && num_chunks == 1) { local_matrix_store::ptr buf; if (store_layout() == matrix_layout_t::L_ROW) buf = local_matrix_store::ptr(new local_buf_row_matrix_store(0, 0, get_num_rows(), get_num_cols(), get_type(), -1)); else buf = local_matrix_store::ptr(new local_buf_col_matrix_store(0, 0, get_num_rows(), 
get_num_cols(), get_type(), -1)); buf->set_data(op); write_portion_async(buf, 0, 0); // After computation, some matrices buffer local portions in the thread, // we should try to clean these local portions. These local portions // may contain pointers to some matrices that don't exist any more. // We also need to clean them to reduce memory consumption. // We might want to keep the memory buffer for I/O on dense matrices. if (matrix_conf.is_keep_mem_buf()) detail::local_mem_buffer::clear_bufs( detail::local_mem_buffer::MAT_PORTION); else detail::local_mem_buffer::clear_bufs(); } else if (is_in_mem()) { detail::mem_thread_pool::ptr mem_threads = detail::mem_thread_pool::get_global_mem_threads(); for (size_t i = 0; i < num_chunks; i++) { detail::local_matrix_store::ptr local_store = get_portion(i); int node_id = local_store->get_node_id(); // If the local matrix portion is not assigned to any node, // assign the tasks in round robin fashion. if (node_id < 0) node_id = i % mem_threads->get_num_nodes(); mem_threads->process_task(node_id, new set_task(local_store, op)); } mem_threads->wait4complete(); } else { mem_thread_pool::ptr threads = mem_thread_pool::get_global_mem_threads(); EM_mat_setdata_dispatcher::ptr dispatcher( new EM_mat_setdata_dispatcher(*this, op)); EM_matrix_store *em_this = dynamic_cast<EM_matrix_store *>(this); assert(em_this); em_this->start_stream(); for (size_t i = 0; i < threads->get_num_threads(); i++) { io_worker_task *task = new io_worker_task(dispatcher); const EM_object *obj = dynamic_cast<const EM_object *>(this); task->register_EM_obj(const_cast<EM_object *>(obj)); threads->process_task(i % threads->get_num_nodes(), task); } threads->wait4complete(); em_this->end_stream(); } } matrix_stream::ptr matrix_stream::create(matrix_store::ptr store) { if (store->is_in_mem()) { mem_matrix_store::ptr mem_store = std::dynamic_pointer_cast<mem_matrix_store>(store); if (mem_store == NULL) { BOOST_LOG_TRIVIAL(error) << "The in-mem matrix store isn't 
writable"; return matrix_stream::ptr(); } else return mem_matrix_stream::create(mem_store); } else { EM_matrix_store::ptr em_store = std::dynamic_pointer_cast<EM_matrix_store>(store); if (em_store == NULL) { BOOST_LOG_TRIVIAL(error) << "The ext-mem matrix store isn't writable"; return matrix_stream::ptr(); } else return EM_matrix_stream::create(em_store); } } matrix_store::const_ptr matrix_store::get_cols( const std::vector<off_t> &idxs) const { matrix_store::const_ptr tm = transpose(); matrix_store::const_ptr rows = tm->get_rows(idxs); if (rows == NULL) return matrix_store::const_ptr(); else return rows->transpose(); } matrix_store::const_ptr matrix_store::get_cols(off_t start, off_t end) const { if (start < 0 || end < 0 || end - start < 0) { BOOST_LOG_TRIVIAL(error) << "invalid range for selecting columns"; return matrix_store::const_ptr(); } std::vector<off_t> idxs(end - start); for (size_t i = 0; i < idxs.size(); i++) idxs[i] = start + i; return get_cols(idxs); } matrix_store::const_ptr matrix_store::get_rows(off_t start, off_t end) const { if (start < 0 || end < 0 || end - start < 0) { BOOST_LOG_TRIVIAL(error) << "invalid range for selecting rows"; return matrix_store::const_ptr(); } std::vector<off_t> idxs(end - start); for (size_t i = 0; i < idxs.size(); i++) idxs[i] = start + i; return get_rows(idxs); } bool matrix_store::share_data(const matrix_store &store) const { // By default, we can use data id to determine if two matrices have // the same data. 
return get_data_id() == store.get_data_id() && get_data_id() != INVALID_MAT_ID; } matrix_append::matrix_append(matrix_store::ptr store) { this->res = store; q.resize(1000); last_append = -1; written_eles = 0; empty_portion = local_matrix_store::const_ptr(new local_buf_row_matrix_store( 0, 0, 0, 0, store->get_type(), -1, false)); } void matrix_append::write_async(local_matrix_store::const_ptr portion, off_t seq_id) { if (seq_id <= last_append) { BOOST_LOG_TRIVIAL(error) << "Append a repeated portion"; return; } if (portion == NULL) portion = empty_portion; std::vector<local_matrix_store::const_ptr> data; lock.lock(); // Add the new portion to the queue. If the queue is too small, // we should resize the queue first. off_t loc = seq_id - last_append - 1; assert(loc >= 0); if ((size_t) loc >= q.size()) q.resize(q.size() * 2); q[loc] = portion; off_t start_loc = -1; if (q.front()) start_loc = written_eles; // Get the portions from the queue. while (q.front()) { auto mat = q.front(); // If the portion isn't empty. if (mat->get_num_rows() > 0 && mat->get_num_cols() > 0) data.push_back(mat); q.pop_front(); q.push_back(local_matrix_store::const_ptr()); last_append++; written_eles += mat->get_num_rows() * mat->get_num_cols(); } lock.unlock(); for (size_t i = 0; i < data.size(); i++) { assert(start_loc >= 0); // TODO this works if the result matrix is stored in memory. if (res->is_wide()) { off_t start_row = 0; off_t start_col = start_loc / res->get_num_rows(); res->write_portion_async(data[i], start_row, start_col); } else { off_t start_row = start_loc / res->get_num_cols(); off_t start_col = 0; res->write_portion_async(data[i], start_row, start_col); } start_loc += data[i]->get_num_rows() * data[i]->get_num_cols(); } } matrix_append::~matrix_append() { for (size_t i = 0; i < q.size(); i++) assert(q[i] == NULL); } void matrix_append::flush() { for (size_t i = 0; i < q.size(); i++) assert(q[i] == NULL); } } }
flashxio/FlashX
matrix/matrix_store.cpp
C++
apache-2.0
11,850
// Copyright 2018 The LUCI Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package repo import ( "context" "strings" "go.chromium.org/luci/common/data/stringset" "go.chromium.org/luci/common/errors" "go.chromium.org/luci/server/auth" api "go.chromium.org/luci/cipd/api/cipd/v1" ) // impliedRoles defines what roles are "inherited" by other roles, e.g. // WRITERs are automatically READERs, so hasRole(..., READER) should return true // for WRITERs too. // // The format is "role -> {role itself} + set of roles implied by it, perhaps // indirectly". // // If a role is missing from this map, it assumed to not be implying any roles. var impliedRoles = map[api.Role][]api.Role{ api.Role_READER: {api.Role_READER}, api.Role_WRITER: {api.Role_WRITER, api.Role_READER}, api.Role_OWNER: {api.Role_OWNER, api.Role_WRITER, api.Role_READER}, } // impliedRolesRev is reverse of impliedRoles mapping. // // The format is "role -> {role itself} + set of roles that inherit it, perhaps // indirectly". // // If a role is missing from this map, it assumed to not be inherited by // anything. 
var impliedRolesRev = map[api.Role]map[api.Role]struct{}{ api.Role_READER: roleSet(api.Role_READER, api.Role_WRITER, api.Role_OWNER), api.Role_WRITER: roleSet(api.Role_WRITER, api.Role_OWNER), api.Role_OWNER: roleSet(api.Role_OWNER), } func roleSet(roles ...api.Role) map[api.Role]struct{} { m := make(map[api.Role]struct{}, len(roles)) for _, r := range roles { m[r] = struct{}{} } return m } // hasRole checks whether the current caller has the given role in any of the // supplied PrefixMetadata objects. // // It understands the role inheritance defined by impliedRoles map. // // 'metas' is metadata for some prefix and all parent prefixes. It is expected // to be ordered by the prefix length (shortest first). Ordering is not really // used now, but it may change in the future. // // Returns only transient errors. func hasRole(c context.Context, metas []*api.PrefixMetadata, role api.Role) (bool, error) { caller := string(auth.CurrentIdentity(c)) // e.g. "user:abc@example.com" // E.g. if 'role' is READER, 'roles' will be {READER, WRITER, OWNER}. roles := impliedRolesRev[role] if roles == nil { roles = roleSet(role) } // Enumerate the set of principals that have any of the requested roles in any // of the prefixes. Exit early if hitting the direct match, otherwise proceed // to more expensive group membership checks. Note that we don't use isInACL // here because we want to postpone all group checks until the very end, // checking memberships in all groups mentioned in 'metas' at once. groups := stringset.New(10) // 10 is picked arbitrarily for _, meta := range metas { for _, acl := range meta.Acls { if _, ok := roles[acl.Role]; !ok { continue // not the role we are interested in } for _, p := range acl.Principals { if p == caller { return true, nil // the caller was specified in ACLs explicitly } // Is this a reference to a group? 
if s := strings.SplitN(p, ":", 2); len(s) == 2 && s[0] == "group" { groups.Add(s[1]) } } } } yes, err := auth.IsMember(c, groups.ToSlice()...) if err != nil { return false, errors.Annotate(err, "failed to check group memberships when checking ACLs for role %s", role).Err() } return yes, nil } // rolesInPrefix returns a union of roles the caller has in given supplied // PrefixMetadata objects. // // It understands the role inheritance defined by impliedRoles map. // // Returns only transient errors. func rolesInPrefix(c context.Context, metas []*api.PrefixMetadata) ([]api.Role, error) { roles := roleSet() for _, meta := range metas { for _, acl := range meta.Acls { if _, ok := roles[acl.Role]; ok { continue // seen this role already } switch yes, err := isInACL(c, acl); { case err != nil: return nil, err case yes: // Add acl.Role and all roles implied by it to 'roles' set. for _, r := range impliedRoles[acl.Role] { roles[r] = struct{}{} } } } } // Arrange the result in the order of Role enum definition. out := make([]api.Role, 0, len(roles)) for r := api.Role_READER; r <= api.Role_OWNER; r++ { if _, ok := roles[r]; ok { out = append(out, r) } } return out, nil } // isInACL is true if the caller is in the given access control list. func isInACL(c context.Context, acl *api.PrefixMetadata_ACL) (bool, error) { caller := string(auth.CurrentIdentity(c)) // e.g. "user:abc@example.com" var groups []string for _, p := range acl.Principals { if p == caller { return true, nil // the caller was specified in ACLs explicitly } if s := strings.SplitN(p, ":", 2); len(s) == 2 && s[0] == "group" { groups = append(groups, s[1]) } } yes, err := auth.IsMember(c, groups...) if err != nil { return false, errors.Annotate(err, "failed to check group memberships when checking ACLs").Err() } return yes, nil }
luci/luci-go
cipd/appengine/impl/repo/acl.go
GO
apache-2.0
5,464
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dubbo.registry.dubbo; import org.apache.dubbo.common.URL; import org.apache.dubbo.common.Version; import org.apache.dubbo.common.utils.ExecutorUtil; import org.apache.dubbo.common.utils.NamedThreadFactory; import org.apache.dubbo.common.utils.NetUtils; import org.apache.dubbo.registry.NotifyListener; import org.apache.dubbo.registry.RegistryService; import org.apache.dubbo.registry.support.FailbackRegistry; import org.apache.dubbo.remoting.Constants; import org.apache.dubbo.rpc.Invoker; import java.util.List; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.ReentrantLock; import static org.apache.dubbo.registry.Constants.REGISTRY_RECONNECT_PERIOD_KEY; /** * DubboRegistry */ public class DubboRegistry extends FailbackRegistry { // Reconnecting detection cycle: 3 seconds (unit:millisecond) private static final int RECONNECT_PERIOD_DEFAULT = 3 * 1000; // Scheduled executor service private final ScheduledExecutorService reconnectTimer = Executors.newScheduledThreadPool(1, new NamedThreadFactory("DubboRegistryReconnectTimer", 
true)); // Reconnection timer, regular check connection is available. If unavailable, unlimited reconnection. private final ScheduledFuture<?> reconnectFuture; // The lock for client acquisition process, lock the creation process of the client instance to prevent repeated clients private final ReentrantLock clientLock = new ReentrantLock(); private final Invoker<RegistryService> registryInvoker; private final RegistryService registryService; /** * The time in milliseconds the reconnectTimer will wait */ private final int reconnectPeriod; public DubboRegistry(Invoker<RegistryService> registryInvoker, RegistryService registryService) { super(registryInvoker.getUrl()); this.registryInvoker = registryInvoker; this.registryService = registryService; // Start reconnection timer this.reconnectPeriod = registryInvoker.getUrl().getParameter(REGISTRY_RECONNECT_PERIOD_KEY, RECONNECT_PERIOD_DEFAULT); reconnectFuture = reconnectTimer.scheduleWithFixedDelay(() -> { // Check and connect to the registry try { connect(); } catch (Throwable t) { // Defensive fault tolerance logger.error("Unexpected error occur at reconnect, cause: " + t.getMessage(), t); } }, reconnectPeriod, reconnectPeriod, TimeUnit.MILLISECONDS); } protected final void connect() { try { // Check whether or not it is connected if (isAvailable()) { return; } if (logger.isInfoEnabled()) { logger.info("Reconnect to registry " + getUrl()); } clientLock.lock(); try { // Double check whether or not it is connected if (isAvailable()) { return; } recover(); } finally { clientLock.unlock(); } } catch (Throwable t) { // Ignore all the exceptions and wait for the next retry if (getUrl().getParameter(Constants.CHECK_KEY, true)) { if (t instanceof RuntimeException) { throw (RuntimeException) t; } throw new RuntimeException(t.getMessage(), t); } logger.error("Failed to connect to registry " + getUrl().getAddress() + " from provider/consumer " + NetUtils.getLocalHost() + " use dubbo " + Version.getVersion() + ", cause: " + 
t.getMessage(), t); } } @Override public boolean isAvailable() { if (registryInvoker == null) { return false; } return registryInvoker.isAvailable(); } @Override public void destroy() { super.destroy(); try { // Cancel the reconnection timer ExecutorUtil.cancelScheduledFuture(reconnectFuture); } catch (Throwable t) { logger.warn("Failed to cancel reconnect timer", t); } registryInvoker.destroy(); ExecutorUtil.gracefulShutdown(reconnectTimer, reconnectPeriod); } @Override public void doRegister(URL url) { registryService.register(url); } @Override public void doUnregister(URL url) { registryService.unregister(url); } @Override public void doSubscribe(URL url, NotifyListener listener) { registryService.subscribe(url, listener); } @Override public void doUnsubscribe(URL url, NotifyListener listener) { registryService.unsubscribe(url, listener); } @Override public List<URL> lookup(URL url) { return registryService.lookup(url); } }
lovepoem/dubbo
dubbo-registry/dubbo-registry-default/src/main/java/org/apache/dubbo/registry/dubbo/DubboRegistry.java
Java
apache-2.0
5,956
# The following comment should be removed at some point in the future. # mypy: disallow-untyped-defs=False from __future__ import absolute_import import logging import os.path import re from pip._vendor.packaging.version import parse as parse_version from pip._vendor.six.moves.urllib import parse as urllib_parse from pip._vendor.six.moves.urllib import request as urllib_request from pip._internal.exceptions import BadCommand, InstallationError from pip._internal.utils.misc import display_path, hide_url from pip._internal.utils.subprocess import make_command from pip._internal.utils.temp_dir import TempDirectory from pip._internal.utils.typing import MYPY_CHECK_RUNNING from pip._internal.vcs.versioncontrol import ( RemoteNotFoundError, VersionControl, find_path_to_setup_from_repo_root, vcs, ) if MYPY_CHECK_RUNNING: from typing import Optional, Tuple from pip._internal.utils.misc import HiddenText from pip._internal.vcs.versioncontrol import AuthInfo, RevOptions urlsplit = urllib_parse.urlsplit urlunsplit = urllib_parse.urlunsplit logger = logging.getLogger(__name__) HASH_REGEX = re.compile('^[a-fA-F0-9]{40}$') def looks_like_hash(sha): return bool(HASH_REGEX.match(sha)) class Git(VersionControl): name = 'git' dirname = '.git' repo_name = 'clone' schemes = ( 'git', 'git+http', 'git+https', 'git+ssh', 'git+git', 'git+file', ) # Prevent the user's environment variables from interfering with pip: # https://github.com/pypa/pip/issues/1130 unset_environ = ('GIT_DIR', 'GIT_WORK_TREE') default_arg_rev = 'HEAD' @staticmethod def get_base_rev_args(rev): return [rev] def is_immutable_rev_checkout(self, url, dest): # type: (str, str) -> bool _, rev_options = self.get_url_rev_options(hide_url(url)) if not rev_options.rev: return False if not self.is_commit_id_equal(dest, rev_options.rev): # the current commit is different from rev, # which means rev was something else than a commit hash return False # return False in the rare case rev is both a commit hash # and a tag or a 
branch; we don't want to cache in that case # because that branch/tag could point to something else in the future is_tag_or_branch = bool( self.get_revision_sha(dest, rev_options.rev)[0] ) return not is_tag_or_branch def get_git_version(self): VERSION_PFX = 'git version ' version = self.run_command( ['version'], show_stdout=False, stdout_only=True ) if version.startswith(VERSION_PFX): version = version[len(VERSION_PFX):].split()[0] else: version = '' # get first 3 positions of the git version because # on windows it is x.y.z.windows.t, and this parses as # LegacyVersion which always smaller than a Version. version = '.'.join(version.split('.')[:3]) return parse_version(version) @classmethod def get_current_branch(cls, location): """ Return the current branch, or None if HEAD isn't at a branch (e.g. detached HEAD). """ # git-symbolic-ref exits with empty stdout if "HEAD" is a detached # HEAD rather than a symbolic ref. In addition, the -q causes the # command to exit with status code 1 instead of 128 in this case # and to suppress the message to stderr. args = ['symbolic-ref', '-q', 'HEAD'] output = cls.run_command( args, extra_ok_returncodes=(1, ), show_stdout=False, stdout_only=True, cwd=location, ) ref = output.strip() if ref.startswith('refs/heads/'): return ref[len('refs/heads/'):] return None def export(self, location, url): # type: (str, HiddenText) -> None """Export the Git repository at the url to the destination location""" if not location.endswith('/'): location = location + '/' with TempDirectory(kind="export") as temp_dir: self.unpack(temp_dir.path, url=url) self.run_command( ['checkout-index', '-a', '-f', '--prefix', location], show_stdout=False, cwd=temp_dir.path ) @classmethod def get_revision_sha(cls, dest, rev): """ Return (sha_or_none, is_branch), where sha_or_none is a commit hash if the revision names a remote branch or tag, otherwise None. Args: dest: the repository directory. rev: the revision name. """ # Pass rev to pre-filter the list. 
output = cls.run_command( ['show-ref', rev], cwd=dest, show_stdout=False, stdout_only=True, on_returncode='ignore', ) refs = {} for line in output.strip().splitlines(): try: sha, ref = line.split() except ValueError: # Include the offending line to simplify troubleshooting if # this error ever occurs. raise ValueError('unexpected show-ref line: {!r}'.format(line)) refs[ref] = sha branch_ref = 'refs/remotes/origin/{}'.format(rev) tag_ref = 'refs/tags/{}'.format(rev) sha = refs.get(branch_ref) if sha is not None: return (sha, True) sha = refs.get(tag_ref) return (sha, False) @classmethod def _should_fetch(cls, dest, rev): """ Return true if rev is a ref or is a commit that we don't have locally. Branches and tags are not considered in this method because they are assumed to be always available locally (which is a normal outcome of ``git clone`` and ``git fetch --tags``). """ if rev.startswith("refs/"): # Always fetch remote refs. return True if not looks_like_hash(rev): # Git fetch would fail with abbreviated commits. return False if cls.has_commit(dest, rev): # Don't fetch if we have the commit locally. return False return True @classmethod def resolve_revision(cls, dest, url, rev_options): # type: (str, HiddenText, RevOptions) -> RevOptions """ Resolve a revision to a new RevOptions object with the SHA1 of the branch, tag, or ref if found. Args: rev_options: a RevOptions object. """ rev = rev_options.arg_rev # The arg_rev property's implementation for Git ensures that the # rev return value is always non-None. assert rev is not None sha, is_branch = cls.get_revision_sha(dest, rev) if sha is not None: rev_options = rev_options.make_new(sha) rev_options.branch_name = rev if is_branch else None return rev_options # Do not show a warning for the common case of something that has # the form of a Git commit hash. 
        # --- tail of Git.resolve_revision (the def itself starts above this chunk) ---
        if not looks_like_hash(rev):
            logger.warning(
                "Did not find branch or tag '%s', assuming revision or ref.",
                rev,
            )

        if not cls._should_fetch(dest, rev):
            return rev_options

        # fetch the requested revision
        cls.run_command(
            make_command('fetch', '-q', url, rev_options.to_args()),
            cwd=dest,
        )
        # Change the revision to the SHA of the ref we fetched
        sha = cls.get_revision(dest, rev='FETCH_HEAD')
        rev_options = rev_options.make_new(sha)

        return rev_options

    @classmethod
    def is_commit_id_equal(cls, dest, name):
        """
        Return whether the current commit hash equals the given name.

        Args:
          dest: the repository directory.
          name: a string name.
        """
        if not name:
            # Then avoid an unnecessary subprocess call.
            return False

        return cls.get_revision(dest) == name

    def fetch_new(self, dest, url, rev_options):
        # type: (str, HiddenText, RevOptions) -> None
        """Clone ``url`` into ``dest`` and check out the requested revision.

        If ``rev_options`` names a branch, create and track a local branch
        for it; otherwise check out the resolved commit directly. Submodules
        (if any) are initialized afterwards.
        """
        rev_display = rev_options.to_display()
        logger.info('Cloning %s%s to %s', url, rev_display, display_path(dest))
        self.run_command(make_command('clone', '-q', url, dest))

        if rev_options.rev:
            # Then a specific revision was requested.
            rev_options = self.resolve_revision(dest, url, rev_options)
            branch_name = getattr(rev_options, 'branch_name', None)
            if branch_name is None:
                # Only do a checkout if the current commit id doesn't match
                # the requested revision.
                if not self.is_commit_id_equal(dest, rev_options.rev):
                    cmd_args = make_command(
                        'checkout', '-q', rev_options.to_args(),
                    )
                    self.run_command(cmd_args, cwd=dest)
            elif self.get_current_branch(dest) != branch_name:
                # Then a specific branch was requested, and that branch
                # is not yet checked out.
                track_branch = 'origin/{}'.format(branch_name)
                cmd_args = [
                    'checkout', '-b', branch_name, '--track', track_branch,
                ]
                self.run_command(cmd_args, cwd=dest)

        #: repo may contain submodules
        self.update_submodules(dest)

    def switch(self, dest, url, rev_options):
        # type: (str, HiddenText, RevOptions) -> None
        """Point the existing checkout at a new remote URL and revision."""
        self.run_command(
            make_command('config', 'remote.origin.url', url),
            cwd=dest,
        )
        cmd_args = make_command('checkout', '-q', rev_options.to_args())
        self.run_command(cmd_args, cwd=dest)

        self.update_submodules(dest)

    def update(self, dest, url, rev_options):
        # type: (str, HiddenText, RevOptions) -> None
        """Fetch from the default remote and hard-reset to the wanted rev."""
        # First fetch changes from the default remote
        if self.get_git_version() >= parse_version('1.9.0'):
            # fetch tags in addition to everything else
            self.run_command(['fetch', '-q', '--tags'], cwd=dest)
        else:
            self.run_command(['fetch', '-q'], cwd=dest)
        # Then reset to wanted revision (maybe even origin/master)
        rev_options = self.resolve_revision(dest, url, rev_options)
        cmd_args = make_command('reset', '--hard', '-q', rev_options.to_args())
        self.run_command(cmd_args, cwd=dest)
        #: update submodules
        self.update_submodules(dest)

    @classmethod
    def get_remote_url(cls, location):
        """
        Return URL of the first remote encountered, preferring the remote
        named "origin" when it exists.

        Raises RemoteNotFoundError if the repository does not have a remote
        url configured.
        """
        # We need to pass 1 for extra_ok_returncodes since the command
        # exits with return code 1 if there are no matching lines.
        stdout = cls.run_command(
            ['config', '--get-regexp', r'remote\..*\.url'],
            extra_ok_returncodes=(1, ),
            show_stdout=False,
            stdout_only=True,
            cwd=location,
        )
        remotes = stdout.splitlines()
        try:
            found_remote = remotes[0]
        except IndexError:
            raise RemoteNotFoundError

        # Prefer "origin" over whichever remote happened to come first.
        for remote in remotes:
            if remote.startswith('remote.origin.url '):
                found_remote = remote
                break
        # Each line has the form "remote.<name>.url <url>".
        url = found_remote.split(' ')[1]
        return url.strip()

    @classmethod
    def has_commit(cls, location, rev):
        """
        Check if rev is a commit that is available in the local repository.
        """
        try:
            cls.run_command(
                # NOTE(review): "sha^" + rev looks suspicious -- git's usual
                # syntax for verifying a commit object is rev + "^{commit}".
                # Left untouched here; confirm against upstream pip before
                # changing, since callers may rely on the current behavior.
                ['rev-parse', '-q', '--verify', "sha^" + rev],
                cwd=location,
                log_failed_cmd=False,
            )
        except InstallationError:
            return False
        else:
            return True

    @classmethod
    def get_revision(cls, location, rev=None):
        """Return the commit hash that ``rev`` (default HEAD) resolves to."""
        if rev is None:
            rev = 'HEAD'
        current_rev = cls.run_command(
            ['rev-parse', rev],
            show_stdout=False,
            stdout_only=True,
            cwd=location,
        )
        return current_rev.strip()

    @classmethod
    def get_subdirectory(cls, location):
        """
        Return the path to setup.py, relative to the repo root.
        Return None if setup.py is in the repo root.
        """
        # find the repo root
        git_dir = cls.run_command(
            ['rev-parse', '--git-dir'],
            show_stdout=False,
            stdout_only=True,
            cwd=location,
        ).strip()
        # rev-parse --git-dir may return a relative path; anchor it.
        if not os.path.isabs(git_dir):
            git_dir = os.path.join(location, git_dir)
        repo_root = os.path.abspath(os.path.join(git_dir, '..'))
        return find_path_to_setup_from_repo_root(location, repo_root)

    @classmethod
    def get_url_rev_and_auth(cls, url):
        # type: (str) -> Tuple[str, Optional[str], AuthInfo]
        """
        Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
        That's required because although they use SSH they sometimes don't
        work with a ssh:// scheme (e.g. GitHub). But we need a scheme for
        parsing. Hence we remove it again afterwards and return it as a stub.
        """
        # Works around an apparent Git bug
        # (see https://article.gmane.org/gmane.comp.version-control.git/146500)
        scheme, netloc, path, query, fragment = urlsplit(url)
        if scheme.endswith('file'):
            # Normalize backslashes and leading slashes so Windows-style
            # file URLs round-trip through url2pathname correctly.
            initial_slashes = path[:-len(path.lstrip('/'))]
            newpath = (
                initial_slashes +
                urllib_request.url2pathname(path)
                .replace('\\', '/').lstrip('/')
            )
            after_plus = scheme.find('+') + 1
            url = scheme[:after_plus] + urlunsplit(
                (scheme[after_plus:], netloc, newpath, query, fragment),
            )

        if '://' not in url:
            # SCP-style stub URL: borrow an ssh:// scheme just for parsing,
            # then strip it back off.
            assert 'file:' not in url
            url = url.replace('git+', 'git+ssh://')
            url, rev, user_pass = super(Git, cls).get_url_rev_and_auth(url)
            url = url.replace('ssh://', '')
        else:
            url, rev, user_pass = super(Git, cls).get_url_rev_and_auth(url)

        return url, rev, user_pass

    @classmethod
    def update_submodules(cls, location):
        """Initialize/update submodules if a .gitmodules file is present."""
        if not os.path.exists(os.path.join(location, '.gitmodules')):
            return
        cls.run_command(
            ['submodule', 'update', '--init', '--recursive', '-q'],
            cwd=location,
        )

    @classmethod
    def get_repository_root(cls, location):
        """Return the top-level directory of the repo containing ``location``,
        or None if it cannot be determined (not a repo, or git unavailable).
        """
        loc = super(Git, cls).get_repository_root(location)
        if loc:
            return loc
        try:
            r = cls.run_command(
                ['rev-parse', '--show-toplevel'],
                cwd=location,
                show_stdout=False,
                stdout_only=True,
                on_returncode='raise',
                log_failed_cmd=False,
            )
        except BadCommand:
            logger.debug("could not determine if %s is under git control "
                         "because git is not available", location)
            return None
        except InstallationError:
            return None
        return os.path.normpath(r.rstrip('\r\n'))


vcs.register(Git)
pantsbuild/pex
pex/vendor/_vendored/pip/pip/_internal/vcs/git.py
Python
apache-2.0
15,599
# coding: utf-8 # # Copyright (c) 2013 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # # Base on code in migrate/changeset/databases/sqlite.py which is under # the following license: # # The MIT License # # Copyright (c) 2009 Evan Rosson, Jan Dittberner, Domen Kožar # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
import os
import re

from migrate.changeset import ansisql
from migrate.changeset.databases import sqlite
from migrate import exceptions as versioning_exceptions
from migrate.versioning import api as versioning_api
from migrate.versioning.repository import Repository
import sqlalchemy
from sqlalchemy.schema import UniqueConstraint

from essential.db import exception
from essential.gettextutils import _


def _get_unique_constraints(self, table):
    """Retrieve information about existing unique constraints of the table

    This feature is needed for _recreate_table() to work properly.
    Unfortunately, it's not available in sqlalchemy 0.7.x/0.8.x.

    """
    data = table.metadata.bind.execute(
        """SELECT sql FROM sqlite_master WHERE type='table' AND name=:table_name""",
        table_name=table.name
    ).fetchone()[0]

    # Raw string: the pattern contains backslash escapes (\w, \(, \)) which
    # are invalid escape sequences in a plain string literal on Python 3.6+.
    UNIQUE_PATTERN = r"CONSTRAINT (\w+) UNIQUE \(([^\)]+)\)"
    return [
        UniqueConstraint(
            *[getattr(table.columns, c.strip(' "')) for c in cols.split(",")],
            name=name
        )
        for name, cols in re.findall(UNIQUE_PATTERN, data)
    ]


def _recreate_table(self, table, column=None, delta=None, omit_uniques=None):
    """Recreate the table properly

    Unlike the corresponding original method of sqlalchemy-migrate this one
    doesn't drop existing unique constraints when creating a new one.

    """
    table_name = self.preparer.format_table(table)

    # we remove all indexes so as not to have
    # problems during copy and re-create
    for index in table.indexes:
        index.drop()

    # reflect existing unique constraints
    for uc in self._get_unique_constraints(table):
        table.append_constraint(uc)
    # omit given unique constraints when creating a new table if required
    table.constraints = set([
        cons for cons in table.constraints
        if omit_uniques is None or cons.name not in omit_uniques
    ])

    # SQLite cannot ALTER most things, so rename the old table aside,
    # create a fresh one, copy the rows across, and drop the old table.
    self.append('ALTER TABLE %s RENAME TO migration_tmp' % table_name)
    self.execute()

    insertion_string = self._modify_table(table, column, delta)

    table.create(bind=self.connection)
    self.append(insertion_string % {'table_name': table_name})
    self.execute()
    self.append('DROP TABLE migration_tmp')
    self.execute()


def _visit_migrate_unique_constraint(self, *p, **k):
    """Drop the given unique constraint

    The corresponding original method of sqlalchemy-migrate just
    raises NotImplemented error

    """
    self.recreate_table(p[0].table, omit_uniques=[p[0].name])


def patch_migrate():
    """A workaround for SQLite's inability to alter things

    SQLite abilities to alter tables are very limited (please read
    http://www.sqlite.org/lang_altertable.html for more details).
    E. g. one can't drop a column or a constraint in SQLite. The
    workaround for this is to recreate the original table omitting
    the corresponding constraint (or column).

    sqlalchemy-migrate library has recreate_table() method that
    implements this workaround, but it does it wrong:

        - information about unique constraints of a table
          is not retrieved. So if you have a table with one
          unique constraint and a migration adding another one
          you will end up with a table that has only the
          latter unique constraint, and the former will be lost

        - dropping of unique constraints is not supported at all

    The proper way to fix this is to provide a pull-request to
    sqlalchemy-migrate, but the project seems to be dead. So we
    can go on with monkey-patching of the lib at least for now.

    """
    # this patch is needed to ensure that recreate_table() doesn't drop
    # existing unique constraints of the table when creating a new one
    helper_cls = sqlite.SQLiteHelper
    helper_cls.recreate_table = _recreate_table
    helper_cls._get_unique_constraints = _get_unique_constraints

    # this patch is needed to be able to drop existing unique constraints
    constraint_cls = sqlite.SQLiteConstraintDropper
    constraint_cls.visit_migrate_unique_constraint = \
        _visit_migrate_unique_constraint
    constraint_cls.__bases__ = (ansisql.ANSIColumnDropper,
                                sqlite.SQLiteConstraintGenerator)


def db_sync(engine, abs_path, version=None, init_version=0, sanity_check=True):
    """Upgrade or downgrade a database.

    Function runs the upgrade() or downgrade() functions in change scripts.

    :param engine:       SQLAlchemy engine instance for a given database
    :param abs_path:     Absolute path to migrate repository.
    :param version:      Database will upgrade/downgrade until this version.
                         If None - database will update to the latest
                         available version.
    :param init_version: Initial database version
    :param sanity_check: Require schema sanity checking for all tables
    :raises DbMigrationError: if ``version`` is not an integer
    """
    if version is not None:
        try:
            version = int(version)
        except ValueError:
            raise exception.DbMigrationError(
                message=_("version should be an integer"))

    current_version = db_version(engine, abs_path, init_version)
    repository = _find_migrate_repo(abs_path)
    if sanity_check:
        _db_schema_sanity_check(engine)
    if version is None or version > current_version:
        return versioning_api.upgrade(engine, repository, version)
    else:
        return versioning_api.downgrade(engine, repository, version)


def _db_schema_sanity_check(engine):
    """Ensure all database tables were created with required parameters.

    Currently only checks MySQL: every table must use a utf8 collation.

    :param engine:  SQLAlchemy engine instance for a given database

    """
    if engine.name == 'mysql':
        onlyutf8_sql = ('SELECT TABLE_NAME,TABLE_COLLATION '
                        'from information_schema.TABLES '
                        'where TABLE_SCHEMA=%s and '
                        'TABLE_COLLATION NOT LIKE "%%utf8%%"')

        table_names = [res[0] for res in engine.execute(onlyutf8_sql,
                                                        engine.url.database)]
        if len(table_names) > 0:
            raise ValueError(_('Tables "%s" have non utf8 collation, '
                               'please make sure all tables are CHARSET=utf8'
                               ) % ','.join(table_names))


def db_version(engine, abs_path, init_version):
    """Show the current version of the repository.

    If the database is not yet under version control and is empty (or only
    contains an alembic_version table), it is stamped with ``init_version``
    first; otherwise an error is raised.

    :param engine:       SQLAlchemy engine instance for a given database
    :param abs_path:     Absolute path to migrate repository
    :param init_version: Initial database version
    :raises DbMigrationError: if the database has tables but is not under
                              version control
    """
    repository = _find_migrate_repo(abs_path)
    try:
        return versioning_api.db_version(engine, repository)
    except versioning_exceptions.DatabaseNotControlledError:
        meta = sqlalchemy.MetaData()
        meta.reflect(bind=engine)
        tables = meta.tables
        if len(tables) == 0 or 'alembic_version' in tables:
            db_version_control(engine, abs_path, version=init_version)
            return versioning_api.db_version(engine, repository)
        else:
            raise exception.DbMigrationError(
                message=_(
                    "The database is not under version control, but has "
                    "tables. Please stamp the current version of the schema "
                    "manually."))


def db_version_control(engine, abs_path, version=None):
    """Mark a database as under this repository's version control.

    Once a database is under version control, schema changes should
    only be done via change scripts in this repository.

    :param engine:   SQLAlchemy engine instance for a given database
    :param abs_path: Absolute path to migrate repository
    :param version:  Initial database version
    """
    repository = _find_migrate_repo(abs_path)
    versioning_api.version_control(engine, repository, version)
    return version


def _find_migrate_repo(abs_path):
    """Get the project's change script repository

    :param abs_path: Absolute path to migrate repository
    :raises DbMigrationError: if ``abs_path`` does not exist
    """
    if not os.path.exists(abs_path):
        raise exception.DbMigrationError("Path %s not found" % abs_path)
    return Repository(abs_path)
gaolichuang/py-essential
essential/db/sqlalchemy/migration.py
Python
apache-2.0
10,186
/** * Copyright 2016 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ goog.provide('audioCat.audio.record.RecordingJobCreatedEvent'); goog.require('audioCat.audio.record.Event'); goog.require('audioCat.utility.Event'); /** * An event marking the creation of a new recording job. * @param {!audioCat.audio.record.RecordingJob} recordingJob The newly created * job recording. * @constructor * @extends {audioCat.utility.Event} */ audioCat.audio.record.RecordingJobCreatedEvent = function(recordingJob) { goog.base(this, audioCat.audio.record.Event.DEFAULT_RECORDING_JOB_CREATED); /** * The newly made recording job. * @type {!audioCat.audio.record.RecordingJob} */ this.recordingJob = recordingJob; }; goog.inherits( audioCat.audio.record.RecordingJobCreatedEvent, audioCat.utility.Event);
google/beautiful-audio-editor
src/js/original/audio/record/recording-job-created-event.js
JavaScript
apache-2.0
1,347
package org.cloudfoundry.autoscaler.data.couchdb.dao.impl;

import java.util.ArrayList;
import java.util.List;

import org.apache.log4j.Logger;
import org.cloudfoundry.autoscaler.data.couchdb.dao.AppInstanceMetricsDAO;
import org.cloudfoundry.autoscaler.data.couchdb.dao.base.TypedCouchDbRepositorySupport;
import org.cloudfoundry.autoscaler.data.couchdb.document.AppInstanceMetrics;
import org.ektorp.ComplexKey;
import org.ektorp.CouchDbConnector;
import org.ektorp.ViewQuery;
import org.ektorp.support.View;

/**
 * CouchDB-backed DAO for {@link AppInstanceMetrics} documents. Each nested
 * repository class maps one CouchDB design-document view used by the query
 * methods of this DAO.
 */
public class AppInstanceMetricsDAOImpl extends CommonDAOImpl implements AppInstanceMetricsDAO {

    /** View over all AppInstanceMetrics docs, keyed by [appId, appType, timestamp]. */
    @View(name = "byAll", map = "function(doc) { if (doc.type == 'AppInstanceMetrics' ) emit([doc.appId, doc.appType, doc.timestamp], doc._id)}")
    private static class AppInstanceMetricsRepository_All extends TypedCouchDbRepositorySupport<AppInstanceMetrics> {

        public AppInstanceMetricsRepository_All(CouchDbConnector db) {
            super(AppInstanceMetrics.class, db, "AppInstanceMetrics_byAll");
        }

        public List<AppInstanceMetrics> getAllRecords() {
            return queryView("byAll");
        }
    }

    /** View keyed by [appId] for per-application lookups. */
    @View(name = "by_appId", map = "function(doc) { if (doc.type=='AppInstanceMetrics' && doc.appId) { emit([doc.appId], doc._id) } }")
    private static class AppInstanceMetricsRepository_ByAppId extends TypedCouchDbRepositorySupport<AppInstanceMetrics> {

        public AppInstanceMetricsRepository_ByAppId(CouchDbConnector db) {
            super(AppInstanceMetrics.class, db, "AppInstanceMetrics_ByAppId");
        }

        public List<AppInstanceMetrics> findByAppId(String appId) {
            ComplexKey key = ComplexKey.of(appId);
            return queryView("by_appId", key);
        }
    }

    /** View keyed by [appId, timestamp] for time-range queries per application. */
    @View(name = "by_appId_between", map = "function(doc) { if (doc.type=='AppInstanceMetrics' && doc.appId && doc.timestamp) { emit([doc.appId, doc.timestamp], doc._id) } }")
    private static class AppInstanceMetricsRepository_ByAppIdBetween extends TypedCouchDbRepositorySupport<AppInstanceMetrics> {

        public AppInstanceMetricsRepository_ByAppIdBetween(CouchDbConnector db) {
            super(AppInstanceMetrics.class, db, "AppInstanceMetrics_ByAppIdBetween");
        }

        /**
         * Returns metrics for {@code appId} with {@code startTimestamp <= timestamp <= endTimestamp},
         * or {@code null} if the view query fails.
         */
        public List<AppInstanceMetrics> findByAppIdBetween(String appId, long startTimestamp, long endTimestamp)
                throws Exception {
            ComplexKey startKey = ComplexKey.of(appId, startTimestamp);
            ComplexKey endKey = ComplexKey.of(appId, endTimestamp);
            ViewQuery q = createQuery("by_appId_between").includeDocs(true).startKey(startKey).endKey(endKey);
            List<AppInstanceMetrics> returnvalue = null;
            String[] input = beforeConnection("QUERY", new String[] { "by_appId_between", appId,
                    String.valueOf(startTimestamp), String.valueOf(endTimestamp) });
            try {
                returnvalue = db.queryView(q, AppInstanceMetrics.class);
            } catch (Exception e) {
                // Log through the class logger (was e.printStackTrace()),
                // consistent with the rest of this DAO; the method still
                // returns null on failure as callers expect.
                logger.error(e.getMessage(), e);
            }
            afterConnection(input);
            return returnvalue;
        }
    }

    /** View keyed by [serviceId, timestamp] for "older than" queries per service. */
    @View(name = "by_serviceId_before", map = "function(doc) { if (doc.type=='AppInstanceMetrics' && doc.serviceId && doc.timestamp) { emit([ doc.serviceId, doc.timestamp], doc._id) } }")
    private static class AppInstanceMetricsRepository_ByServiceId_Before
            extends TypedCouchDbRepositorySupport<AppInstanceMetrics> {

        public AppInstanceMetricsRepository_ByServiceId_Before(CouchDbConnector db) {
            super(AppInstanceMetrics.class, db, "AppInstanceMetrics_ByServiceId");
        }

        /**
         * Returns metrics for {@code serviceId} with {@code 0 <= timestamp <= olderThan},
         * or {@code null} if the view query fails.
         */
        public List<AppInstanceMetrics> findByServiceIdBefore(String serviceId, long olderThan) throws Exception {
            ComplexKey startKey = ComplexKey.of(serviceId, 0);
            ComplexKey endKey = ComplexKey.of(serviceId, olderThan);
            ViewQuery q = createQuery("by_serviceId_before").includeDocs(true).startKey(startKey).endKey(endKey);
            List<AppInstanceMetrics> returnvalue = null;
            String[] input = beforeConnection("QUERY",
                    new String[] { "by_serviceId_before", serviceId, String.valueOf(0), String.valueOf(olderThan) });
            try {
                returnvalue = db.queryView(q, AppInstanceMetrics.class);
            } catch (Exception e) {
                // Log through the class logger (was e.printStackTrace()).
                logger.error(e.getMessage(), e);
            }
            afterConnection(input);
            return returnvalue;
        }
    }

    private static final Logger logger = Logger.getLogger(AppInstanceMetricsDAOImpl.class);

    private AppInstanceMetricsRepository_All metricsRepoAll;
    private AppInstanceMetricsRepository_ByAppId metricsRepoByAppId;
    private AppInstanceMetricsRepository_ByAppIdBetween metricsRepoByAppIdBetween;
    private AppInstanceMetricsRepository_ByServiceId_Before metricsRepoByServiceIdBefore;

    public AppInstanceMetricsDAOImpl(CouchDbConnector db) {
        metricsRepoAll = new AppInstanceMetricsRepository_All(db);
        metricsRepoByAppId = new AppInstanceMetricsRepository_ByAppId(db);
        metricsRepoByAppIdBetween = new AppInstanceMetricsRepository_ByAppIdBetween(db);
        metricsRepoByServiceIdBefore = new AppInstanceMetricsRepository_ByServiceId_Before(db);
    }

    /**
     * As {@link #AppInstanceMetricsDAOImpl(CouchDbConnector)}, optionally
     * initializing the CouchDB design documents for all repositories.
     * Initialization failures are logged, not propagated.
     */
    public AppInstanceMetricsDAOImpl(CouchDbConnector db, boolean initDesignDocument) {
        this(db);
        if (initDesignDocument) {
            try {
                initAllRepos();
            } catch (Exception e) {
                logger.error(e.getMessage(), e);
            }
        }
    }

    @Override
    public List<AppInstanceMetrics> findAll() {
        return this.metricsRepoAll.getAllRecords();
    }

    @Override
    public List<AppInstanceMetrics> findByAppId(String appId) {
        return this.metricsRepoByAppId.findByAppId(appId);
    }

    @Override
    public List<AppInstanceMetrics> findByAppIdBetween(String appId, long startTimestamp, long endTimestamp)
            throws Exception {
        return this.metricsRepoByAppIdBetween.findByAppIdBetween(appId, startTimestamp, endTimestamp);
    }

    @Override
    public List<AppInstanceMetrics> findByServiceIdBefore(String serviceId, long olderThan) throws Exception {
        return this.metricsRepoByServiceIdBefore.findByServiceIdBefore(serviceId, olderThan);
    }

    /**
     * Returns metrics for {@code appId} from {@code timestamp} up to now,
     * or {@code null} if the underlying query throws.
     */
    @Override
    public List<AppInstanceMetrics> findByAppIdAfter(String appId, long timestamp) throws Exception {
        try {
            return findByAppIdBetween(appId, timestamp, System.currentTimeMillis());
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
        }
        return null;
    }

    @SuppressWarnings("unchecked")
    @Override
    public <T> TypedCouchDbRepositorySupport<T> getDefaultRepo() {
        return (TypedCouchDbRepositorySupport<T>) this.metricsRepoAll;
    }

    @SuppressWarnings("unchecked")
    @Override
    public <T> List<TypedCouchDbRepositorySupport<T>> getAllRepos() {
        List<TypedCouchDbRepositorySupport<T>> repoList = new ArrayList<TypedCouchDbRepositorySupport<T>>();
        repoList.add((TypedCouchDbRepositorySupport<T>) this.metricsRepoAll);
        repoList.add((TypedCouchDbRepositorySupport<T>) this.metricsRepoByAppId);
        repoList.add((TypedCouchDbRepositorySupport<T>) this.metricsRepoByAppIdBetween);
        repoList.add((TypedCouchDbRepositorySupport<T>) this.metricsRepoByServiceIdBefore);
        return repoList;
    }
}
cfibmers/open-Autoscaler
server/src/main/java/org/cloudfoundry/autoscaler/data/couchdb/dao/impl/AppInstanceMetricsDAOImpl.java
Java
apache-2.0
6,943
package jef.common.wrapper; import java.io.Serializable; public interface IHolder<T> extends Serializable{ T get(); void set(T obj); }
xuse/ef-orm
common-core/src/main/java/jef/common/wrapper/IHolder.java
Java
apache-2.0
139
/* * Copyright 2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.restassured.response; public interface Validatable<T extends ValidatableResponseOptions<T, R>, R extends ResponseBody<R> & ResponseOptions<R>> { /** * Returns a validatable response that's lets you validate the response. Usage example: * <p/> * <pre> * given(). * param("firstName", "John"). * param("lastName", "Doe"). * when(). * get("/greet"). * then(). * body("greeting", equalTo("John Doe")); * </pre> * * @return A validatable response */ T then(); }
jayway/rest-assured
rest-assured/src/main/java/io/restassured/response/Validatable.java
Java
apache-2.0
1,199
/*
 * Copyright 2010-2014 Ning, Inc.
 * Copyright 2014 The Billing Project, LLC
 *
 * Ning licenses this file to you under the Apache License, version 2.0
 * (the "License"); you may not use this file except in compliance with the
 * License.  You may obtain a copy of the License at:
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package org.killbill.billing.plugin.meter.timeline.shutdown;

import java.util.HashMap;
import java.util.Map;

import org.joda.time.DateTime;

/**
 * This class is used solely as a Json mapping class when saving timelines
 * in a database blob on shutdown, and restoring them on startup.
 * <p/>
 * The Map&lt;Integer, Map&lt;Integer, DateTime&gt;&gt; maps from sourceId to eventCategoryId
 * to startTime.
 */
public class StartTimes {

    private final DateTime timeInserted;
    private final Map<Integer, Map<Integer, DateTime>> startTimesMap;
    // Earliest start time seen across all sources/categories; kept in sync
    // by the constructor and addTime().
    private DateTime minStartTime;

    /**
     * Builds an instance from an existing map, scanning every entry to
     * recompute the minimum start time.
     */
    public StartTimes(final DateTime timeInserted, final Map<Integer, Map<Integer, DateTime>> startTimesMap) {
        this.timeInserted = timeInserted;
        this.startTimesMap = startTimesMap;
        // Seed with the maximum representable instant so any real entry wins.
        DateTime minDateTime = new DateTime(Long.MAX_VALUE);
        for (final Map<Integer, DateTime> categoryMap : startTimesMap.values()) {
            for (final DateTime startTime : categoryMap.values()) {
                if (minDateTime.isAfter(startTime)) {
                    minDateTime = startTime;
                }
            }
        }
        this.minStartTime = minDateTime;
    }

    /**
     * No-arg constructor for JSON deserialization; starts with an empty map
     * and the sentinel minimum (maximum representable instant).
     */
    public StartTimes() {
        this.timeInserted = new DateTime();
        minStartTime = new DateTime(Long.MAX_VALUE);
        this.startTimesMap = new HashMap<Integer, Map<Integer, DateTime>>();
    }

    /**
     * Records the start time for a (sourceId, categoryId) pair, creating the
     * per-source map on demand and lowering {@code minStartTime} if needed.
     */
    public void addTime(final int sourceId, final int categoryId, final DateTime dateTime) {
        Map<Integer, DateTime> sourceTimes = startTimesMap.get(sourceId);
        if (sourceTimes == null) {
            sourceTimes = new HashMap<Integer, DateTime>();
            startTimesMap.put(sourceId, sourceTimes);
        }
        sourceTimes.put(categoryId, dateTime);
        if (dateTime.isBefore(minStartTime)) {
            minStartTime = dateTime;
        }
    }

    /**
     * Returns the recorded start time for the pair, or {@code null} when the
     * source or category is unknown.
     */
    public DateTime getStartTimeForSourceIdAndCategoryId(final int sourceId, final int categoryId) {
        final Map<Integer, DateTime> sourceTimes = startTimesMap.get(sourceId);
        if (sourceTimes != null) {
            return sourceTimes.get(categoryId);
        } else {
            return null;
        }
    }

    public Map<Integer, Map<Integer, DateTime>> getStartTimesMap() {
        return startTimesMap;
    }

    public DateTime getTimeInserted() {
        return timeInserted;
    }

    public DateTime getMinStartTime() {
        return minStartTime;
    }
}
killbill/killbill-meter-plugin
src/main/java/org/killbill/billing/plugin/meter/timeline/shutdown/StartTimes.java
Java
apache-2.0
3,097
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.sis.util; import java.util.Arrays; import java.nio.CharBuffer; import org.opengis.metadata.citation.Citation; // For javadoc import org.opengis.referencing.IdentifiedObject; // For javadoc import static java.lang.Character.*; /** * Static methods working with {@link CharSequence} instances. Some methods defined in this * class duplicate the functionalities already provided in the standard {@link String} class, * but works on a generic {@code CharSequence} instance instead of {@code String}. * * <h2>Unicode support</h2> * Every methods defined in this class work on <cite>code points</cite> instead of characters * when appropriate. Consequently those methods should behave correctly with characters outside * the <cite>Basic Multilingual Plane</cite> (BMP). * * <h2>Policy on space characters</h2> * Java defines two methods for testing if a character is a white space: * {@link Character#isWhitespace(int)} and {@link Character#isSpaceChar(int)}. * Those two methods differ in the way they handle {@linkplain Characters#NO_BREAK_SPACE * no-break spaces}, tabulations and line feeds. 
The general policy in the SIS library is: * * <ul> * <li>Use {@code isWhitespace(…)} when separating entities (words, numbers, tokens, <i>etc.</i>) * in a list. Using that method, characters separated by a no-break space are considered as * part of the same entity.</li> * <li>Use {@code isSpaceChar(…)} when parsing a single entity, for example a single word. * Using this method, no-break spaces are considered as part of the entity while line * feeds or tabulations are entity boundaries.</li> * </ul> * * <div class="note"><b>Example:</b> * Numbers formatted in the French locale use no-break spaces as group separators. When parsing a list of numbers, * ordinary spaces around the numbers may need to be ignored, but no-break spaces shall be considered as part of the * numbers. Consequently {@code isWhitespace(…)} is appropriate for skipping spaces <em>between</em> the numbers. * But if there is spaces to skip <em>inside</em> a single number, then {@code isSpaceChar(…)} is a good choice * for accepting no-break spaces and for stopping the parse operation at tabulations or line feed character. * A tabulation or line feed between two characters is very likely to separate two distinct values.</div> * * In practice, the {@link java.text.Format} implementations in the SIS library typically use * {@code isSpaceChar(…)} while most of the rest of the SIS library, including this * {@code CharSequences} class, consistently uses {@code isWhitespace(…)}. * * <p>Note that the {@link String#trim()} method doesn't follow any of those policies and should * generally be avoided. That {@code trim()} method removes every ISO control characters without * distinction about whether the characters are space or not, and ignore all Unicode spaces. * The {@link #trimWhitespaces(String)} method defined in this class can be used as an alternative.</p> * * <h2>Handling of null values</h2> * Most methods in this class accept a {@code null} {@code CharSequence} argument. 
In such cases * the method return value is either a {@code null} {@code CharSequence}, an empty array, or a * {@code 0} or {@code false} primitive type calculated as if the input was an empty string. * * @author Martin Desruisseaux (Geomatys) * @version 1.1 * * @see StringBuilders * * @since 0.3 * @module */ public final class CharSequences extends Static { /** * An array of zero-length. This constant play a role equivalents to * {@link java.util.Collections#EMPTY_LIST}. */ public static final String[] EMPTY_ARRAY = new String[0]; /** * An array of strings containing only white spaces. String lengths are equal to their * index in the {@code spaces} array. For example, {@code spaces[4]} contains a string * of length 4. Strings are constructed only when first needed. */ private static final String[] SPACES = new String[10]; /** * Do not allow instantiation of this class. */ private CharSequences() { } /** * Returns the code point after the given index. This method completes * {@link Character#codePointBefore(CharSequence, int)} but is rarely used because slightly * inefficient (in most cases, the code point at {@code index} is known together with the * corresponding {@code charCount(int)} value, so the method calls should be unnecessary). */ private static int codePointAfter(final CharSequence text, final int index) { return codePointAt(text, index + charCount(codePointAt(text, index))); } /** * Returns a character sequence of the specified length filled with white spaces. * * <h4>Use case</h4> * This method is typically invoked for performing right-alignment of text on the * {@linkplain java.io.Console console} or other device using monospaced font. * Callers compute a value for the {@code length} argument by (<var>desired width</var> - <var>used width</var>). * Since the <var>used width</var> value may be greater than expected, this method handle negative {@code length} * values as if the value was zero. * * @param length the string length. 
Negative values are clamped to 0. * @return a string of length {@code length} filled with white spaces. */ public static CharSequence spaces(final int length) { /* * No need to synchronize. In the unlikely event of two threads calling this method * at the same time and the two calls creating a new string, the String.intern() call * will take care of canonicalizing the strings. */ if (length <= 0) { return ""; } if (length < SPACES.length) { String s = SPACES[length - 1]; if (s == null) { final char[] spaces = new char[length]; Arrays.fill(spaces, ' '); s = new String(spaces).intern(); SPACES[length - 1] = s; } return s; } return new CharSequence() { @Override public int length() { return length; } @Override public char charAt(int index) { ArgumentChecks.ensureValidIndex(length, index); return ' '; } @Override public CharSequence subSequence(final int start, final int end) { ArgumentChecks.ensureValidIndexRange(length, start, end); final int n = end - start; return (n == length) ? this : spaces(n); } @Override public String toString() { final char[] array = new char[length]; Arrays.fill(array, ' '); return new String(array); } }; } /** * Returns the {@linkplain CharSequence#length() length} of the given characters sequence, * or 0 if {@code null}. * * @param text the character sequence from which to get the length, or {@code null}. * @return the length of the character sequence, or 0 if the argument is {@code null}. */ public static int length(final CharSequence text) { return (text != null) ? text.length() : 0; } /** * Returns the number of Unicode code points in the given characters sequence, * or 0 if {@code null}. Unpaired surrogates within the text count as one code * point each. * * @param text the character sequence from which to get the count, or {@code null}. * @return the number of Unicode code points, or 0 if the argument is {@code null}. 
* * @see #codePointCount(CharSequence, int, int) */ public static int codePointCount(final CharSequence text) { return (text != null) ? codePointCount(text, 0, text.length()) : 0; } /** * Returns the number of Unicode code points in the given characters sub-sequence, * or 0 if {@code null}. Unpaired surrogates within the text count as one code * point each. * * <p>This method performs the same work than the standard * {@link Character#codePointCount(CharSequence, int, int)} method, except that it tries * to delegate to the optimized methods from the {@link String}, {@link StringBuilder}, * {@link StringBuffer} or {@link CharBuffer} classes if possible.</p> * * @param text the character sequence from which to get the count, or {@code null}. * @param fromIndex the index from which to start the computation. * @param toIndex the index after the last character to take in account. * @return the number of Unicode code points, or 0 if the argument is {@code null}. * * @see Character#codePointCount(CharSequence, int, int) * @see String#codePointCount(int, int) * @see StringBuilder#codePointCount(int, int) */ public static int codePointCount(final CharSequence text, final int fromIndex, final int toIndex) { if (text == null) return 0; if (text instanceof String) return ((String) text).codePointCount(fromIndex, toIndex); if (text instanceof StringBuilder) return ((StringBuilder) text).codePointCount(fromIndex, toIndex); if (text instanceof StringBuffer) return ((StringBuffer) text).codePointCount(fromIndex, toIndex); if (text instanceof CharBuffer) { final CharBuffer buffer = (CharBuffer) text; if (buffer.hasArray() && !buffer.isReadOnly()) { final int position = buffer.position(); return Character.codePointCount(buffer.array(), position + fromIndex, position + toIndex); } } return Character.codePointCount(text, fromIndex, toIndex); } /** * Returns the number of occurrences of the {@code toSearch} string in the given {@code text}. * The search is case-sensitive. 
* * @param text the character sequence to count occurrences, or {@code null}. * @param toSearch the string to search in the given {@code text}. * It shall contain at least one character. * @return the number of occurrences of {@code toSearch} in {@code text}, * or 0 if {@code text} was null or empty. * @throws NullArgumentException if the {@code toSearch} argument is null. * @throws IllegalArgumentException if the {@code toSearch} argument is empty. */ public static int count(final CharSequence text, final String toSearch) { ArgumentChecks.ensureNonEmpty("toSearch", toSearch); final int length = toSearch.length(); if (length == 1) { // Implementation working on a single character is faster. return count(text, toSearch.charAt(0)); } int n = 0; if (text != null) { int i = 0; while ((i = indexOf(text, toSearch, i, text.length())) >= 0) { n++; i += length; } } return n; } /** * Counts the number of occurrence of the given character in the given character sequence. * * @param text the character sequence to count occurrences, or {@code null}. * @param toSearch the character to count. * @return the number of occurrences of the given character, or 0 if the {@code text} is null. */ public static int count(final CharSequence text, final char toSearch) { int n = 0; if (text != null) { if (text instanceof String) { final String s = (String) text; for (int i=s.indexOf(toSearch); ++i != 0; i=s.indexOf(toSearch, i)) { n++; } } else { // No need to use the code point API here, since we are looking for exact matches. for (int i=text.length(); --i>=0;) { if (text.charAt(i) == toSearch) { n++; } } } } return n; } /** * Returns the index within the given strings of the first occurrence of the specified part, * starting at the specified index. 
This method is equivalent to the following method call, * except that this method works on arbitrary {@link CharSequence} objects instead of * {@link String}s only, and that the upper limit can be specified: * * {@preformat java * return text.indexOf(part, fromIndex); * } * * There is no restriction on the value of {@code fromIndex}. If negative or greater * than {@code toIndex}, then the behavior of this method is as if the search started * from 0 or {@code toIndex} respectively. This is consistent with the * {@link String#indexOf(String, int)} behavior. * * @param text the string in which to perform the search. * @param toSearch the substring for which to search. * @param fromIndex the index from which to start the search. * @param toIndex the index after the last character where to perform the search. * @return the index within the text of the first occurrence of the specified part, starting at the specified index, * or -1 if no occurrence has been found or if the {@code text} argument is null. * @throws NullArgumentException if the {@code toSearch} argument is null. * @throws IllegalArgumentException if the {@code toSearch} argument is empty. 
* * @see String#indexOf(String, int) * @see StringBuilder#indexOf(String, int) * @see StringBuffer#indexOf(String, int) */ public static int indexOf(final CharSequence text, final CharSequence toSearch, int fromIndex, int toIndex) { ArgumentChecks.ensureNonEmpty("toSearch", toSearch); if (text != null) { int length = text.length(); if (toIndex > length) { toIndex = length; } if (toSearch instanceof String && toIndex == length) { if (text instanceof String) { return ((String) text).indexOf((String) toSearch, fromIndex); } if (text instanceof StringBuilder) { return ((StringBuilder) text).indexOf((String) toSearch, fromIndex); } if (text instanceof StringBuffer) { return ((StringBuffer) text).indexOf((String) toSearch, fromIndex); } } if (fromIndex < 0) { fromIndex = 0; } length = toSearch.length(); toIndex -= length; search: for (; fromIndex <= toIndex; fromIndex++) { for (int i=0; i<length; i++) { // No need to use the codePointAt API here, since we are looking for exact matches. if (text.charAt(fromIndex + i) != toSearch.charAt(i)) { continue search; } } return fromIndex; } } return -1; } /** * Returns the index within the given character sequence of the first occurrence of the * specified character, starting the search at the specified index. If the character is * not found, then this method returns -1. * * <p>There is no restriction on the value of {@code fromIndex}. If negative or greater * than {@code toIndex}, then the behavior of this method is as if the search started * from 0 or {@code toIndex} respectively. This is consistent with the behavior documented * in {@link String#indexOf(int, int)}.</p> * * @param text the character sequence in which to perform the search, or {@code null}. * @param toSearch the Unicode code point of the character to search. * @param fromIndex the index to start the search from. * @param toIndex the index after the last character where to perform the search. 
     * @return the index of the first occurrence of the given character in the specified sub-sequence,
     *         or -1 if no occurrence has been found or if the {@code text} argument is null.
     *
     * @see String#indexOf(int, int)
     */
    public static int indexOf(final CharSequence text, final int toSearch, int fromIndex, int toIndex) {
        if (text != null) {
            final int length = text.length();
            if (toIndex >= length) {
                if (text instanceof String) {
                    // String provides a faster implementation.
                    return ((String) text).indexOf(toSearch, fromIndex);
                }
                toIndex = length;
            }
            if (fromIndex < 0) {
                fromIndex = 0;
            }
            char head = (char) toSearch;
            char tail = (char) 0;
            if (head != toSearch) {                     // Outside BMP plane?
                // Supplementary code point: search for its surrogate pair instead.
                head = highSurrogate(toSearch);
                tail = lowSurrogate (toSearch);
                toIndex--;                              // The pair needs two consecutive chars.
            }
            while (fromIndex < toIndex) {
                if (text.charAt(fromIndex) == head) {
                    // For BMP characters tail is 0, so a head match alone is a full match.
                    if (tail == 0 || text.charAt(fromIndex+1) == tail) {
                        return fromIndex;
                    }
                }
                fromIndex++;
            }
        }
        return -1;
    }

    /**
     * Returns the index within the given character sequence of the last occurrence of the
     * specified character, searching backward in the given index range.
     * If the character is not found, then this method returns -1.
     *
     * <p>There is no restriction on the value of {@code toIndex}. If greater than the text length
     * or less than {@code fromIndex}, then the behavior of this method is as if the search started
     * from {@code length} or {@code fromIndex} respectively. This is consistent with the behavior
     * documented in {@link String#lastIndexOf(int, int)}.</p>
     *
     * @param  text       the character sequence in which to perform the search, or {@code null}.
     * @param  toSearch   the Unicode code point of the character to search.
     * @param  fromIndex  the index of the first character in the range where to perform the search.
     * @param  toIndex    the index after the last character in the range where to perform the search.
     * @return the index of the last occurrence of the given character in the specified sub-sequence,
     *         or -1 if no occurrence has been found or if the {@code text} argument is null.
     *
     * @see String#lastIndexOf(int, int)
     */
    public static int lastIndexOf(final CharSequence text, final int toSearch, int fromIndex, int toIndex) {
        if (text != null) {
            if (fromIndex <= 0) {
                if (text instanceof String) {
                    // String provides a faster implementation.
                    return ((String) text).lastIndexOf(toSearch, toIndex - 1);
                }
                fromIndex = 0;
            }
            final int length = text.length();
            if (toIndex > length) {
                toIndex = length;
            }
            char tail = (char) toSearch;
            char head = (char) 0;
            if (tail != toSearch) {                     // Outside BMP plane?
                // Supplementary code point: search backward for its surrogate pair.
                tail = lowSurrogate (toSearch);
                head = highSurrogate(toSearch);
                fromIndex++;                            // The pair needs two consecutive chars.
            }
            while (toIndex > fromIndex) {
                // Scanning backward: match the tail first, then (for surrogate pairs) the head.
                if (text.charAt(--toIndex) == tail) {
                    if (head == 0 || text.charAt(--toIndex) == head) {
                        return toIndex;
                    }
                }
            }
        }
        return -1;
    }

    /**
     * Returns the index of the first character after the given number of lines.
     * This method counts the number of occurrence of {@code '\n'}, {@code '\r'}
     * or {@code "\r\n"} starting from the given position. When {@code numLines}
     * occurrences have been found, the index of the first character after the last
     * occurrence is returned.
     *
     * <p>If the {@code numLines} argument is positive, this method searches forward.
     * If negative, this method searches backward. If 0, this method returns the
     * beginning of the current line.</p>
     *
     * <p>If this method reaches the end of {@code text} while searching forward, then
     * {@code text.length()} is returned. If this method reaches the beginning of
     * {@code text} while searching backward, then 0 is returned.</p>
     *
     * @param  text       the string in which to skip a determined amount of lines.
     * @param  numLines   the number of lines to skip. Can be positive, zero or negative.
     * @param  fromIndex  index at which to start the search, from 0 to {@code text.length()} inclusive.
* @return index of the first character after the last skipped line. * @throws NullPointerException if the {@code text} argument is null. * @throws IndexOutOfBoundsException if {@code fromIndex} is out of bounds. */ public static int indexOfLineStart(final CharSequence text, int numLines, int fromIndex) { final int length = text.length(); /* * Go backward if the number of lines is negative. * No need to use the codePoint API because we are * looking only for characters in the BMP plane. */ if (numLines <= 0) { do { char c; do { if (fromIndex == 0) { return fromIndex; } c = text.charAt(--fromIndex); if (c == '\n') { if (fromIndex != 0 && text.charAt(fromIndex - 1) == '\r') { --fromIndex; } break; } } while (c != '\r'); } while (++numLines != 1); // Execute the forward code below for skipping the "end of line" characters. } /* * Skips forward the given amount of lines. */ while (--numLines >= 0) { char c; do { if (fromIndex == length) { return fromIndex; } c = text.charAt(fromIndex++); if (c == '\r') { if (fromIndex != length && text.charAt(fromIndex) == '\n') { fromIndex++; } break; } } while (c != '\n'); } return fromIndex; } /** * Returns the index of the first non-white character in the given range. * If the given range contains only space characters, then this method returns the index of the * first character after the given range, which is always equals or greater than {@code toIndex}. * Note that this character may not exist if {@code toIndex} is equals to the text length. 
* * <p>Special cases:</p> * <ul> * <li>If {@code fromIndex} is greater than {@code toIndex}, * then this method unconditionally returns {@code fromIndex}.</li> * <li>If the given range contains only space characters and the character at {@code toIndex-1} * is the high surrogate of a valid supplementary code point, then this method returns * {@code toIndex+1}, which is the index of the next code point.</li> * <li>If {@code fromIndex} is negative or {@code toIndex} is greater than the text length, * then the behavior of this method is undefined.</li> * </ul> * * Space characters are identified by the {@link Character#isWhitespace(int)} method. * * @param text the string in which to perform the search (can not be null). * @param fromIndex the index from which to start the search (can not be negative). * @param toIndex the index after the last character where to perform the search. * @return the index within the text of the first occurrence of a non-space character, starting * at the specified index, or a value equals or greater than {@code toIndex} if none. * @throws NullPointerException if the {@code text} argument is null. * * @see #skipTrailingWhitespaces(CharSequence, int, int) * @see #trimWhitespaces(CharSequence) * @see String#stripLeading() */ public static int skipLeadingWhitespaces(final CharSequence text, int fromIndex, final int toIndex) { while (fromIndex < toIndex) { final int c = codePointAt(text, fromIndex); if (!isWhitespace(c)) break; fromIndex += charCount(c); } return fromIndex; } /** * Returns the index <em>after</em> the last non-white character in the given range. * If the given range contains only space characters, then this method returns the index of the * first character in the given range, which is always equals or lower than {@code fromIndex}. 
* * <p>Special cases:</p> * <ul> * <li>If {@code fromIndex} is lower than {@code toIndex}, * then this method unconditionally returns {@code toIndex}.</li> * <li>If the given range contains only space characters and the character at {@code fromIndex} * is the low surrogate of a valid supplementary code point, then this method returns * {@code fromIndex-1}, which is the index of the code point.</li> * <li>If {@code fromIndex} is negative or {@code toIndex} is greater than the text length, * then the behavior of this method is undefined.</li> * </ul> * * Space characters are identified by the {@link Character#isWhitespace(int)} method. * * @param text the string in which to perform the search (can not be null). * @param fromIndex the index from which to start the search (can not be negative). * @param toIndex the index after the last character where to perform the search. * @return the index within the text of the last occurrence of a non-space character, starting * at the specified index, or a value equals or lower than {@code fromIndex} if none. * @throws NullPointerException if the {@code text} argument is null. * * @see #skipLeadingWhitespaces(CharSequence, int, int) * @see #trimWhitespaces(CharSequence) * @see String#stripTrailing() */ public static int skipTrailingWhitespaces(final CharSequence text, final int fromIndex, int toIndex) { while (toIndex > fromIndex) { final int c = codePointBefore(text, toIndex); if (!isWhitespace(c)) break; toIndex -= charCount(c); } return toIndex; } /** * Allocates the array to be returned by the {@code split(…)} methods. If the given {@code text} argument is * an instance of {@link String}, {@link StringBuilder} or {@link StringBuffer}, then this method returns a * {@code String[]} array instead of {@code CharSequence[]}. This is possible because the specification of * their {@link CharSequence#subSequence(int, int)} method guarantees to return {@code String} instances. 
     * Some Apache SIS code will cast the {@code split(…)} return value based on this knowledge.
     *
     * <p>Note that this is an undocumented SIS feature. There is currently no commitment that this implementation
     * detail will not change in future version.</p>
     *
     * @param  text  the text to be splitted.
     * @return an array where to store the result of splitting the given {@code text}.
     */
    private static CharSequence[] createSplitArray(final CharSequence text) {
        return (text instanceof String ||
                text instanceof StringBuilder ||
                text instanceof StringBuffer)
                ? new String[8] : new CharSequence[8];
    }

    /**
     * Splits a text around the given character. The array returned by this method contains all
     * subsequences of the given text that is terminated by the given character or is terminated
     * by the end of the text. The subsequences in the array are in the order in which they occur
     * in the given text. If the character is not found in the input, then the resulting array has
     * just one element, which is the whole given text.
     *
     * <p>This method is similar to the standard {@link String#split(String)} method except for the
     * following:</p>
     *
     * <ul>
     *   <li>It accepts generic character sequences.</li>
     *   <li>It accepts {@code null} argument, in which case an empty array is returned.</li>
     *   <li>The separator is a simple character instead of a regular expression.</li>
     *   <li>If the {@code separator} argument is {@code '\n'} or {@code '\r'}, then this method
     *       splits around any of {@code "\r"}, {@code "\n"} or {@code "\r\n"} characters sequences.
     *   <li>The leading and trailing spaces of each subsequences are trimmed.</li>
     * </ul>
     *
     * @param  text       the text to split, or {@code null}.
     * @param  separator  the delimiting character (typically the coma).
     * @return the array of subsequences computed by splitting the given text around the given
     *         character, or an empty array if {@code text} was null.
     *
     * @see String#split(String)
     */
    @SuppressWarnings("ReturnOfCollectionOrArrayField")
    public static CharSequence[] split(final CharSequence text, final char separator) {
        if (text == null) {
            return EMPTY_ARRAY;
        }
        if (separator == '\n' || separator == '\r') {
            // Line separators get the dedicated implementation, which also handles "\r\n".
            final CharSequence[] splitted = splitOnEOL(text);
            for (int i=0; i < splitted.length; i++) {
                // For consistency with the rest of this method.
                splitted[i] = trimWhitespaces(splitted[i]);
            }
            return splitted;
        }
        // 'excludeEmpty' must use the same criterion than trimWhitespaces(…):
        // if the separator is itself a whitespace, trimming makes empty items meaningless.
        final boolean excludeEmpty = isWhitespace(separator);
        CharSequence[] splitted = createSplitArray(text);
        final int length = text.length();
        int count = 0, last = 0, i = 0;
        while ((i = indexOf(text, separator, i, length)) >= 0) {
            final CharSequence item = trimWhitespaces(text, last, i);
            if (!excludeEmpty || item.length() != 0) {
                if (count == splitted.length) {
                    // Double the array capacity when full.
                    splitted = Arrays.copyOf(splitted, count << 1);
                }
                splitted[count++] = item;
            }
            last = ++i;
        }
        // Add the last element.
        final CharSequence item = trimWhitespaces(text, last, length);
        if (!excludeEmpty || item.length() != 0) {
            if (count == splitted.length) {
                splitted = Arrays.copyOf(splitted, count + 1);
            }
            splitted[count++] = item;
        }
        return ArraysExt.resize(splitted, count);
    }

    /**
     * Splits a text around the <cite>End Of Line</cite> (EOL) characters.
     * EOL characters can be any of {@code "\r"}, {@code "\n"} or {@code "\r\n"} sequences.
     * Each element in the returned array will be a single line. If the given text is already
     * a single line, then this method returns a singleton containing only the given text.
     *
     * <p>Notes:</p>
     * <ul>
     *   <li>At the difference of <code>{@linkplain #split split}(toSplit, '\n')</code>,
     *       this method does not remove whitespaces.</li>
     *   <li>This method does not check for Unicode
     *       {@linkplain Characters#LINE_SEPARATOR line separator} and
     *       {@linkplain Characters#PARAGRAPH_SEPARATOR paragraph separator}.</li>
     * </ul>
     *
     * <div class="note"><b>Performance note:</b>
     * Prior JDK8 this method was usually cheap because all string instances created by
     * {@link String#substring(int,int)} shared the same {@code char[]} internal array.
     * However since JDK8, the new {@code String} implementation copies the data in new arrays.
     * Consequently it is better to use index rather than this method for splitting large {@code String}s.
     * However this method is still useful for other {@link CharSequence} implementations providing an efficient
     * {@code subSequence(int,int)} method.</div>
     *
     * @param  text  the multi-line text from which to get the individual lines, or {@code null}.
     * @return the lines in the text, or an empty array if the given text was null.
     *
     * @see #indexOfLineStart(CharSequence, int, int)
     */
    @SuppressWarnings("ReturnOfCollectionOrArrayField")
    public static CharSequence[] splitOnEOL(final CharSequence text) {
        if (text == null) {
            return EMPTY_ARRAY;
        }
        /*
         * This method is implemented on top of String.indexOf(int,int),
         * assuming that it will be faster for String and StringBuilder.
         */
        final int length = text.length();
        int lf = indexOf(text, '\n', 0, length);
        int cr = indexOf(text, '\r', 0, length);
        if (lf < 0 && cr < 0) {
            // No EOL character at all: the whole text is a single line.
            return new CharSequence[] {
                text
            };
        }
        int count = 0;
        CharSequence[] splitted = createSplitArray(text);
        int last = 0;
        boolean hasMore;
        do {
            int skip = 1;                               // Number of EOL chars to skip ("\r\n" needs 2).
            final int splitAt;
            if (cr < 0) {
                // There is no "\r" character in the whole text, only "\n".
                splitAt = lf;
                hasMore = (lf = indexOf(text, '\n', lf+1, length)) >= 0;
            } else if (lf < 0) {
                // There is no "\n" character in the whole text, only "\r".
                splitAt = cr;
                hasMore = (cr = indexOf(text, '\r', cr+1, length)) >= 0;
            } else if (lf < cr) {
                // There is both "\n" and "\r" characters with "\n" first.
                splitAt = lf;
                hasMore = true;
                lf = indexOf(text, '\n', lf+1, length);
            } else {
                // There is both "\r" and "\n" characters with "\r" first.
                // We need special care for the "\r\n" sequence.
                splitAt = cr;
                if (lf == ++cr) {
                    // "\r" immediately followed by "\n": treat as one EOL of two chars.
                    cr = indexOf(text, '\r', cr+1, length);
                    lf = indexOf(text, '\n', lf+1, length);
                    hasMore = (cr >= 0 || lf >= 0);
                    skip = 2;
                } else {
                    cr = indexOf(text, '\r', cr+1, length);
                    hasMore = true;                     // Because there is lf.
                }
            }
            if (count >= splitted.length) {
                splitted = Arrays.copyOf(splitted, count*2);
            }
            splitted[count++] = text.subSequence(last, splitAt);
            last = splitAt + skip;
        } while (hasMore);
        /*
         * Add the remaining string and we are done.
         */
        if (count >= splitted.length) {
            splitted = Arrays.copyOf(splitted, count+1);
        }
        splitted[count++] = text.subSequence(last, text.length());
        return ArraysExt.resize(splitted, count);
    }

    /**
     * Returns {@code true} if {@link #split(CharSequence, char)} parsed an empty string.
     */
    private static boolean isEmpty(final CharSequence[] tokens) {
        switch (tokens.length) {
            case 0:  return true;
            case 1:  return tokens[0].length() == 0;
            default: return false;
        }
    }

    /**
     * {@linkplain #split(CharSequence, char) Splits} the given text around the given character,
     * then {@linkplain Double#parseDouble(String) parses} each item as a {@code double}.
     * Empty sub-sequences are parsed as {@link Double#NaN}.
     *
     * @param  values     the text containing the values to parse, or {@code null}.
     * @param  separator  the delimiting character (typically the coma).
     * @return the array of numbers parsed from the given text,
     *         or an empty array if {@code values} was null.
     * @throws NumberFormatException if at least one number can not be parsed.
*/ public static double[] parseDoubles(final CharSequence values, final char separator) throws NumberFormatException { final CharSequence[] tokens = split(values, separator); if (isEmpty(tokens)) return ArraysExt.EMPTY_DOUBLE; final double[] parsed = new double[tokens.length]; for (int i=0; i<tokens.length; i++) { final String token = trimWhitespaces(tokens[i]).toString(); parsed[i] = token.isEmpty() ? Double.NaN : Double.parseDouble(token); } return parsed; } /** * {@linkplain #split(CharSequence, char) Splits} the given text around the given character, * then {@linkplain Float#parseFloat(String) parses} each item as a {@code float}. * Empty sub-sequences are parsed as {@link Float#NaN}. * * @param values the text containing the values to parse, or {@code null}. * @param separator the delimiting character (typically the coma). * @return the array of numbers parsed from the given text, * or an empty array if {@code values} was null. * @throws NumberFormatException if at least one number can not be parsed. */ public static float[] parseFloats(final CharSequence values, final char separator) throws NumberFormatException { final CharSequence[] tokens = split(values, separator); if (isEmpty(tokens)) return ArraysExt.EMPTY_FLOAT; final float[] parsed = new float[tokens.length]; for (int i=0; i<tokens.length; i++) { final String token = trimWhitespaces(tokens[i]).toString(); parsed[i] = token.isEmpty() ? Float.NaN : Float.parseFloat(token); } return parsed; } /** * {@linkplain #split(CharSequence, char) Splits} the given text around the given character, * then {@linkplain Long#parseLong(String) parses} each item as a {@code long}. * * @param values the text containing the values to parse, or {@code null}. * @param separator the delimiting character (typically the coma). * @param radix the radix to be used for parsing. This is usually 10. * @return the array of numbers parsed from the given text, * or an empty array if {@code values} was null. 
* @throws NumberFormatException if at least one number can not be parsed. */ public static long[] parseLongs(final CharSequence values, final char separator, final int radix) throws NumberFormatException { final CharSequence[] tokens = split(values, separator); if (isEmpty(tokens)) return ArraysExt.EMPTY_LONG; final long[] parsed = new long[tokens.length]; for (int i=0; i<tokens.length; i++) { parsed[i] = Long.parseLong(trimWhitespaces(tokens[i]).toString(), radix); } return parsed; } /** * {@linkplain #split(CharSequence, char) Splits} the given text around the given character, * then {@linkplain Integer#parseInt(String) parses} each item as an {@code int}. * * @param values the text containing the values to parse, or {@code null}. * @param separator the delimiting character (typically the coma). * @param radix the radix to be used for parsing. This is usually 10. * @return the array of numbers parsed from the given text, * or an empty array if {@code values} was null. * @throws NumberFormatException if at least one number can not be parsed. */ public static int[] parseInts(final CharSequence values, final char separator, final int radix) throws NumberFormatException { final CharSequence[] tokens = split(values, separator); if (isEmpty(tokens)) return ArraysExt.EMPTY_INT; final int[] parsed = new int[tokens.length]; for (int i=0; i<tokens.length; i++) { parsed[i] = Integer.parseInt(trimWhitespaces(tokens[i]).toString(), radix); } return parsed; } /** * {@linkplain #split(CharSequence, char) Splits} the given text around the given character, * then {@linkplain Short#parseShort(String) parses} each item as a {@code short}. * * @param values the text containing the values to parse, or {@code null}. * @param separator the delimiting character (typically the coma). * @param radix the radix to be used for parsing. This is usually 10. * @return the array of numbers parsed from the given text, * or an empty array if {@code values} was null. 
* @throws NumberFormatException if at least one number can not be parsed. */ public static short[] parseShorts(final CharSequence values, final char separator, final int radix) throws NumberFormatException { final CharSequence[] tokens = split(values, separator); if (isEmpty(tokens)) return ArraysExt.EMPTY_SHORT; final short[] parsed = new short[tokens.length]; for (int i=0; i<tokens.length; i++) { parsed[i] = Short.parseShort(trimWhitespaces(tokens[i]).toString(), radix); } return parsed; } /** * {@linkplain #split(CharSequence, char) Splits} the given text around the given character, * then {@linkplain Byte#parseByte(String) parses} each item as a {@code byte}. * * @param values the text containing the values to parse, or {@code null}. * @param separator the delimiting character (typically the coma). * @param radix the radix to be used for parsing. This is usually 10. * @return the array of numbers parsed from the given text, * or an empty array if {@code values} was null. * @throws NumberFormatException if at least one number can not be parsed. */ public static byte[] parseBytes(final CharSequence values, final char separator, final int radix) throws NumberFormatException { final CharSequence[] tokens = split(values, separator); if (isEmpty(tokens)) return ArraysExt.EMPTY_BYTE; final byte[] parsed = new byte[tokens.length]; for (int i=0; i<tokens.length; i++) { parsed[i] = Byte.parseByte(trimWhitespaces(tokens[i]).toString(), radix); } return parsed; } /** * Replaces some Unicode characters by ASCII characters on a "best effort basis". * For example the “ é ” character is replaced by “ e ” (without accent), * the “ ″ ” symbol for minutes of angle is replaced by straight double quotes “ " ”, * and combined characters like ㎏, ㎎, ㎝, ㎞, ㎢, ㎦, ㎖, ㎧, ㎩, ㎐, <i>etc.</i> are replaced * by the corresponding sequences of characters. 
* * <div class="note"><b>Note:</b> * the replacement of Greek letters is a more complex task than what this method can do, * since it depends on the context. For example if the Greek letters are abbreviations * for coordinate system axes like φ and λ, then the replacements depend on the enclosing * coordinate system. See {@link org.apache.sis.io.wkt.Transliterator} for more information.</div> * * @param text the text to scan for Unicode characters to replace by ASCII characters, or {@code null}. * @return the given text with substitutions applied, or {@code text} if no replacement * has been applied, or {@code null} if the given text was null. * * @see StringBuilders#toASCII(StringBuilder) * @see org.apache.sis.io.wkt.Transliterator#filter(String) * @see java.text.Normalizer */ public static CharSequence toASCII(final CharSequence text) { return StringBuilders.toASCII(text, null); } /** * Returns a string with leading and trailing whitespace characters omitted. * This method is similar in purpose to {@link String#trim()}, except that the later considers * every {@linkplain Character#isISOControl(int) ISO control codes} below 32 to be a whitespace. * That {@code String.trim()} behavior has the side effect of removing the heading of ANSI escape * sequences (a.k.a. X3.64), and to ignore Unicode spaces. This {@code trimWhitespaces(…)} method * is built on the more accurate {@link Character#isWhitespace(int)} method instead. * * <p>This method performs the same work than {@link #trimWhitespaces(CharSequence)}, * but is overloaded for the {@code String} type because of its frequent use.</p> * * @param text the text from which to remove leading and trailing whitespaces, or {@code null}. * @return a string with leading and trailing whitespaces removed, or {@code null} is the given * text was null. * * @todo To be replaced by {@link String#strip()} in JDK 11. 
*/ public static String trimWhitespaces(String text) { if (text != null) { final int length = text.length(); final int lower = skipLeadingWhitespaces(text, 0, length); text = text.substring(lower, skipTrailingWhitespaces(text, lower, length)); } return text; } /** * Returns a text with leading and trailing whitespace characters omitted. * Space characters are identified by the {@link Character#isWhitespace(int)} method. * * <p>This method is the generic version of {@link #trimWhitespaces(String)}.</p> * * @param text the text from which to remove leading and trailing whitespaces, or {@code null}. * @return a characters sequence with leading and trailing whitespaces removed, * or {@code null} is the given text was null. * * @see #skipLeadingWhitespaces(CharSequence, int, int) * @see #skipTrailingWhitespaces(CharSequence, int, int) * @see String#strip() */ public static CharSequence trimWhitespaces(CharSequence text) { if (text != null) { text = trimWhitespaces(text, 0, text.length()); } return text; } /** * Returns a sub-sequence with leading and trailing whitespace characters omitted. * Space characters are identified by the {@link Character#isWhitespace(int)} method. * * <p>Invoking this method is functionally equivalent to the following code snippet, * except that the {@link CharSequence#subSequence(int, int) subSequence} method is * invoked only once instead of two times:</p> * * {@preformat java * text = trimWhitespaces(text.subSequence(lower, upper)); * } * * @param text the text from which to remove leading and trailing white spaces. * @param lower index of the first character to consider for inclusion in the sub-sequence. * @param upper index after the last character to consider for inclusion in the sub-sequence. * @return a characters sequence with leading and trailing white spaces removed, or {@code null} * if the {@code text} argument is null. * @throws IndexOutOfBoundsException if {@code lower} or {@code upper} is out of bounds. 
*/ public static CharSequence trimWhitespaces(CharSequence text, int lower, int upper) { final int length = length(text); ArgumentChecks.ensureValidIndexRange(length, lower, upper); if (text != null) { lower = skipLeadingWhitespaces (text, lower, upper); upper = skipTrailingWhitespaces(text, lower, upper); if (lower != 0 || upper != length) { // Safety in case subSequence doesn't make the check. text = text.subSequence(lower, upper); } } return text; } /** * Trims the fractional part of the given formatted number, provided that it doesn't change * the value. This method assumes that the number is formatted in the US locale, typically * by the {@link Double#toString(double)} method. * * <p>More specifically if the given value ends with a {@code '.'} character followed by a * sequence of {@code '0'} characters, then those characters are omitted. Otherwise this * method returns the text unchanged. This is a <cite>"all or nothing"</cite> method: * either the fractional part is completely removed, or either it is left unchanged.</p> * * <h4>Examples</h4> * This method returns {@code "4"} if the given value is {@code "4."}, {@code "4.0"} or * {@code "4.00"}, but returns {@code "4.10"} unchanged (including the trailing {@code '0'} * character) if the input is {@code "4.10"}. * * <h4>Use case</h4> * This method is useful before to {@linkplain Integer#parseInt(String) parse a number} * if that number should preferably be parsed as an integer before attempting to parse * it as a floating point number. * * @param value the value to trim if possible, or {@code null}. * @return the value without the trailing {@code ".0"} part (if any), * or {@code null} if the given text was null. 
* * @see StringBuilders#trimFractionalPart(StringBuilder) */ public static CharSequence trimFractionalPart(final CharSequence value) { if (value != null) { for (int i=value.length(); i>0;) { final int c = codePointBefore(value, i); i -= charCount(c); switch (c) { case '0': continue; case '.': return value.subSequence(0, i); default : return value; } } } return value; } /** * Makes sure that the {@code text} string is not longer than {@code maxLength} characters. * If {@code text} is not longer, then it is returned unchanged. Otherwise this method returns * a copy of {@code text} with some characters substituted by the {@code "(…)"} string. * * <p>If the text needs to be shortened, then this method tries to apply the above-cited * substitution between two words. For example, the following text:</p> * * <blockquote> * "This sentence given as an example is way too long to be included in a short name." * </blockquote> * * May be shortened to something like this: * * <blockquote> * "This sentence given (…) in a short name." * </blockquote> * * @param text the sentence to reduce if it is too long, or {@code null}. * @param maxLength the maximum length allowed for {@code text}. * @return a sentence not longer than {@code maxLength}, or {@code null} if the given text was null. */ public static CharSequence shortSentence(CharSequence text, final int maxLength) { ArgumentChecks.ensureStrictlyPositive("maxLength", maxLength); if (text != null) { final int length = text.length(); int toRemove = length - maxLength; if (toRemove > 0) { toRemove += 5; // Space needed for the " (…) " string. /* * We will remove characters from 'lower' to 'upper' both exclusive. We try to * adjust 'lower' and 'upper' in such a way that the first and last characters * to be removed will be spaces or punctuation characters. 
*/ int lower = length >>> 1; if (lower != 0 && isLowSurrogate(text.charAt(lower))) { lower--; } int upper = lower; boolean forward = false; do { // To be run as long as we need to remove more characters. int nc=0, type=UNASSIGNED; forward = !forward; searchWordBreak: while (true) { final int c; if (forward) { if ((upper += nc) == length) break; c = codePointAt(text, upper); } else { if ((lower -= nc) == 0) break; c = codePointBefore(text, lower); } nc = charCount(c); if (isWhitespace(c)) { if (type != UNASSIGNED) { type = SPACE_SEPARATOR; } } else switch (type) { // After we skipped white, then non-white, then white characters, stop. case SPACE_SEPARATOR: { break searchWordBreak; } // For the first non-white character, just remember its type. // Arbitrarily use UPPERCASE_LETTER for any kind of identifier // part (which include UPPERCASE_LETTER anyway). case UNASSIGNED: { type = isUnicodeIdentifierPart(c) ? UPPERCASE_LETTER : getType(c); break; } // If we expected an identifier, stop at the first other char. case UPPERCASE_LETTER: { if (!isUnicodeIdentifierPart(c)) { break searchWordBreak; } break; } // For all other kind of character, break when the type change. default: { if (getType(c) != type) { break searchWordBreak; } break; } } toRemove -= nc; } } while (toRemove > 0); text = new StringBuilder(lower + (length-upper) + 5) // 5 is the length of " (…) " .append(text, 0, lower).append(" (…) ").append(text, upper, length); } } return text; } /** * Given a string in upper cases (typically a Java constant), returns a string formatted * like an English sentence. This heuristic method performs the following steps: * * <ol> * <li>Replace all occurrences of {@code '_'} by spaces.</li> * <li>Converts all letters except the first one to lower case letters using * {@link Character#toLowerCase(int)}. Note that this method does not use * the {@link String#toLowerCase()} method. Consequently the system locale * is ignored. 
This method behaves as if the conversion were done in the * {@linkplain java.util.Locale#ROOT root} locale.</li> * </ol> * * <p>Note that those heuristic rules may be modified in future SIS versions, * depending on the practical experience gained.</p> * * @param identifier the name of a Java constant, or {@code null}. * @return the identifier like an English sentence, or {@code null} * if the given {@code identifier} argument was null. */ public static CharSequence upperCaseToSentence(final CharSequence identifier) { if (identifier == null) { return null; } final StringBuilder buffer = new StringBuilder(identifier.length()); final int length = identifier.length(); for (int i=0; i<length;) { int c = codePointAt(identifier, i); if (i != 0) { if (c == '_') { c = ' '; } else { c = toLowerCase(c); } } buffer.appendCodePoint(c); i += charCount(c); } return buffer; } /** * Given a string in camel cases (typically an identifier), returns a string formatted * like an English sentence. This heuristic method performs the following steps: * * <ol> * <li>Invoke {@link #camelCaseToWords(CharSequence, boolean)}, which separate the words * on the basis of character case. For example {@code "transferFunctionType"} become * <cite>"transfer function type"</cite>. This works fine for ISO 19115 identifiers.</li> * * <li>Next replace all occurrence of {@code '_'} by spaces in order to take in account * an other common naming convention, which uses {@code '_'} as a word separator. This * convention is used by netCDF attributes like {@code "project_name"}.</li> * * <li>Finally ensure that the first character is upper-case.</li> * </ol> * * <h4>Exception to the above rules</h4> * If the given identifier contains only upper-case letters, digits and the {@code '_'} character, * then the identifier is returned "as is" except for the {@code '_'} characters which are replaced by {@code '-'}. * This work well for identifiers like {@code "UTF-8"} or {@code "ISO-LATIN-1"} for instance. 
* * <p>Note that those heuristic rules may be modified in future SIS versions, * depending on the practical experience gained.</p> * * @param identifier an identifier with no space, words begin with an upper-case character, or {@code null}. * @return the identifier with spaces inserted after what looks like words, or {@code null} * if the given {@code identifier} argument was null. */ public static CharSequence camelCaseToSentence(final CharSequence identifier) { if (identifier == null) { return null; } final StringBuilder buffer; if (isCode(identifier)) { if (identifier instanceof String) { return ((String) identifier).replace('_', '-'); } buffer = new StringBuilder(identifier); StringBuilders.replace(buffer, '_', '-'); } else { buffer = (StringBuilder) camelCaseToWords(identifier, true); final int length = buffer.length(); if (length != 0) { StringBuilders.replace(buffer, '_', ' '); final int c = buffer.codePointAt(0); final int up = toUpperCase(c); if (c != up) { StringBuilders.replace(buffer, 0, charCount(c), toChars(up)); } } } return buffer; } /** * Given a string in camel cases, returns a string with the same words separated by spaces. * A word begins with a upper-case character following a lower-case character. For example * if the given string is {@code "PixelInterleavedSampleModel"}, then this method returns * <cite>"Pixel Interleaved Sample Model"</cite> or <cite>"Pixel interleaved sample model"</cite> * depending on the value of the {@code toLowerCase} argument. * * <p>If {@code toLowerCase} is {@code false}, then this method inserts spaces but does not change * the case of characters. 
If {@code toLowerCase} is {@code true}, then this method changes * {@linkplain Character#toLowerCase(int) to lower case} the first character after each spaces * inserted by this method (note that this intentionally exclude the very first character in * the given string), except if the second character {@linkplain Character#isUpperCase(int) * is upper case}, in which case the word is assumed an acronym.</p> * * <p>The given string is usually a programmatic identifier like a class name or a method name.</p> * * @param identifier an identifier with no space, words begin with an upper-case character. * @param toLowerCase {@code true} for changing the first character of words to lower case, * except for the first word and acronyms. * @return the identifier with spaces inserted after what looks like words, or {@code null} * if the given {@code identifier} argument was null. */ public static CharSequence camelCaseToWords(final CharSequence identifier, final boolean toLowerCase) { if (identifier == null) { return null; } /* * Implementation note: the 'camelCaseToSentence' method needs * this method to unconditionally returns a new StringBuilder. */ final int length = identifier.length(); final StringBuilder buffer = new StringBuilder(length + 8); final int lastIndex = (length != 0) ? 
length - charCount(codePointBefore(identifier, length)) : 0; int last = 0; for (int i=1; i<=length;) { final int cp; final boolean doAppend; if (i == length) { cp = 0; doAppend = true; } else { cp = codePointAt(identifier, i); doAppend = Character.isUpperCase(cp) && isLowerCase(codePointBefore(identifier, i)); } if (doAppend) { final int pos = buffer.length(); buffer.append(identifier, last, i).append(' '); if (toLowerCase && pos!=0 && last<lastIndex && isLowerCase(codePointAfter(identifier, last))) { final int c = buffer.codePointAt(pos); final int low = toLowerCase(c); if (c != low) { StringBuilders.replace(buffer, pos, pos + charCount(c), toChars(low)); } } last = i; } i += charCount(cp); } /* * Removes the trailing space, if any. */ final int lg = buffer.length(); if (lg != 0) { final int cp = buffer.codePointBefore(lg); if (isWhitespace(cp)) { buffer.setLength(lg - charCount(cp)); } } return buffer; } /** * Creates an acronym from the given text. This method returns a string containing the first character of each word, * where the words are separated by the camel case convention, the {@code '_'} character, or any character which is * not a {@linkplain Character#isUnicodeIdentifierPart(int) Unicode identifier part} (including spaces). * * <p>An exception to the above rule happens if the given text is a Unicode identifier without the {@code '_'} * character, and every characters are upper case. In such case the text is returned unchanged on the assumption * that it is already an acronym.</p> * * <p><b>Examples:</b> given {@code "northEast"}, this method returns {@code "NE"}. * Given {@code "Open Geospatial Consortium"}, this method returns {@code "OGC"}.</p> * * @param text the text for which to create an acronym, or {@code null}. * @return the acronym, or {@code null} if the given text was null. 
*/ public static CharSequence camelCaseToAcronym(CharSequence text) { text = trimWhitespaces(text); if (text != null && !isAcronym(text)) { final int length = text.length(); final StringBuilder buffer = new StringBuilder(8); // Acronyms are usually short. boolean wantChar = true; for (int i=0; i<length;) { final int c = codePointAt(text, i); if (wantChar) { if (isUnicodeIdentifierStart(c)) { buffer.appendCodePoint(c); wantChar = false; } } else if (!isUnicodeIdentifierPart(c) || c == '_') { wantChar = true; } else if (Character.isUpperCase(c)) { // Test for mixed-case (e.g. "northEast"). // Note that i is guaranteed to be greater than 0 here. if (!Character.isUpperCase(codePointBefore(text, i))) { buffer.appendCodePoint(c); } } i += charCount(c); } final int acrlg = buffer.length(); if (acrlg != 0) { /* * If every characters except the first one are upper-case, ensure that the * first one is upper-case as well. This is for handling the identifiers which * are compliant to Java-Beans convention (e.g. "northEast"). */ if (isUpperCase(buffer, 1, acrlg, true)) { final int c = buffer.codePointAt(0); final int up = toUpperCase(c); if (c != up) { StringBuilders.replace(buffer, 0, charCount(c), toChars(up)); } } if (!equals(text, buffer)) { text = buffer; } } } return text; } /** * Returns {@code true} if the first string is likely to be an acronym of the second string. * An acronym is a sequence of {@linkplain Character#isLetterOrDigit(int) letters or digits} * built from at least one character of each word in the {@code words} string. More than * one character from the same word may appear in the acronym, but they must always * be the first consecutive characters. The comparison is case-insensitive. 
* * <div class="note"><b>Example:</b> * Given the {@code "Open Geospatial Consortium"} words, the following strings are recognized as acronyms: * {@code "OGC"}, {@code "ogc"}, {@code "O.G.C."}, {@code "OpGeoCon"}.</div> * * If any of the given arguments is {@code null}, this method returns {@code false}. * * @param acronym a possible acronym of the sequence of words, or {@code null}. * @param words the sequence of words, or {@code null}. * @return {@code true} if the first string is an acronym of the second one. */ public static boolean isAcronymForWords(final CharSequence acronym, final CharSequence words) { final int lga = length(acronym); int ia=0, ca; do { if (ia >= lga) return false; ca = codePointAt(acronym, ia); ia += charCount(ca); } while (!isLetterOrDigit(ca)); final int lgc = length(words); int ic=0, cc; do { if (ic >= lgc) return false; cc = codePointAt(words, ic); ic += charCount(cc); } while (!isLetterOrDigit(cc)); if (toUpperCase(ca) != toUpperCase(cc)) { // The first letter must match. return false; } cmp: while (ia < lga) { if (ic >= lgc) { // There is more letters in the acronym than in the complete name. return false; } ca = codePointAt(acronym, ia); ia += charCount(ca); cc = codePointAt(words, ic); ic += charCount(cc); if (isLetterOrDigit(ca)) { if (toUpperCase(ca) == toUpperCase(cc)) { // Acronym letter matches the letter from the complete name. // Continue the comparison with next letter of both strings. continue; } // Will search for the next word after the 'else' block. } else do { if (ia >= lga) break cmp; ca = codePointAt(acronym, ia); ia += charCount(ca); } while (!isLetterOrDigit(ca)); /* * At this point, 'ca' is the next acronym letter to compare and we * need to search for the next word in the complete name. We first * skip remaining letters, then we skip non-letter characters. 
*/ boolean skipLetters = true; do while (isLetterOrDigit(cc) == skipLetters) { if (ic >= lgc) { return false; } cc = codePointAt(words, ic); ic += charCount(cc); } while ((skipLetters = !skipLetters) == false); // Now that we are aligned on a new word, the first letter must match. if (toUpperCase(ca) != toUpperCase(cc)) { return false; } } /* * Now that we have processed all acronym letters, the complete name can not have * any additional word. We can only finish the current word and skip trailing non- * letter characters. */ boolean skipLetters = true; do { do { if (ic >= lgc) return true; cc = codePointAt(words, ic); ic += charCount(cc); } while (isLetterOrDigit(cc) == skipLetters); } while ((skipLetters = !skipLetters) == false); return false; } /** * Returns {@code true} if the given string contains only upper case letters or digits. * A few punctuation characters like {@code '_'} and {@code '.'} are also accepted. * * <p>This method is used for identifying character strings that are likely to be code * like {@code "UTF-8"} or {@code "ISO-LATIN-1"}.</p> * * @see #isUnicodeIdentifier(CharSequence) */ private static boolean isCode(final CharSequence identifier) { for (int i=identifier.length(); --i>=0;) { final char c = identifier.charAt(i); // No need to use the code point API here, since the conditions // below are requiring the characters to be in the basic plane. if (!((c >= 'A' && c <= 'Z') || (c >= '-' && c <= ':') || c == '_')) { return false; } } return true; } /** * Returns {@code true} if the given text is presumed to be an acronym. Acronyms are presumed * to be valid Unicode identifiers in all upper-case letters and without the {@code '_'} character. * * @see #camelCaseToAcronym(CharSequence) */ private static boolean isAcronym(final CharSequence text) { return isUpperCase(text) && indexOf(text, '_', 0, text.length()) < 0 && isUnicodeIdentifier(text); } /** * Returns {@code true} if the given identifier is a legal Unicode identifier. 
* This method returns {@code true} if the identifier length is greater than zero, * the first character is a {@linkplain Character#isUnicodeIdentifierStart(int) * Unicode identifier start} and all remaining characters (if any) are * {@linkplain Character#isUnicodeIdentifierPart(int) Unicode identifier parts}. * * <h4>Relationship with legal XML identifiers</h4> * Most legal Unicode identifiers are also legal XML identifiers, but the converse is not true. * The most noticeable differences are the ‘{@code :}’, ‘{@code -}’ and ‘{@code .}’ characters, * which are legal in XML identifiers but not in Unicode. * * <table class="sis"> * <caption>Characters legal in one set but not in the other</caption> * <tr><th colspan="2">Not legal in Unicode</th> <th class="sep" colspan="2">Not legal in XML</th></tr> * <tr><td>{@code :}</td><td>(colon)</td> <td class="sep">{@code µ}</td><td>(micro sign)</td></tr> * <tr><td>{@code -}</td><td>(hyphen or minus)</td> <td class="sep">{@code ª}</td><td>(feminine ordinal indicator)</td></tr> * <tr><td>{@code .}</td><td>(dot)</td> <td class="sep">{@code º}</td><td>(masculine ordinal indicator)</td></tr> * <tr><td>{@code ·}</td><td>(middle dot)</td> <td class="sep">{@code ⁔}</td><td>(inverted undertie)</td></tr> * <tr> * <td colspan="2">Many punctuation, symbols, <i>etc</i>.</td> * <td colspan="2" class="sep">{@linkplain Character#isIdentifierIgnorable(int) Identifier ignorable} characters.</td> * </tr> * </table> * * Note that the ‘{@code _}’ (underscore) character is legal according both Unicode and XML, while spaces, * ‘{@code !}’, ‘{@code #}’, ‘{@code *}’, ‘{@code /}’, ‘{@code ?}’ and most other punctuation characters are not. 
* * <h4>Usage in Apache SIS</h4> * In its handling of {@linkplain org.apache.sis.referencing.ImmutableIdentifier identifiers}, Apache SIS favors * Unicode identifiers without {@linkplain Character#isIdentifierIgnorable(int) ignorable} characters since those * identifiers are legal XML identifiers except for the above-cited rarely used characters. As a side effect, * this policy excludes ‘{@code :}’, ‘{@code -}’ and ‘{@code .}’ which would normally be legal XML identifiers. * But since those characters could easily be confused with * {@linkplain org.apache.sis.util.iso.DefaultNameSpace#DEFAULT_SEPARATOR namespace separators}, * this exclusion is considered desirable. * * @param identifier the character sequence to test, or {@code null}. * @return {@code true} if the given character sequence is a legal Unicode identifier. * * @see org.apache.sis.referencing.ImmutableIdentifier * @see org.apache.sis.metadata.iso.citation.Citations#toCodeSpace(Citation) * @see org.apache.sis.referencing.IdentifiedObjects#getSimpleNameOrIdentifier(IdentifiedObject) */ public static boolean isUnicodeIdentifier(final CharSequence identifier) { final int length = length(identifier); if (length == 0) { return false; } int c = codePointAt(identifier, 0); if (!isUnicodeIdentifierStart(c)) { return false; } for (int i=0; (i += charCount(c)) < length;) { c = codePointAt(identifier, i); if (!isUnicodeIdentifierPart(c)) { return false; } } return true; } /** * Returns {@code true} if the given text is non-null, contains at least one upper-case character and * no lower-case character. Space and punctuation are ignored. * * @param text the character sequence to test (may be {@code null}). * @return {@code true} if non-null, contains at least one upper-case character and no lower-case character. 
* * @see String#toUpperCase() * * @since 0.7 */ public static boolean isUpperCase(final CharSequence text) { return isUpperCase(text, 0, length(text), false); } /** * Returns {@code true} if the given sub-sequence is non-null, contains at least one upper-case character and * no lower-case character. Space and punctuation are ignored. * * @param text the character sequence to test. * @param lower index of the first character to check, inclusive. * @param upper index of the last character to check, exclusive. * @param hasUpperCase {@code true} if this method should behave as if the given text already had * at least one upper-case character (not necessarily in the portion given by the indices). * @return {@code true} if contains at least one upper-case character and no lower-case character. */ private static boolean isUpperCase(final CharSequence text, int lower, final int upper, boolean hasUpperCase) { while (lower < upper) { final int c = codePointAt(text, lower); if (Character.isLowerCase(c)) { return false; } if (!hasUpperCase) { hasUpperCase = Character.isUpperCase(c); } lower += charCount(c); } return hasUpperCase; } /** * Returns {@code true} if the given texts are equal, optionally ignoring case and filtered-out characters. * This method is sometime used for comparing identifiers in a lenient way. * * <p><b>Example:</b> the following call compares the two strings ignoring case and any * characters which are not {@linkplain Character#isLetterOrDigit(int) letter or digit}. * In particular, spaces and punctuation characters like {@code '_'} and {@code '-'} are * ignored:</p> * * {@preformat java * assert equalsFiltered("WGS84", "WGS_84", Characters.Filter.LETTERS_AND_DIGITS, true) == true; * } * * @param s1 the first characters sequence to compare, or {@code null}. * @param s2 the second characters sequence to compare, or {@code null}. * @param filter the subset of characters to compare, or {@code null} for comparing all characters. 
* @param ignoreCase {@code true} for ignoring cases, or {@code false} for requiring exact match. * @return {@code true} if both arguments are {@code null} or if the two given texts are equal, * optionally ignoring case and filtered-out characters. */ public static boolean equalsFiltered(final CharSequence s1, final CharSequence s2, final Characters.Filter filter, final boolean ignoreCase) { if (s1 == s2) { return true; } if (s1 == null || s2 == null) { return false; } if (filter == null) { return ignoreCase ? equalsIgnoreCase(s1, s2) : equals(s1, s2); } final int lg1 = s1.length(); final int lg2 = s2.length(); int i1 = 0, i2 = 0; while (i1 < lg1) { int c1 = codePointAt(s1, i1); final int n = charCount(c1); if (filter.contains(c1)) { int c2; // Fetch the next significant character from the second string. do { if (i2 >= lg2) { return false; // The first string has more significant characters than expected. } c2 = codePointAt(s2, i2); i2 += charCount(c2); } while (!filter.contains(c2)); // Compare the characters in the same way than String.equalsIgnoreCase(String). if (c1 != c2 && !(ignoreCase && equalsIgnoreCase(c1, c2))) { return false; } } i1 += n; } while (i2 < lg2) { final int s = codePointAt(s2, i2); if (filter.contains(s)) { return false; // The first string has less significant characters than expected. } i2 += charCount(s); } return true; } /** * Returns {@code true} if the given code points are equal, ignoring case. * This method implements the same comparison algorithm than String#equalsIgnoreCase(String). * * <p>This method does not verify if {@code c1 == c2}. This check should have been done * by the caller, since the caller code is a more optimal place for this check.</p> */ private static boolean equalsIgnoreCase(int c1, int c2) { c1 = toUpperCase(c1); c2 = toUpperCase(c2); if (c1 == c2) { return true; } // Need this check for Georgian alphabet. 
return toLowerCase(c1) == toLowerCase(c2); } /** * Returns {@code true} if the two given texts are equal, ignoring case. * This method is similar to {@link String#equalsIgnoreCase(String)}, except * it works on arbitrary character sequences and compares <cite>code points</cite> * instead of characters. * * @param s1 the first string to compare, or {@code null}. * @param s2 the second string to compare, or {@code null}. * @return {@code true} if the two given texts are equal, ignoring case, * or if both arguments are {@code null}. * * @see String#equalsIgnoreCase(String) */ public static boolean equalsIgnoreCase(final CharSequence s1, final CharSequence s2) { if (s1 == s2) { return true; } if (s1 == null || s2 == null) { return false; } // Do not check for String cases. We do not want to delegate to String.equalsIgnoreCase // because we compare code points while String.equalsIgnoreCase compares characters. final int lg1 = s1.length(); final int lg2 = s2.length(); int i1 = 0, i2 = 0; while (i1<lg1 && i2<lg2) { final int c1 = codePointAt(s1, i1); final int c2 = codePointAt(s2, i2); if (c1 != c2 && !equalsIgnoreCase(c1, c2)) { return false; } i1 += charCount(c1); i2 += charCount(c2); } return i1 == i2; } /** * Returns {@code true} if the two given texts are equal. This method delegates to * {@link String#contentEquals(CharSequence)} if possible. This method never invoke * {@link CharSequence#toString()} in order to avoid a potentially large copy of data. * * @param s1 the first string to compare, or {@code null}. * @param s2 the second string to compare, or {@code null}. * @return {@code true} if the two given texts are equal, or if both arguments are {@code null}. 
* * @see String#contentEquals(CharSequence) */ public static boolean equals(final CharSequence s1, final CharSequence s2) { if (s1 == s2) { return true; } if (s1 != null && s2 != null) { if (s1 instanceof String) return ((String) s1).contentEquals(s2); if (s2 instanceof String) return ((String) s2).contentEquals(s1); final int length = s1.length(); if (s2.length() == length) { for (int i=0; i<length; i++) { if (s1.charAt(i) != s2.charAt(i)) { return false; } } return true; } } return false; } /** * Returns {@code true} if the given text at the given offset contains the given part, * in a case-sensitive comparison. This method is equivalent to the following code, * except that this method works on arbitrary {@link CharSequence} objects instead of * {@link String}s only: * * {@preformat java * return text.regionMatches(offset, part, 0, part.length()); * } * * This method does not thrown {@code IndexOutOfBoundsException}. Instead if * {@code fromIndex < 0} or {@code fromIndex + part.length() > text.length()}, * then this method returns {@code false}. * * @param text the character sequence for which to tests for the presence of {@code part}. * @param fromIndex the offset in {@code text} where to test for the presence of {@code part}. * @param part the part which may be present in {@code text}. * @return {@code true} if {@code text} contains {@code part} at the given {@code offset}. * @throws NullPointerException if any of the arguments is null. * * @see String#regionMatches(int, String, int, int) */ public static boolean regionMatches(final CharSequence text, final int fromIndex, final CharSequence part) { if (text instanceof String && part instanceof String) { // It is okay to delegate to String implementation since we do not ignore cases. 
return ((String) text).startsWith((String) part, fromIndex); } final int length; if (fromIndex < 0 || fromIndex + (length = part.length()) > text.length()) { return false; } for (int i=0; i<length; i++) { // No need to use the code point API here, since we are looking for exact matches. if (text.charAt(fromIndex + i) != part.charAt(i)) { return false; } } return true; } /** * Returns {@code true} if the given text at the given offset contains the given part, * optionally in a case-insensitive way. This method is equivalent to the following code, * except that this method works on arbitrary {@link CharSequence} objects instead of * {@link String}s only: * * {@preformat java * return text.regionMatches(ignoreCase, offset, part, 0, part.length()); * } * * This method does not thrown {@code IndexOutOfBoundsException}. Instead if * {@code fromIndex < 0} or {@code fromIndex + part.length() > text.length()}, * then this method returns {@code false}. * * @param text the character sequence for which to tests for the presence of {@code part}. * @param fromIndex the offset in {@code text} where to test for the presence of {@code part}. * @param part the part which may be present in {@code text}. * @param ignoreCase {@code true} if the case should be ignored. * @return {@code true} if {@code text} contains {@code part} at the given {@code offset}. * @throws NullPointerException if any of the arguments is null. * * @see String#regionMatches(boolean, int, String, int, int) * * @since 0.4 */ public static boolean regionMatches(final CharSequence text, int fromIndex, final CharSequence part, final boolean ignoreCase) { if (!ignoreCase) { return regionMatches(text, fromIndex, part); } // Do not check for String cases. We do not want to delegate to String.regionMatches // because we compare code points while String.regionMatches(…) compares characters. 
final int limit = text.length(); final int length = part.length(); if (fromIndex < 0) { // Not checked before because we want NullPointerException if an argument is null. return false; } for (int i=0; i<length;) { if (fromIndex >= limit) { return false; } final int c1 = codePointAt(part, i); final int c2 = codePointAt(text, fromIndex); if (c1 != c2 && !equalsIgnoreCase(c1, c2)) { return false; } fromIndex += charCount(c2); i += charCount(c1); } return true; } /** * Returns {@code true} if the given character sequence starts with the given prefix. * * @param text the characters sequence to test. * @param prefix the expected prefix. * @param ignoreCase {@code true} if the case should be ignored. * @return {@code true} if the given sequence starts with the given prefix. * @throws NullPointerException if any of the arguments is null. */ public static boolean startsWith(final CharSequence text, final CharSequence prefix, final boolean ignoreCase) { return regionMatches(text, 0, prefix, ignoreCase); } /** * Returns {@code true} if the given character sequence ends with the given suffix. * * @param text the characters sequence to test. * @param suffix the expected suffix. * @param ignoreCase {@code true} if the case should be ignored. * @return {@code true} if the given sequence ends with the given suffix. * @throws NullPointerException if any of the arguments is null. */ public static boolean endsWith(final CharSequence text, final CharSequence suffix, final boolean ignoreCase) { int is = text.length(); int ip = suffix.length(); while (ip > 0) { if (is <= 0) { return false; } final int cs = codePointBefore(text, is); final int cp = codePointBefore(suffix, ip); if (cs != cp && (!ignoreCase || !equalsIgnoreCase(cs, cp))) { return false; } is -= charCount(cs); ip -= charCount(cp); } return true; } /** * Returns the longest sequence of characters which is found at the beginning of the two given texts. * If one of those texts is {@code null}, then the other text is returned. 
* If there is no common prefix, then this method returns an empty string. * * @param s1 the first text, or {@code null}. * @param s2 the second text, or {@code null}. * @return the common prefix of both texts (may be empty), or {@code null} if both texts are null. */ public static CharSequence commonPrefix(final CharSequence s1, final CharSequence s2) { if (s1 == null) return s2; if (s2 == null) return s1; final CharSequence shortest; final int lg1 = s1.length(); final int lg2 = s2.length(); final int length; if (lg1 <= lg2) { shortest = s1; length = lg1; } else { shortest = s2; length = lg2; } int i = 0; while (i < length) { // No need to use the codePointAt API here, since we are looking for exact matches. if (s1.charAt(i) != s2.charAt(i)) { break; } i++; } return shortest.subSequence(0, i); } /** * Returns the longest sequence of characters which is found at the end of the two given texts. * If one of those texts is {@code null}, then the other text is returned. * If there is no common suffix, then this method returns an empty string. * * @param s1 the first text, or {@code null}. * @param s2 the second text, or {@code null}. * @return the common suffix of both texts (may be empty), or {@code null} if both texts are null. */ public static CharSequence commonSuffix(final CharSequence s1, final CharSequence s2) { if (s1 == null) return s2; if (s2 == null) return s1; final CharSequence shortest; final int lg1 = s1.length(); final int lg2 = s2.length(); final int length; if (lg1 <= lg2) { shortest = s1; length = lg1; } else { shortest = s2; length = lg2; } int i = 0; while (++i <= length) { // No need to use the codePointAt API here, since we are looking for exact matches. if (s1.charAt(lg1 - i) != s2.charAt(lg2 - i)) { break; } } i--; return shortest.subSequence(length - i, shortest.length()); } /** * Returns the words found at the beginning and end of both texts. 
 * The returned string is the concatenation of the {@linkplain #commonPrefix common prefix}
 * with the {@linkplain #commonSuffix common suffix}, with prefix and suffix possibly made
 * shorter to avoid cutting in the middle of a word.
 *
 * <p>The purpose of this method is to create a global identifier from a list of component identifiers.
 * The latter are often eastward and northward components of a vector, in which case this method provides
 * an identifier for the vector as a whole.</p>
 *
 * <div class="note"><b>Example:</b>
 * given the following inputs:
 * <ul>
 *   <li>{@code "baroclinic_eastward_velocity"}</li>
 *   <li>{@code "baroclinic_northward_velocity"}</li>
 * </ul>
 * This method returns {@code "baroclinic_velocity"}. Note that the {@code "ward"} characters
 * are a common suffix of both texts but are nevertheless omitted because they would cut a word.</div>
 *
 * <p>If one of those texts is {@code null}, then the other text is returned.
 * If there are no common words, then this method returns an empty string.</p>
 *
 * <h4>Possible future evolution</h4>
 * The current implementation searches only for a common prefix and a common suffix, ignoring any common words
 * that may appear in the middle of the strings. A character is considered the beginning of a word if it is
 * {@linkplain Character#isLetterOrDigit(int) a letter or digit} which is not preceded by another letter or
 * digit (as the leading "s" and "c" in "snake_case"), or if it is an {@linkplain Character#isUpperCase(int)
 * upper case} letter preceded by a {@linkplain Character#isLowerCase(int) lower case} letter or no letter
 * (as both "C" letters in "CamelCase").
 *
 * @param  s1  the first text, or {@code null}.
 * @param  s2  the second text, or {@code null}.
 * @return the common words of both texts (may be empty), or {@code null} if both texts are null.
* * @since 1.1 */ public static CharSequence commonWords(final CharSequence s1, final CharSequence s2) { final int lg1 = length(s1); final int lg2 = length(s2); final int shortestLength = Math.min(lg1, lg2); // 0 if s1 or s2 is null, in which case prefix and suffix will have the other value. final CharSequence prefix = commonPrefix(s1, s2); int prefixLength = length(prefix); if (prefixLength >= shortestLength) return prefix; final CharSequence suffix = commonSuffix(s1, s2); int suffixLength = length(suffix); if (suffixLength >= shortestLength) return suffix; final int length = prefixLength + suffixLength; if (length >= lg1) return s1; // Check if one of the strings is already equal to prefix + suffix. if (length >= lg2) return s2; /* * At this point `s1` and `s2` contain at least one character between the prefix and the suffix. * If the prefix or the suffix seems to stop in the middle of a word, skip the remaining of that word. * For example if `s1` and `s2` are "eastward_velocity" and "northward_velocity", the common suffix is * "ward_velocity" but we want to retain only "velocity". * * The first condition below (before the loop) checks the character after the common prefix (for example "e" * in "baroclinic_eastward_velocity" if the prefix is "baroclinic_"). The intent is to handle the case where * the word separator is not the same (e.g. "baroclinic_eastward_velocity" and "baroclinic northward velocity", * in which case the '_' or ' ' character would not appear in the prefix). */ if (!isWordBoundary(s1, prefixLength, codePointAt(s1, prefixLength)) && !isWordBoundary(s2, prefixLength, codePointAt(s2, prefixLength))) { while (prefixLength > 0) { final int c = codePointBefore(prefix, prefixLength); final int n = charCount(c); prefixLength -= n; if (isWordBoundary(prefix, prefixLength, c)) { if (!isLetterOrDigit(c)) prefixLength += n; // Keep separator character. break; } } } /* * Same process than for the prefix above. 
The condition before the loop checks the character before suffix * for the same reason than above, but using only `isLetterOrDigit` ignoring camel-case. The reason is that * if the character before was a word separator according camel-case convention (i.e. an upper-case letter), * we would need to include it in the common suffix. */ int suffixStart = 0; if (isLetterOrDigit(codePointBefore(s1, lg1 - suffixLength)) && isLetterOrDigit(codePointBefore(s2, lg2 - suffixLength))) { while (suffixStart < suffixLength) { final int c = codePointAt(suffix, suffixStart); if (isWordBoundary(suffix, suffixStart, c)) break; suffixStart += charCount(c); } } /* * At this point we got the final prefix and suffix to use. If the prefix or suffix is empty, * trim whitespaces or '_' character. For example if the suffix is "_velocity" and no prefix, * return "velocity" without leading "_" character. */ if (prefixLength == 0) { while (suffixStart < suffixLength) { final int c = codePointAt(suffix, suffixStart); if (isLetterOrDigit(c)) { return suffix.subSequence(suffixStart, suffixLength); // Skip leading ignorable characters in suffix. } suffixStart += charCount(c); } return ""; } if (suffixStart >= suffixLength) { while (prefixLength > 0) { final int c = codePointBefore(prefix, prefixLength); if (isLetterOrDigit(c)) { return prefix.subSequence(0, prefixLength); // Skip trailing ignorable characters in prefix. } prefixLength -= charCount(c); } return ""; } /* * All special cases have been examined. Return the concatenation of (possibly shortened) * common prefix and suffix. */ final StringBuilder buffer = new StringBuilder(prefixLength + suffixLength).append(prefix); final int c1 = codePointBefore(prefix, prefixLength); final int c2 = codePointAt(suffix, suffixStart); if (isLetterOrDigit(c1) && isLetterOrDigit(c2)) { if (!Character.isUpperCase(c2) || !isLowerCase(c1)) { buffer.append(' '); // Keep a separator between two words (except if CamelCase is used). 
} } else if (c1 == c2) { suffixStart += charCount(c2); // Avoid repeating '_' in e.g. "baroclinic_<removed>_velocity". } return buffer.append(suffix, suffixStart, suffixLength).toString(); } /** * Returns {@code true} if the character {@code c} is the beginning of a word or a non-word character. * For example this method returns {@code true} if {@code c} is {@code '_'} in {@code "snake_case"} or * {@code "C"} in {@code "CamelCase"}. * * @param s the character sequence from which the {@code c} character has been obtained. * @param i the index in {@code s} where the {@code c} character has been obtained. * @param c the code point in {@code s} as index {@code i}. * @return whether the given character is the beginning of a word or a non-word character. */ private static boolean isWordBoundary(final CharSequence s, final int i, final int c) { if (!isLetterOrDigit(c)) return true; if (!Character.isUpperCase(c)) return false; return (i <= 0 || isLowerCase(codePointBefore(s, i))); } /** * Returns the token starting at the given offset in the given text. For the purpose of this * method, a "token" is any sequence of consecutive characters of the same type, as defined * below. * * <p>Let define <var>c</var> as the first non-blank character located at an index equals or * greater than the given offset. Then the characters that are considered of the same type * are:</p> * * <ul> * <li>If <var>c</var> is a * {@linkplain Character#isUnicodeIdentifierStart(int) Unicode identifier start}, * then any following characters that are * {@linkplain Character#isUnicodeIdentifierPart(int) Unicode identifier part}.</li> * <li>Otherwise any character for which {@link Character#getType(int)} returns * the same value than for <var>c</var>.</li> * </ul> * * @param text the text for which to get the token. * @param fromIndex index of the fist character to consider in the given text. 
* @return a sub-sequence of {@code text} starting at the given offset, or an empty string * if there is no non-blank character at or after the given offset. * @throws NullPointerException if the {@code text} argument is null. */ public static CharSequence token(final CharSequence text, int fromIndex) { final int length = text.length(); int upper = fromIndex; /* * Skip whitespaces. At the end of this loop, * 'c' will be the first non-blank character. */ int c; do { if (upper >= length) return ""; c = codePointAt(text, upper); fromIndex = upper; upper += charCount(c); } while (isWhitespace(c)); /* * Advance over all characters "of the same type". */ if (isUnicodeIdentifierStart(c)) { while (upper<length && isUnicodeIdentifierPart(c = codePointAt(text, upper))) { upper += charCount(c); } } else { final int type = getType(codePointAt(text, fromIndex)); while (upper<length && getType(c = codePointAt(text, upper)) == type) { upper += charCount(c); } } return text.subSequence(fromIndex, upper); } /** * Replaces all occurrences of a given string in the given character sequence. If no occurrence of * {@code toSearch} is found in the given text or if {@code toSearch} is equal to {@code replaceBy}, * then this method returns the {@code text} unchanged. * Otherwise this method returns a new character sequence with all occurrences replaced by {@code replaceBy}. * * <p>This method is similar to {@link String#replace(CharSequence, CharSequence)} except that is accepts * arbitrary {@code CharSequence} objects. As of Java 10, another difference is that this method does not * create a new {@code String} if {@code toSearch} is equals to {@code replaceBy}.</p> * * @param text the character sequence in which to perform the replacements, or {@code null}. * @param toSearch the string to replace. * @param replaceBy the replacement for the searched string. 
* @return the given text with replacements applied, or {@code text} if no replacement has been applied, * or {@code null} if the given text was null * * @see String#replace(char, char) * @see StringBuilders#replace(StringBuilder, String, String) * @see String#replace(CharSequence, CharSequence) * * @since 0.4 */ public static CharSequence replace(final CharSequence text, final CharSequence toSearch, final CharSequence replaceBy) { ArgumentChecks.ensureNonEmpty("toSearch", toSearch); ArgumentChecks.ensureNonNull ("replaceBy", replaceBy); if (text != null && !toSearch.equals(replaceBy)) { if (text instanceof String) { return ((String) text).replace(toSearch, replaceBy); } final int length = text.length(); int i = indexOf(text, toSearch, 0, length); if (i >= 0) { int p = 0; final int sl = toSearch.length(); final StringBuilder buffer = new StringBuilder(length + (replaceBy.length() - sl)); do { buffer.append(text, p, i).append(replaceBy); i = indexOf(text, toSearch, p = i + sl, length); } while (i >= 0); return buffer.append(text, p, length); } } return text; } /** * Copies a sequence of characters in the given {@code char[]} array. * * @param src the characters sequence from which to copy characters. * @param srcOffset index of the first character from {@code src} to copy. * @param dst the array where to copy the characters. * @param dstOffset index where to write the first character in {@code dst}. * @param length number of characters to copy. 
* * @see String#getChars(int, int, char[], int) * @see StringBuilder#getChars(int, int, char[], int) * @see StringBuffer#getChars(int, int, char[], int) * @see CharBuffer#get(char[], int, int) * @see javax.swing.text.Segment#array */ public static void copyChars(final CharSequence src, int srcOffset, final char[] dst, int dstOffset, int length) { ArgumentChecks.ensurePositive("length", length); if (src instanceof String) { ((String) src).getChars(srcOffset, srcOffset + length, dst, dstOffset); } else if (src instanceof StringBuilder) { ((StringBuilder) src).getChars(srcOffset, srcOffset + length, dst, dstOffset); } else if (src instanceof StringBuffer) { ((StringBuffer) src).getChars(srcOffset, srcOffset + length, dst, dstOffset); } else if (src instanceof CharBuffer) { ((CharBuffer) src).subSequence(srcOffset, srcOffset + length).get(dst, dstOffset, length); } else { /* * Another candidate could be `javax.swing.text.Segment`, but it * is probably not worth to introduce a Swing dependency for it. */ while (length != 0) { dst[dstOffset++] = src.charAt(srcOffset++); length--; } } } }
apache/sis
core/sis-utility/src/main/java/org/apache/sis/util/CharSequences.java
Java
apache-2.0
106,867
// Copyright 2006-2009 Google Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // ======================================================================== #include "omaha/mi_exe_stub/process.h" namespace omaha { namespace { bool run_and_wait(const CString& command_line, DWORD* exit_code, bool wait, int cmd_show) { CString cmd_line(command_line); STARTUPINFO si = {}; PROCESS_INFORMATION pi = {}; GetStartupInfo(&si); si.dwFlags |= STARTF_FORCEOFFFEEDBACK | STARTF_USESHOWWINDOW; si.wShowWindow = static_cast<WORD>(cmd_show); BOOL create_process_result = CreateProcess(NULL, cmd_line.GetBuffer(), NULL, NULL, FALSE, CREATE_UNICODE_ENVIRONMENT, NULL, NULL, &si, &pi); if (!create_process_result) { *exit_code = GetLastError(); return false; } if (wait) { WaitForSingleObject(pi.hProcess, INFINITE); } bool result = true; if (exit_code) { result = !!GetExitCodeProcess(pi.hProcess, exit_code); } CloseHandle(pi.hProcess); CloseHandle(pi.hThread); return result; } } // namespace bool RunAndWaitHidden(const CString& command_line, DWORD *exit_code) { return run_and_wait(command_line, exit_code, true, SW_HIDE); } bool RunAndWait(const CString& command_line, DWORD *exit_code) { return run_and_wait(command_line, exit_code, true, SW_SHOWNORMAL); } bool Run(const CString& command_line) { return run_and_wait(command_line, NULL, false, SW_SHOWNORMAL); } } // namespace omaha
pinkpegasus/omaha_vs2010
mi_exe_stub/process.cc
C++
apache-2.0
2,497
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.netty4; import io.netty.channel.EventLoopGroup; import io.netty.channel.epoll.EpollEventLoopGroup; import io.netty.channel.nio.NioEventLoopGroup; import org.apache.camel.util.concurrent.CamelThreadFactory; /** * A builder to create Netty {@link io.netty.channel.EventLoopGroup} which can be used for sharing worker pools * with multiple Netty {@link NettyServerBootstrapFactory} server bootstrap configurations. 
*/ public final class NettyWorkerPoolBuilder { private String name = "NettyWorker"; private String pattern; private int workerCount; private boolean nativeTransport; private volatile EventLoopGroup workerPool; public void setName(String name) { this.name = name; } public void setPattern(String pattern) { this.pattern = pattern; } public void setWorkerCount(int workerCount) { this.workerCount = workerCount; } public void setNativeTransport(boolean nativeTransport) { this.nativeTransport = nativeTransport; } public NettyWorkerPoolBuilder withName(String name) { setName(name); return this; } public NettyWorkerPoolBuilder withPattern(String pattern) { setPattern(pattern); return this; } public NettyWorkerPoolBuilder withWorkerCount(int workerCount) { setWorkerCount(workerCount); return this; } public NettyWorkerPoolBuilder withNativeTransport(boolean nativeTransport) { setNativeTransport(nativeTransport); return this; } /** * Creates a new worker pool. */ public EventLoopGroup build() { int count = workerCount > 0 ? workerCount : NettyHelper.DEFAULT_IO_THREADS; if (nativeTransport) { workerPool = new EpollEventLoopGroup(count, new CamelThreadFactory(pattern, name, false)); } else { workerPool = new NioEventLoopGroup(count, new CamelThreadFactory(pattern, name, false)); } return workerPool; } /** * Shutdown the created worker pool */ public void destroy() { if (workerPool != null) { workerPool.shutdownGracefully(); workerPool = null; } } }
Fabryprog/camel
components/camel-netty4/src/main/java/org/apache/camel/component/netty4/NettyWorkerPoolBuilder.java
Java
apache-2.0
3,053
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.solr.common.params; import java.util.EnumSet; import java.util.Locale; import org.apache.solr.common.SolrException; /** Facet parameters */ public interface FacetParams { /** Should facet counts be calculated? */ public static final String FACET = "facet"; /** * Numeric option indicating the maximum number of threads to be used in counting facet field * vales */ public static final String FACET_THREADS = FACET + ".threads"; /** What method should be used to do the faceting */ public static final String FACET_METHOD = FACET + ".method"; /** * Value for FACET_METHOD param to indicate that Solr should enumerate over terms in a field to * calculate the facet counts. */ public static final String FACET_METHOD_enum = "enum"; /** * Value for FACET_METHOD param to indicate that Solr should enumerate over documents and count up * terms by consulting an uninverted representation of the field values (such as the FieldCache * used for sorting). */ public static final String FACET_METHOD_fc = "fc"; /** Value for FACET_METHOD param, like FACET_METHOD_fc but counts per-segment. 
*/ public static final String FACET_METHOD_fcs = "fcs"; /** Value for FACET_METHOD param to indicate that Solr should use an UnInvertedField */ public static final String FACET_METHOD_uif = "uif"; /** * Any lucene formated queries the user would like to use for Facet Constraint Counts * (multi-value) */ public static final String FACET_QUERY = FACET + ".query"; /** * Any field whose terms the user wants to enumerate over for Facet Constraint Counts * (multi-value) */ public static final String FACET_FIELD = FACET + ".field"; /** The offset into the list of facets. Can be overridden on a per field basis. */ public static final String FACET_OFFSET = FACET + ".offset"; /** * Numeric option indicating the maximum number of facet field counts be included in the response * for each field - in descending order of count. Can be overridden on a per field basis. */ public static final String FACET_LIMIT = FACET + ".limit"; /** * Numeric option indicating the minimum number of hits before a facet should be included in the * response. Can be overridden on a per field basis. */ public static final String FACET_MINCOUNT = FACET + ".mincount"; /** * Boolean option indicating whether facet field counts of "0" should be included in the response. * Can be overridden on a per field basis. */ public static final String FACET_ZEROS = FACET + ".zeros"; /** * Boolean option indicating whether the response should include a facet field count for all * records which have no value for the facet field. Can be overridden on a per field basis. */ public static final String FACET_MISSING = FACET + ".missing"; static final String FACET_OVERREQUEST = FACET + ".overrequest"; /** * The percentage to over-request by when performing initial distributed requests. * * <p>default value is 1.5 */ public static final String FACET_OVERREQUEST_RATIO = FACET_OVERREQUEST + ".ratio"; /** * An additional amount to over-request by when performing initial distributed requests. 
This * value will be added after accounting for the over-request ratio. * * <p>default value is 10 */ public static final String FACET_OVERREQUEST_COUNT = FACET_OVERREQUEST + ".count"; /** * Comma separated list of fields to pivot * * <p>example: author,type (for types by author / types within author) */ public static final String FACET_PIVOT = FACET + ".pivot"; /** * Minimum number of docs that need to match to be included in the sublist * * <p>default value is 1 */ public static final String FACET_PIVOT_MINCOUNT = FACET_PIVOT + ".mincount"; /** * String option: "count" causes facets to be sorted by the count, "index" results in index order. */ public static final String FACET_SORT = FACET + ".sort"; public static final String FACET_SORT_COUNT = "count"; public static final String FACET_SORT_COUNT_LEGACY = "true"; public static final String FACET_SORT_INDEX = "index"; public static final String FACET_SORT_INDEX_LEGACY = "false"; /** Only return constraints of a facet field with the given prefix. */ public static final String FACET_PREFIX = FACET + ".prefix"; /** Only return constraints of a facet field containing the given string. */ public static final String FACET_CONTAINS = FACET + ".contains"; /** Only return constraints of a facet field containing the given string. */ public static final String FACET_MATCHES = FACET + ".matches"; /** If using facet contains, ignore case when comparing values. */ public static final String FACET_CONTAINS_IGNORE_CASE = FACET_CONTAINS + ".ignoreCase"; /** Only return constraints of a facet field excluding the given string. */ public static final String FACET_EXCLUDETERMS = FACET + ".excludeTerms"; /** * When faceting by enumerating the terms in a field, only use the filterCache for terms with a df * &gt;= to this parameter. */ public static final String FACET_ENUM_CACHE_MINDF = FACET + ".enum.cache.minDf"; /** * A boolean parameter that caps the facet counts at 1. With this set, a returned count will only * be 0 or 1. 
For apps that don't need the count, this should be an optimization */ public static final String FACET_EXISTS = FACET + ".exists"; /** * Any field whose terms the user wants to enumerate over for Facet Contraint Counts (multi-value) */ public static final String FACET_DATE = FACET + ".date"; /** * Date string indicating the starting point for a date facet range. Can be overridden on a per * field basis. */ public static final String FACET_DATE_START = FACET_DATE + ".start"; /** * Date string indicating the ending point for a date facet range. Can be overridden on a per * field basis. */ public static final String FACET_DATE_END = FACET_DATE + ".end"; /** * Date Math string indicating the interval of sub-ranges for a date facet range. Can be * overridden on a per field basis. */ public static final String FACET_DATE_GAP = FACET_DATE + ".gap"; /** * Boolean indicating how counts should be computed if the range between 'start' and 'end' is not * evenly divisible by 'gap'. If this value is true, then all counts of ranges involving the 'end' * point will use the exact endpoint specified -- this includes the 'between' and 'after' counts * as well as the last range computed using the 'gap'. If the value is false, then 'gap' is used * to compute the effective endpoint closest to the 'end' param which results in the range between * 'start' and 'end' being evenly divisible by 'gap'. * * <p>The default is false. * * <p>Can be overridden on a per field basis. */ public static final String FACET_DATE_HARD_END = FACET_DATE + ".hardend"; /** * String indicating what "other" ranges should be computed for a date facet range (multi-value). * * <p>Can be overridden on a per field basis. * * @see FacetRangeOther */ public static final String FACET_DATE_OTHER = FACET_DATE + ".other"; /** * Multivalued string indicating what rules should be applied to determine when the ranges * generated for date faceting should be inclusive or exclusive of their end points. 
* * <p>The default value if none are specified is: [lower,upper,edge] <i>(NOTE: This is different * then FACET_RANGE_INCLUDE)</i> * * <p>Can be overridden on a per field basis. * * @see FacetRangeInclude * @see #FACET_RANGE_INCLUDE */ public static final String FACET_DATE_INCLUDE = FACET_DATE + ".include"; /** * Any numerical field whose terms the user wants to enumerate over Facet Contraint Counts for * selected ranges. */ public static final String FACET_RANGE = FACET + ".range"; /** * Number indicating the starting point for a numerical range facet. Can be overridden on a per * field basis. */ public static final String FACET_RANGE_START = FACET_RANGE + ".start"; /** * Number indicating the ending point for a numerical range facet. Can be overridden on a per * field basis. */ public static final String FACET_RANGE_END = FACET_RANGE + ".end"; /** * Number indicating the interval of sub-ranges for a numerical facet range. Can be overridden on * a per field basis. */ public static final String FACET_RANGE_GAP = FACET_RANGE + ".gap"; /** * Boolean indicating how counts should be computed if the range between 'start' and 'end' is not * evenly divisible by 'gap'. If this value is true, then all counts of ranges involving the 'end' * point will use the exact endpoint specified -- this includes the 'between' and 'after' counts * as well as the last range computed using the 'gap'. If the value is false, then 'gap' is used * to compute the effective endpoint closest to the 'end' param which results in the range between * 'start' and 'end' being evenly divisible by 'gap'. * * <p>The default is false. * * <p>Can be overridden on a per field basis. */ public static final String FACET_RANGE_HARD_END = FACET_RANGE + ".hardend"; /** * String indicating what "other" ranges should be computed for a numerical range facet * (multi-value). Can be overridden on a per field basis. 
*/ public static final String FACET_RANGE_OTHER = FACET_RANGE + ".other"; /** * Multivalued string indicating what rules should be applied to determine when the ranges * generated for numeric faceting should be inclusive or exclusive of their end points. * * <p>The default value if none are specified is: lower * * <p>Can be overridden on a per field basis. * * @see FacetRangeInclude */ public static final String FACET_RANGE_INCLUDE = FACET_RANGE + ".include"; /** * String indicating the method to use to resolve range facets. * * <p>Can be overridden on a per field basis. * * @see FacetRangeMethod */ public static final String FACET_RANGE_METHOD = FACET_RANGE + ".method"; /** Any field whose values the user wants to enumerate as explicit intervals of terms. */ public static final String FACET_INTERVAL = FACET + ".interval"; /** Set of terms for a single interval to facet on. */ public static final String FACET_INTERVAL_SET = FACET_INTERVAL + ".set"; /** * A spatial RPT field to generate a 2D "heatmap" (grid of facet counts) on. Just like the other * faceting types, this may include a 'key' or local-params to facet multiple times. All * parameters with this suffix can be overridden on a per-field basis. */ public static final String FACET_HEATMAP = "facet.heatmap"; /** The format of the heatmap: either png or ints2D (default). */ public static final String FACET_HEATMAP_FORMAT = FACET_HEATMAP + ".format"; /** * The region the heatmap should minimally enclose. It defaults to the world if not set. The * format can either be a minimum to maximum point range format: * * <pre>["-150 10" TO "-100 30"]</pre> * * (the first is bottom-left and second is bottom-right, both of which are parsed as points are * parsed). OR, any WKT can be provided and it's bounding box will be taken. */ public static final String FACET_HEATMAP_GEOM = FACET_HEATMAP + ".geom"; /** * Specify the heatmap grid level explicitly, instead of deriving it via distErr or distErrPct. 
*/ public static final String FACET_HEATMAP_LEVEL = FACET_HEATMAP + ".gridLevel"; /** * Used to determine the heatmap grid level to compute, defaulting to 0.15. It has the same * interpretation of distErrPct when searching on RPT, but relative to the shape in 'bbox'. It's a * fraction (not a %) of the radius of the shape that grid squares must fit into without * exceeding. &gt; 0 and &lt;= 0.5. Mutually exclusive with distErr &amp; gridLevel. */ public static final String FACET_HEATMAP_DIST_ERR_PCT = FACET_HEATMAP + ".distErrPct"; /** * Used to determine the heatmap grid level to compute (optional). It has the same interpretation * of maxDistErr or distErr with RPT. It's an absolute distance (in units of what's specified on * the field type) that a grid square must maximally fit into (width &amp; height). It can be used * to to more explicitly specify the maximum grid square size without knowledge of what particular * grid levels translate to. This can in turn be used with knowledge of the size of 'bbox' to get * a target minimum number of grid cells. Mutually exclusive with distErrPct &amp; gridLevel. */ public static final String FACET_HEATMAP_DIST_ERR = FACET_HEATMAP + ".distErr"; /** * The maximum number of cells (grid squares) the client is willing to handle. If this limit would * be exceeded, we throw an error instead. Defaults to 100k. */ public static final String FACET_HEATMAP_MAX_CELLS = FACET_HEATMAP + ".maxCells"; /** * An enumeration of the legal values for {@link #FACET_RANGE_OTHER} and {@link #FACET_DATE_OTHER} * ... 
* * <ul> * <li>before = the count of matches before the start * <li>after = the count of matches after the end * <li>between = the count of all matches between start and end * <li>all = all of the above (default value) * <li>none = no additional info requested * </ul> * * @see #FACET_RANGE_OTHER * @see #FACET_DATE_OTHER */ public enum FacetRangeOther { BEFORE, AFTER, BETWEEN, ALL, NONE; @Override public String toString() { return super.toString().toLowerCase(Locale.ROOT); } public static FacetRangeOther get(String label) { try { return valueOf(label.toUpperCase(Locale.ROOT)); } catch (IllegalArgumentException e) { throw new SolrException( SolrException.ErrorCode.BAD_REQUEST, label + " is not a valid type of 'other' range facet information", e); } } } /** * An enumeration of the legal values for {@link #FACET_DATE_INCLUDE} and {@link * #FACET_RANGE_INCLUDE} <br> * * <ul> * <li>lower = all gap based ranges include their lower bound * <li>upper = all gap based ranges include their upper bound * <li>edge = the first and last gap ranges include their edge bounds (ie: lower for the first * one, upper for the last one) even if the corresponding upper/lower option is not * specified * <li>outer = the BEFORE and AFTER ranges should be inclusive of their bounds, even if the * first or last ranges already include those boundaries. 
* <li>all = shorthand for lower, upper, edge, and outer * </ul> * * @see #FACET_DATE_INCLUDE * @see #FACET_RANGE_INCLUDE */ public enum FacetRangeInclude { ALL, LOWER, UPPER, EDGE, OUTER; @Override public String toString() { return super.toString().toLowerCase(Locale.ROOT); } public static FacetRangeInclude get(String label) { try { return valueOf(label.toUpperCase(Locale.ROOT)); } catch (IllegalArgumentException e) { throw new SolrException( SolrException.ErrorCode.BAD_REQUEST, label + " is not a valid type of for range 'include' information", e); } } /** * Convinience method for parsing the param value according to the correct semantics and * applying the default of "LOWER" */ public static EnumSet<FacetRangeInclude> parseParam(final String[] param) { // short circut for default behavior if (null == param || 0 == param.length) return EnumSet.of(LOWER); // build up set containing whatever is specified final EnumSet<FacetRangeInclude> include = EnumSet.noneOf(FacetRangeInclude.class); for (final String o : param) { include.add(FacetRangeInclude.get(o)); } // if set contains all, then we're back to short circuting if (include.contains(FacetRangeInclude.ALL)) return EnumSet.allOf(FacetRangeInclude.class); // use whatever we've got. return include; } } /** * An enumeration of the legal values for {@link #FACET_RANGE_METHOD} * * <ul> * <li>filter = * <li>dv = * </ul> * * @see #FACET_RANGE_METHOD */ public enum FacetRangeMethod { FILTER, DV; @Override public String toString() { return super.toString().toLowerCase(Locale.ROOT); } public static FacetRangeMethod get(String label) { try { return valueOf(label.toUpperCase(Locale.ROOT)); } catch (IllegalArgumentException e) { throw new SolrException( SolrException.ErrorCode.BAD_REQUEST, label + " is not a valid method for range faceting", e); } } public static FacetRangeMethod getDefault() { return FILTER; } } }
apache/solr
solr/solrj/src/java/org/apache/solr/common/params/FacetParams.java
Java
apache-2.0
17,854
/* * Copyright 2015 Textocat * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.textocat.textokit.morph.commons; import com.google.common.base.Function; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Sets; import com.textocat.textokit.morph.dictionary.resource.GramModel; import com.textocat.textokit.morph.fs.Word; import com.textocat.textokit.morph.fs.Wordform; import com.textocat.textokit.postagger.MorphCasUtils; import java.util.BitSet; import java.util.Set; import static com.textocat.textokit.morph.commons.PunctuationUtils.punctuationTagMap; import static com.textocat.textokit.morph.model.MorphConstants.*; /** * EXPERIMENTAL <br> * EXPERIMENTAL <br> * EXPERIMENTAL * * @author Rinat Gareev */ public class TagUtils { private static final Set<String> closedPosSet = ImmutableSet.of(NPRO, Apro, PREP, CONJ, PRCL); /** * @param dict * @return function that returns true if the given gram bits represents a * closed class tag */ public static Function<BitSet, Boolean> getClosedClassIndicator(GramModel gm) { // initialize mask final BitSet closedClassTagsMask = new BitSet(); for (String cpGram : closedPosSet) { closedClassTagsMask.set(gm.getGrammemNumId(cpGram)); } // return new Function<BitSet, Boolean>() { @Override public Boolean apply(BitSet _wfBits) { BitSet wfBits = (BitSet) _wfBits.clone(); wfBits.and(closedClassTagsMask); return !wfBits.isEmpty(); } }; } // FIXME refactor hard-coded dependency on a tag mapper implementation public 
static boolean isClosedClassTag(String tag) { return closedClassPunctuationTags.contains(tag) || !Sets.intersection( GramModelBasedTagMapper.parseTag(tag), closedPosSet) .isEmpty(); } public static String postProcessExternalTag(String tag) { return !"null".equals(String.valueOf(tag)) ? tag : null; } public static final Set<String> closedClassPunctuationTags = ImmutableSet .copyOf(punctuationTagMap.values()); public static final Function<Word, String> tagFunction() { return tagFunction; } private static final Function<Word, String> tagFunction = new Function<Word, String>() { @Override public String apply(Word word) { if (word == null) { return null; } Wordform wf = MorphCasUtils.requireOnlyWordform(word); return wf.getPos(); } }; private TagUtils() { } }
textocat/textokit-core
Textokit.PosTagger.API/src/main/java/com/textocat/textokit/morph/commons/TagUtils.java
Java
apache-2.0
3,235
/** * Copyright 2016 The AMP HTML Authors. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS-IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import {Poller} from './poller'; import {Services} from '../../../src/services'; import {addParamToUrl} from '../../../src/url'; import {fetchDocument} from '../../../src/document-fetcher'; import {getMode} from '../../../src/mode'; import {getServicePromiseForDoc} from '../../../src/service'; import {toArray} from '../../../src/types'; import {userAssert} from '../../../src/log'; /** @const {string} */ export const SERVICE_ID = 'liveListManager'; const TRANSFORMED_PREFIX = 'google;v='; /** * Property used for storing id of custom slot. This custom slot can be used to * replace the default "items" and "update" slot. * @const {string} */ export const AMP_LIVE_LIST_CUSTOM_SLOT_ID = 'AMP_LIVE_LIST_CUSTOM_SLOT_ID'; /** * Manages registered AmpLiveList components. * Primarily handles network requests and updates the components * if necessary. 
* @implements {../../../src/service.Disposable} */ export class LiveListManager { /** * @param {!../../../src/service/ampdoc-impl.AmpDoc} ampdoc */ constructor(ampdoc) { /** @const */ this.ampdoc = ampdoc; /** @private @const {!Object<string, !./amp-live-list.AmpLiveList>} */ this.liveLists_ = Object.create(null); /** @private @const {!../../../src/service/extensions-impl.Extensions} */ this.extensions_ = Services.extensionsFor(this.ampdoc.win); /** @private {number} */ this.interval_ = 15000; /** @private @const {!Array<number>} */ this.intervals_ = [this.interval_]; /** @private {?Poller} */ this.poller_ = null; /** @private @const {string} */ this.url_ = this.ampdoc.getUrl(); /** @private {time} */ this.latestUpdateTime_ = 0; /** @private @const {function(): Promise} */ this.work_ = this.fetchDocument_.bind(this); /** @private @const {boolean} */ this.isTransformed_ = isDocTransformed(ampdoc.getRootNode()); // Only start polling when doc is ready and when the doc is visible. this.whenDocReady_().then(() => { // Switch out the poller interval if we can find a lower one and // then make sure to stop polling if doc is not visible. this.interval_ = Math.min.apply(Math, this.intervals_); const initialUpdateTimes = Object.keys(this.liveLists_).map((key) => this.liveLists_[key].getUpdateTime() ); this.latestUpdateTime_ = Math.max.apply(Math, initialUpdateTimes); // For testing purposes only, we speed up the interval of the update. // This should NEVER be allowed in production. if (getMode().localDev) { const path = this.ampdoc.win.location.pathname; if ( path.indexOf('/examples/live-list-update.amp.html') != -1 || path.indexOf('/examples/live-blog.amp.html') != -1 || path.indexOf('/examples/live-blog-non-floating-button.amp.html') != -1 ) { this.interval_ = 5000; } } this.poller_ = new Poller(this.ampdoc.win, this.interval_, this.work_); // If no live-list is active on dom ready, we don't need to poll at all. 
if (this.ampdoc.isVisible() && this.hasActiveLiveLists_()) { this.poller_.start(); } this.setupVisibilityHandler_(); }); } /** @override */ dispose() { this.poller_.stop(); } /** * @param {!Element} element * @return {!Promise<!LiveListManager>} */ static forDoc(element) { return /** @type {!Promise<!LiveListManager>} */ (getServicePromiseForDoc( element, SERVICE_ID )); } /** * Checks if any of the registered amp-live-list components is active/ * * @return {boolean} * @private */ hasActiveLiveLists_() { return Object.keys(this.liveLists_).some((key) => { return this.liveLists_[key].isEnabled(); }); } /** * Makes a request to the given url for the latest document. * * @private */ fetchDocument_() { let url = this.url_; if (this.latestUpdateTime_ > 0) { url = addParamToUrl( url, 'amp_latest_update_time', String(this.latestUpdateTime_) ); } if (this.isTransformed_) { const urlService = Services.urlForDoc(this.ampdoc.getBody()); url = urlService.getCdnUrlOnOrigin(url); } // TODO(erwinm): add update time here when possible. return fetchDocument(this.ampdoc.win, url, {}).then( this.updateLiveLists_.bind(this) ); } /** * Gets all live lists and updates them with their corresponding counterparts. * Saves latest update time. * * @param {!Document} doc * @private */ updateLiveLists_(doc) { this.installExtensionsForDoc_(doc); const allLiveLists = this.getLiveLists_(doc).concat( this.getCustomSlots_(doc) ); const updateTimes = allLiveLists.map(this.updateLiveList_.bind(this)); const latestUpdateTime = Math.max.apply(Math, [0].concat(updateTimes)); if (latestUpdateTime > 0) { this.latestUpdateTime_ = latestUpdateTime; } // We need to do this after calling `updateLiveList` since that // would apply the disabled attribute if any exist from the server. if (!this.hasActiveLiveLists_()) { this.poller_.stop(); } } /** * Queries the document for all `amp-live-list` tags. 
* * @param {!Document} doc * @return {!Array<!Element>} * @private */ getLiveLists_(doc) { return Array.prototype.slice.call( doc.getElementsByTagName('amp-live-list') ); } /** * Queries for custom slots that will be used to host the live elements. This * overrides looking for live elements inside the default <amp-live-list> * element. * * @param {!Document} doc * @return {!Array<!Element>} * @private */ getCustomSlots_(doc) { const liveListsWithCustomSlots = Object.keys(this.liveLists_).filter((id) => this.liveLists_[id].hasCustomSlot() ); return liveListsWithCustomSlots.map((id) => { const customSlotId = this.liveLists_[id].element[ AMP_LIVE_LIST_CUSTOM_SLOT_ID ]; return doc.getElementById(customSlotId); }); } /** * Updates the appropriate `amp-live-list` with its updates from the server. * * @param {!Element} liveList Live list or custom element that built it. * @return {number} */ updateLiveList_(liveList) { // amp-live-list elements can be appended dynamically in the client by // another component using the `i-amphtml-` + `other-component-id` + // `-dynamic-list` combination as the ID of the amp-live-list. // // The fact that we know how this ID is built allows us to find the // amp-live-list element in the server document. See live-story-manager.js // for an example. const dynamicId = 'i-amphtml-' + liveList.id + '-dynamic-list'; const id = dynamicId in this.liveLists_ ? dynamicId : liveList.getAttribute('id'); userAssert(id, 'amp-live-list must have an id.'); userAssert( id in this.liveLists_, 'amp-live-list#%s found but did not exist on original page load.', id ); const inClientDomLiveList = this.liveLists_[id]; inClientDomLiveList.toggle( !liveList.hasAttribute('disabled') && // When the live list is an amp-story, we use an amp-story specific // attribute so publishers can disable the live story functionality. 
!liveList.hasAttribute('live-story-disabled') ); if (inClientDomLiveList.isEnabled()) { return inClientDomLiveList.update(liveList); } return 0; } /** * Register an `amp-live-list` instance for updates. * * @param {string} id * @param {!./amp-live-list.AmpLiveList} liveList */ register(id, liveList) { if (id in this.liveLists_) { return; } this.liveLists_[id] = liveList; this.intervals_.push(liveList.getInterval()); // Polling may not be started yet if no live lists were registered by // doc ready in LiveListManager's constructor. if (liveList.isEnabled() && this.poller_ && this.ampdoc.isVisible()) { this.poller_.start(); } } /** * Returns a promise that is resolved when the document is ready. * @return {!Promise} * @private */ whenDocReady_() { return this.ampdoc.whenReady(); } /** * Listens to he doc visibility changed event. * @private */ setupVisibilityHandler_() { // Polling should always be stopped when document is no longer visible. this.ampdoc.onVisibilityChanged(() => { if (this.ampdoc.isVisible() && this.hasActiveLiveLists_()) { // We use immediate so that the user starts getting updates // right away when they've switched back to the page. this.poller_.start(/** immediate */ true); } else { this.poller_.stop(); } }); } /** * @param {!Document} doc */ installExtensionsForDoc_(doc) { const extensions = toArray( doc.querySelectorAll('script[custom-element], script[custom-template]') ); extensions.forEach((script) => { const extensionName = script.getAttribute('custom-element') || script.getAttribute('custom-template'); // This is a cheap operation if extension is already installed so no need // to over optimize checks. this.extensions_.installExtensionForDoc(this.ampdoc, extensionName); }); } /** * Default minimum data poll interval value. * * @return {number} */ static getMinDataPollInterval() { // TODO(erwinm): determine if value is too low return 15000; } /** * Default minimum data max items per page value. 
* * @return {number} */ static getMinDataMaxItemsPerPage() { return 1; } } /** * Detects if a document has had transforms applied * e.g. by a domain with signed exchange domain enabled. * @param {!Document|!ShadowRoot} root * @return {boolean} */ function isDocTransformed(root) { if (!root.ownerDocument) { return false; } const {documentElement} = root.ownerDocument; const transformed = documentElement.getAttribute('transformed'); return Boolean(transformed) && transformed.startsWith(TRANSFORMED_PREFIX); }
zhouyx/amphtml
extensions/amp-live-list/0.1/live-list-manager.js
JavaScript
apache-2.0
10,792
package org.eclipse.jetty.server.handler; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.io.BufferedReader; import java.io.EOFException; import java.io.IOException; import java.net.Socket; import java.util.LinkedHashMap; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.eclipse.jetty.server.Connector; import org.eclipse.jetty.server.Handler; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.nio.SelectChannelConnector; import org.junit.AfterClass; /** * @version $Revision$ $Date$ */ public abstract class AbstractConnectHandlerTest { protected static Server server; protected static Connector serverConnector; protected static Server proxy; protected static Connector proxyConnector; protected static void startServer(Connector connector, Handler handler) throws Exception { server = new Server(); serverConnector = connector; server.addConnector(serverConnector); server.setHandler(handler); server.start(); } protected static void startProxy() throws Exception { proxy = new Server(); proxyConnector = new SelectChannelConnector(); proxy.addConnector(proxyConnector); proxy.setHandler(new ConnectHandler()); proxy.start(); } @AfterClass public static void stop() throws Exception { stopProxy(); stopServer(); } protected static void stopServer() throws Exception { server.stop(); server.join(); } protected static void stopProxy() throws Exception { proxy.stop(); proxy.join(); } protected Response readResponse(BufferedReader reader) throws IOException { // Simplified parser for HTTP responses String line = reader.readLine(); if (line == null) throw new EOFException(); Matcher responseLine = Pattern.compile("HTTP/1\\.1\\s+(\\d+)").matcher(line); assertTrue(responseLine.lookingAt()); String code = responseLine.group(1); Map<String, String> headers = new LinkedHashMap<String, String>(); while ((line = reader.readLine()) != null) { if (line.trim().length() == 0) 
break; Matcher header = Pattern.compile("([^:]+):\\s*(.*)").matcher(line); assertTrue(header.lookingAt()); String headerName = header.group(1); String headerValue = header.group(2); headers.put(headerName.toLowerCase(), headerValue.toLowerCase()); } StringBuilder body = new StringBuilder(); if (headers.containsKey("content-length")) { int length = Integer.parseInt(headers.get("content-length")); for (int i = 0; i < length; ++i) { char c = (char)reader.read(); body.append(c); } } else if ("chunked".equals(headers.get("transfer-encoding"))) { while ((line = reader.readLine()) != null) { if ("0".equals(line)) { line = reader.readLine(); assertEquals("", line); break; } int length = Integer.parseInt(line, 16); for (int i = 0; i < length; ++i) { char c = (char)reader.read(); body.append(c); } line = reader.readLine(); assertEquals("", line); } } return new Response(code, headers, body.toString().trim()); } protected Socket newSocket() throws IOException { Socket socket = new Socket("localhost", proxyConnector.getLocalPort()); socket.setSoTimeout(5000); return socket; } protected class Response { private final String code; private final Map<String, String> headers; private final String body; private Response(String code, Map<String, String> headers, String body) { this.code = code; this.headers = headers; this.body = body; } public String getCode() { return code; } public Map<String, String> getHeaders() { return headers; } public String getBody() { return body; } @Override public String toString() { StringBuilder builder = new StringBuilder(); builder.append(code).append("\r\n"); for (Map.Entry<String, String> entry : headers.entrySet()) builder.append(entry.getKey()).append(": ").append(entry.getValue()).append("\r\n"); builder.append("\r\n"); builder.append(body); return builder.toString(); } } }
thomasbecker/jetty-7
jetty-server/src/test/java/org/eclipse/jetty/server/handler/AbstractConnectHandlerTest.java
Java
apache-2.0
5,030
package org.apache.velocity.tools.view; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ /** * <p>ToolInfo implementation to handle "primitive" data types. * It currently supports String, Number, and Boolean data.</p> * * <p>An example of data elements specified in your toolbox.xml * might be: * <pre> * &lt;data type="string"&gt; * &lt;key&gt;app_name&lt;/key&gt; * &lt;value&gt;FooWeb Deluxe&lt;/value&gt; * &lt;/data&gt; * &lt;data type="number"&gt; * &lt;key&gt;app_version&lt;/key&gt; * &lt;value&gt;4.2&lt;/value&gt; * &lt;/data&gt; * &lt;data type="boolean"&gt; * &lt;key&gt;debug&lt;/key&gt; * &lt;value&gt;true&lt;/value&gt; * &lt;/data&gt; * &lt;data type="number"&gt; * &lt;key&gt;screen_width&lt;/key&gt; * &lt;value&gt;400&lt;/value&gt; * &lt;/data&gt; * </pre></p> * * @author Nathan Bubna * @deprecated Use {@link org.apache.velocity.tools.config.Data} * @version $Id: DataInfo.java 651469 2008-04-25 00:46:13Z nbubna $ */ @Deprecated public class DataInfo implements ToolInfo { public static final String TYPE_STRING = "string"; public static final String TYPE_NUMBER = "number"; public static final String TYPE_BOOLEAN = "boolean"; private static final int TYPE_ID_STRING = 0; private static final int TYPE_ID_NUMBER = 1; private static final 
int TYPE_ID_BOOLEAN = 2; private String key = null; private int type_id = TYPE_ID_STRING; private Object data = null; public DataInfo() {} /*********************** Mutators *************************/ public void setKey(String key) { this.key = key; } public void setType(String type) { if (TYPE_BOOLEAN.equalsIgnoreCase(type)) { this.type_id = TYPE_ID_BOOLEAN; } else if (TYPE_NUMBER.equalsIgnoreCase(type)) { this.type_id = TYPE_ID_NUMBER; } else /* if no type or type="string" */ { this.type_id = TYPE_ID_STRING; } } public void setValue(String value) { if (type_id == TYPE_ID_BOOLEAN) { this.data = Boolean.valueOf(value); } else if (type_id == TYPE_ID_NUMBER) { if (value.indexOf('.') >= 0) { this.data = new Double(value); } else { this.data = new Integer(value); } } else /* type is "string" */ { this.data = value; } } /*********************** Accessors *************************/ public String getKey() { return key; } public String getClassname() { return data != null ? data.getClass().getName() : null; } /** * Returns the data. Always returns the same * object since the data is a constant. Initialization * data is ignored. */ public Object getInstance(Object initData) { return data; } }
fluidinfo/velocity-tools-packaging
src/main/java/org/apache/velocity/tools/view/DataInfo.java
Java
apache-2.0
3,790
// ---------------------------------------------------------------------------------- // // Copyright Microsoft Corporation // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // ---------------------------------------------------------------------------------- using Microsoft.Azure.Commands.Compute.Common; using Microsoft.Azure.Commands.Compute.Models; using Microsoft.Azure.Commands.ResourceManager.Common.ArgumentCompleters; using Microsoft.Azure.Management.Compute; using System; using System.Management.Automation; namespace Microsoft.Azure.Commands.Compute { [Cmdlet("Get", ResourceManager.Common.AzureRMConstants.AzureRMPrefix + "VMAccessExtension"),OutputType(typeof(VirtualMachineAccessExtensionContext))] public class GetAzureVMAccessExtensionCommand : VirtualMachineExtensionBaseCmdlet { [Parameter( Mandatory = true, Position = 0, ValueFromPipelineByPropertyName = true, HelpMessage = "The resource group name.")] [ResourceGroupCompleter()] [ValidateNotNullOrEmpty] public string ResourceGroupName { get; set; } [Alias("ResourceName")] [Parameter( Mandatory = true, Position = 1, ValueFromPipelineByPropertyName = true, HelpMessage = "The virtual machine name.")] [ResourceNameCompleter("Microsoft.Compute/virtualMachines", "ResourceGroupName")] [ValidateNotNullOrEmpty] public string VMName { get; set; } [Alias("ExtensionName")] [Parameter( Mandatory = true, Position = 2, ValueFromPipelineByPropertyName = true, HelpMessage = "The extension name.")] 
[ResourceNameCompleter("Microsoft.Compute/virtualMachines/extensions", "ResourceGroupName", "VMName")] [ValidateNotNullOrEmpty] public string Name { get; set; } [Parameter( Position = 3, ValueFromPipelineByPropertyName = true, HelpMessage = "To show the status.")] [ValidateNotNullOrEmpty] public SwitchParameter Status { get; set; } public override void ExecuteCmdlet() { base.ExecuteCmdlet(); ExecuteClientAction(() => { if (Status.IsPresent) { var result = this.VirtualMachineExtensionClient.GetWithInstanceView(this.ResourceGroupName, this.VMName, this.Name); var returnedExtension = result.ToPSVirtualMachineExtension(this.ResourceGroupName, this.VMName); if (returnedExtension.Publisher.Equals(VirtualMachineAccessExtensionContext.ExtensionDefaultPublisher, StringComparison.InvariantCultureIgnoreCase) && returnedExtension.ExtensionType.Equals(VirtualMachineAccessExtensionContext.ExtensionDefaultName, StringComparison.InvariantCultureIgnoreCase)) { WriteObject(new VirtualMachineAccessExtensionContext(returnedExtension)); } else { WriteObject(null); } } else { var result = this.VirtualMachineExtensionClient.Get(this.ResourceGroupName, this.VMName, this.Name); var returnedExtension = result.ToPSVirtualMachineExtension(this.ResourceGroupName, this.VMName); if (returnedExtension.Publisher.Equals(VirtualMachineAccessExtensionContext.ExtensionDefaultPublisher, StringComparison.InvariantCultureIgnoreCase) && returnedExtension.ExtensionType.Equals(VirtualMachineAccessExtensionContext.ExtensionDefaultName, StringComparison.InvariantCultureIgnoreCase)) { WriteObject(new VirtualMachineAccessExtensionContext(returnedExtension)); } else { WriteObject(null); } } }); } } }
ClogenyTechnologies/azure-powershell
src/ResourceManager/Compute/Commands.Compute/Extension/VMAccess/GetAzureVMAccessExtension.cs
C#
apache-2.0
4,615
/* * Licensed to Diennea S.r.l. under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. Diennea S.r.l. licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package docet.engine; import java.util.Arrays; /** * * */ public enum DocetDocFormat { TYPE_HTML("html", false), TYPE_PDF("pdf", true); private String name; private boolean includeResources; private DocetDocFormat(final String name, final boolean includeResources) { this.name = name; this.includeResources = includeResources; } @Override public String toString() { return this.name; } public boolean isIncludeResources() { return this.includeResources; } public static DocetDocFormat parseDocetRequestByName(final String name) { return Arrays.asList(DocetDocFormat.values()) .stream() .filter(req -> req.toString().equals(name)).findFirst().orElse(null); } }
diegosalvi/docetproject
docet-core/src/main/java/docet/engine/DocetDocFormat.java
Java
apache-2.0
1,574
// Copyright 2021 The Oppia Authors. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS-IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * @fileoverview Component for delete account modal. */ import { Component } from '@angular/core'; import { OnInit } from '@angular/core'; import { UserService } from 'services/user.service'; import { NgbActiveModal } from '@ng-bootstrap/ng-bootstrap'; @Component({ selector: 'oppia-delete-account-modal', templateUrl: './delete-account-modal.component.html' }) export class DeleteAccountModalComponent implements OnInit { expectedUsername: string; username: string; constructor( private userService: UserService, private ngbActiveModal: NgbActiveModal, ) {} ngOnInit(): void { this.expectedUsername = null; this.userService.getUserInfoAsync().then((userInfo) => { this.expectedUsername = userInfo.getUsername(); }); } isValid(): boolean { return this.username === this.expectedUsername; } confirm(): void { this.ngbActiveModal.close(); } cancel(): void { this.ngbActiveModal.dismiss(); } }
brianrodri/oppia
core/templates/pages/delete-account-page/templates/delete-account-modal.component.ts
TypeScript
apache-2.0
1,586
/* * Javolution - Java(TM) Solution for Real-Time and Embedded Systems * Copyright (C) 2007 - Javolution (http://javolution.org/) * All rights reserved. * * Permission to use, copy, modify, and distribute this software is * freely granted, provided that this notice is preserved. */ package javolution.xml; import java.io.Serializable; /** * <p> This interface identifies classes supporting XML serialization * (XML serialization is still possible for classes not implementing this * interface through dynamic {@link XMLBinding} though).</p> * * <p> Typically, classes implementing this interface have a protected static * {@link XMLFormat} holding their default XML representation. * For example:[code] * public final class Complex implements XMLSerializable { * * // Use the cartesien form for the default XML representation. * protected static final XMLFormat<Complex> XML = new XMLFormat<Complex>(Complex.class) { * public Complex newInstance(Class<Complex> cls, InputElement xml) throws XMLStreamException { * return Complex.valueOf(xml.getAttribute("real", 0.0), * xml.getAttribute("imaginary", 0.0)); * } * public void write(Complex complex, OutputElement xml) throws XMLStreamException { * xml.setAttribute("real", complex.getReal()); * xml.setAttribute("imaginary", complex.getImaginary()); * } * public void read(InputElement xml, Complex complex) { * // Immutable, deserialization occurs at creation, ref. newIntance(...) * } * }; * ... * }[/code]</p> * * @author <a href="mailto:jean-marie@dautelle.com">Jean-Marie Dautelle</a> * @version 4.2, April 15, 2007 */ public interface XMLSerializable extends Serializable { // No method. Tagging interface. }
mariusj/org.openntf.domino
domino/externals/javolution/src/main/java/javolution/xml/XMLSerializable.java
Java
apache-2.0
1,977
# Copyright 2008-2013 Software freedom conservancy # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """WebElement implementation.""" import hashlib import os import zipfile try: from StringIO import StringIO as IOStream except ImportError: # 3+ from io import BytesIO as IOStream import base64 from .command import Command from selenium.common.exceptions import WebDriverException from selenium.common.exceptions import InvalidSelectorException from selenium.webdriver.common.by import By from selenium.webdriver.common.keys import Keys try: str = basestring except NameError: pass class WebElement(object): """Represents an HTML element. Generally, all interesting operations to do with interacting with a page will be performed through this interface.""" def __init__(self, parent, id_): self._parent = parent self._id = id_ @property def tag_name(self): """Gets this element's tagName property.""" return self._execute(Command.GET_ELEMENT_TAG_NAME)['value'] @property def text(self): """Gets the text of the element.""" return self._execute(Command.GET_ELEMENT_TEXT)['value'] def click(self): """Clicks the element.""" self._execute(Command.CLICK_ELEMENT) def submit(self): """Submits a form.""" self._execute(Command.SUBMIT_ELEMENT) def clear(self): """Clears the text if it's a text entry element.""" self._execute(Command.CLEAR_ELEMENT) def get_attribute(self, name): """Gets the attribute value. :Args: - name - name of the attribute property to retieve. 
Example:: # Check if the 'active' css class is applied to an element. is_active = "active" in target_element.get_attribute("class") """ resp = self._execute(Command.GET_ELEMENT_ATTRIBUTE, {'name': name}) attributeValue = '' if resp['value'] is None: attributeValue = None else: attributeValue = resp['value'] if name != 'value' and attributeValue.lower() in ('true', 'false'): attributeValue = attributeValue.lower() return attributeValue def is_selected(self): """Whether the element is selected. Can be used to check if a checkbox or radio button is selected. """ return self._execute(Command.IS_ELEMENT_SELECTED)['value'] def is_enabled(self): """Whether the element is enabled.""" return self._execute(Command.IS_ELEMENT_ENABLED)['value'] def find_element_by_id(self, id_): """Finds element within the child elements of this element. :Args: - id_ - ID of child element to locate. """ return self.find_element(by=By.ID, value=id_) def find_elements_by_id(self, id_): """Finds a list of elements within the children of this element with the matching ID. :Args: - id_ - Id of child element to find. """ return self.find_elements(by=By.ID, value=id_) def find_element_by_name(self, name): """Find element with in this element's children by name. :Args: - name - name property of the element to find. """ return self.find_element(by=By.NAME, value=name) def find_elements_by_name(self, name): """Finds a list of elements with in this element's children by name. :Args: - name - name property to search for. """ return self.find_elements(by=By.NAME, value=name) def find_element_by_link_text(self, link_text): """Finds element with in this element's children by visible link text. :Args: - link_text - Link text string to search for. """ return self.find_element(by=By.LINK_TEXT, value=link_text) def find_elements_by_link_text(self, link_text): """Finds a list of elements with in this element's children by visible link text. :Args: - link_text - Link text string to search for. 
""" return self.find_elements(by=By.LINK_TEXT, value=link_text) def find_element_by_partial_link_text(self, link_text): """Finds element with in this element's children by parial visible link text. :Args: - link_text - Link text string to search for. """ return self.find_element(by=By.PARTIAL_LINK_TEXT, value=link_text) def find_elements_by_partial_link_text(self, link_text): """Finds a list of elements with in this element's children by link text. :Args: - link_text - Link text string to search for. """ return self.find_elements(by=By.PARTIAL_LINK_TEXT, value=link_text) def find_element_by_tag_name(self, name): """Finds element with in this element's children by tag name. :Args: - name - name of html tag (eg: h1, a, span) """ return self.find_element(by=By.TAG_NAME, value=name) def find_elements_by_tag_name(self, name): """Finds a list of elements with in this element's children by tag name. :Args: - name - name of html tag (eg: h1, a, span) """ return self.find_elements(by=By.TAG_NAME, value=name) def find_element_by_xpath(self, xpath): """Finds element by xpath. :Args: xpath - xpath of element to locate. "//input[@class='myelement']" Note: The base path will be relative to this element's location. This will select the first link under this element.:: myelement.find_elements_by_xpath(".//a") However, this will select the first link on the page. myelement.find_elements_by_xpath("//a") """ return self.find_element(by=By.XPATH, value=xpath) def find_elements_by_xpath(self, xpath): """Finds elements within the elements by xpath. :Args: - xpath - xpath locator string. Note: The base path will be relative to this element's location. This will select all links under this element.:: myelement.find_elements_by_xpath(".//a") However, this will select all links in the page itself. 
myelement.find_elements_by_xpath("//a") """ return self.find_elements(by=By.XPATH, value=xpath) def find_element_by_class_name(self, name): """Finds an element within this element's children by their class name. :Args: - name - class name to search on. """ return self.find_element(by=By.CLASS_NAME, value=name) def find_elements_by_class_name(self, name): """Finds a list of elements within children of this element by their class name. :Args: - name - class name to search on. """ return self.find_elements(by=By.CLASS_NAME, value=name) def find_element_by_css_selector(self, css_selector): """Find and return an element that's a child of this element by CSS selector. :Args: - css_selector - CSS selctor string, ex: 'a.nav#home' """ return self.find_element(by=By.CSS_SELECTOR, value=css_selector) def find_elements_by_css_selector(self, css_selector): """Find and return list of multiple elements within the children of this element by CSS selector. :Args: - css_selector - CSS selctor string, ex: 'a.nav#home' """ return self.find_elements(by=By.CSS_SELECTOR, value=css_selector) def send_keys(self, *value): """Simulates typing into the element. :Args: - value - A string for typing, or setting form fields. For setting file inputs, this could be a local file path. Use this to send simple key events or to fill out form fields:: form_textfield = driver.find_element_by_name('username') form_textfield.send_keys("admin") This can also be used to set file inputs.:: file_input = driver.find_element_by_name('profilePic') file_input.send_keys("path/to/profilepic.gif") # Generally it's better to wrap the file path in one of the methods # in os.path to return the actual path to support cross OS testing. 
# file_input.send_keys(os.path.abspath("path/to/profilepic.gif")) """ # transfer file to another machine only if remote driver is used # the same behaviour as for java binding if self.parent._is_remote: local_file = LocalFileDetector.is_local_file(*value) if local_file is not None: value = self._upload(local_file) typing = [] for val in value: if isinstance(val, Keys): typing.append(val) elif isinstance(val, int): val = val.__str__() for i in range(len(val)): typing.append(val[i]) else: for i in range(len(val)): typing.append(val[i]) self._execute(Command.SEND_KEYS_TO_ELEMENT, {'value': typing}) # RenderedWebElement Items def is_displayed(self): """Whether the element would be visible to a user """ return self._execute(Command.IS_ELEMENT_DISPLAYED)['value'] @property def location_once_scrolled_into_view(self): """CONSIDERED LIABLE TO CHANGE WITHOUT WARNING. Use this to discover where on the screen an element is so that we can click it. This method should cause the element to be scrolled into view. 
Returns the top lefthand corner location on the screen, or None if the element is not visible""" return self._execute(Command.GET_ELEMENT_LOCATION_ONCE_SCROLLED_INTO_VIEW)['value'] @property def size(self): """ Returns the size of the element """ size = self._execute(Command.GET_ELEMENT_SIZE)['value'] new_size = {} new_size["height"] = size["height"] new_size["width"] = size["width"] return new_size def value_of_css_property(self, property_name): """ Returns the value of a CSS property """ return self._execute(Command.GET_ELEMENT_VALUE_OF_CSS_PROPERTY, {'propertyName': property_name})['value'] @property def location(self): """ Returns the location of the element in the renderable canvas""" old_loc = self._execute(Command.GET_ELEMENT_LOCATION)['value'] new_loc = {"x": old_loc['x'], "y": old_loc['y']} return new_loc @property def rect(self): """ Returns a dictionary with the size and location of the element""" return self._execute(Command.GET_ELEMENT_RECT)['value'] @property def parent(self): """ Returns parent element is available. """ return self._parent @property def id(self): """ Returns internal id used by selenium. This is mainly for internal use. Simple use cases such as checking if 2 webelements refer to the same element, can be done using '==':: if element1 == element2: print("These 2 are equal") """ return self._id def __eq__(self, element): if self._id == element.id: return True else: return self._execute(Command.ELEMENT_EQUALS, {'other': element.id})['value'] # Private Methods def _execute(self, command, params=None): """Executes a command against the underlying HTML element. Args: command: The name of the command to _execute as a string. params: A dictionary of named parameters to send with the command. Returns: The command's JSON response loaded into a dictionary object. 
""" if not params: params = {} params['id'] = self._id return self._parent.execute(command, params) def find_element(self, by=By.ID, value=None): if not By.is_valid(by) or not isinstance(value, str): raise InvalidSelectorException("Invalid locator values passed in") return self._execute(Command.FIND_CHILD_ELEMENT, {"using": by, "value": value})['value'] def find_elements(self, by=By.ID, value=None): if not By.is_valid(by) or not isinstance(value, str): raise InvalidSelectorException("Invalid locator values passed in") return self._execute(Command.FIND_CHILD_ELEMENTS, {"using": by, "value": value})['value'] def __hash__(self): return int(hashlib.md5(self._id.encode('utf-8')).hexdigest(), 16) def _upload(self, filename): fp = IOStream() zipped = zipfile.ZipFile(fp, 'w', zipfile.ZIP_DEFLATED) zipped.write(filename, os.path.split(filename)[1]) zipped.close() content = base64.encodestring(fp.getvalue()) if not isinstance(content, str): content = content.decode('utf-8') try: return self._execute(Command.UPLOAD_FILE, {'file': content})['value'] except WebDriverException as e: if "Unrecognized command: POST" in e.__str__(): return filename elif "Command not found: POST " in e.__str__(): return filename elif '{"status":405,"value":["GET","HEAD","DELETE"]}' in e.__str__(): return filename else: raise e class LocalFileDetector(object): @classmethod def is_local_file(cls, *keys): file_path = '' typing = [] for val in keys: if isinstance(val, Keys): typing.append(val) elif isinstance(val, int): val = val.__str__() for i in range(len(val)): typing.append(val[i]) else: for i in range(len(val)): typing.append(val[i]) file_path = ''.join(typing) if file_path is '': return None try: if os.path.isfile(file_path): return file_path except: pass return None
RamaraoDonta/ramarao-clone
py/selenium/webdriver/remote/webelement.py
Python
apache-2.0
14,925
/** * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ #include <aws/location/model/SearchForTextResult.h> #include <aws/core/utils/json/JsonSerializer.h> #include <utility> using namespace Aws::Utils::Json; using namespace Aws::Utils; namespace Aws { namespace LocationService { namespace Model { SearchForTextResult::SearchForTextResult() : m_placeHasBeenSet(false) { } SearchForTextResult::SearchForTextResult(JsonView jsonValue) : m_placeHasBeenSet(false) { *this = jsonValue; } SearchForTextResult& SearchForTextResult::operator =(JsonView jsonValue) { if(jsonValue.ValueExists("Place")) { m_place = jsonValue.GetObject("Place"); m_placeHasBeenSet = true; } return *this; } JsonValue SearchForTextResult::Jsonize() const { JsonValue payload; if(m_placeHasBeenSet) { payload.WithObject("Place", m_place.Jsonize()); } return payload; } } // namespace Model } // namespace LocationService } // namespace Aws
jt70471/aws-sdk-cpp
aws-cpp-sdk-location/source/model/SearchForTextResult.cpp
C++
apache-2.0
1,031
"""This component provides support to the Ring Door Bell camera.""" import asyncio from datetime import timedelta import logging import voluptuous as vol from homeassistant.components.camera import PLATFORM_SCHEMA, Camera from homeassistant.components.ffmpeg import DATA_FFMPEG from homeassistant.const import ATTR_ATTRIBUTION, CONF_SCAN_INTERVAL from homeassistant.helpers import config_validation as cv from homeassistant.helpers.aiohttp_client import async_aiohttp_proxy_stream from homeassistant.util import dt as dt_util from . import ATTRIBUTION, DATA_RING, NOTIFICATION_ID CONF_FFMPEG_ARGUMENTS = 'ffmpeg_arguments' FORCE_REFRESH_INTERVAL = timedelta(minutes=45) _LOGGER = logging.getLogger(__name__) NOTIFICATION_TITLE = 'Ring Camera Setup' SCAN_INTERVAL = timedelta(seconds=90) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Optional(CONF_FFMPEG_ARGUMENTS): cv.string, vol.Optional(CONF_SCAN_INTERVAL, default=SCAN_INTERVAL): cv.time_period, }) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up a Ring Door Bell and StickUp Camera.""" ring = hass.data[DATA_RING] cams = [] cams_no_plan = [] for camera in ring.doorbells: if camera.has_subscription: cams.append(RingCam(hass, camera, config)) else: cams_no_plan.append(camera) for camera in ring.stickup_cams: if camera.has_subscription: cams.append(RingCam(hass, camera, config)) else: cams_no_plan.append(camera) # show notification for all cameras without an active subscription if cams_no_plan: cameras = str(', '.join([camera.name for camera in cams_no_plan])) err_msg = '''A Ring Protect Plan is required for the''' \ ''' following cameras: {}.'''.format(cameras) _LOGGER.error(err_msg) hass.components.persistent_notification.create( 'Error: {}<br />' 'You will need to restart hass after fixing.' 
''.format(err_msg), title=NOTIFICATION_TITLE, notification_id=NOTIFICATION_ID) add_entities(cams, True) return True class RingCam(Camera): """An implementation of a Ring Door Bell camera.""" def __init__(self, hass, camera, device_info): """Initialize a Ring Door Bell camera.""" super(RingCam, self).__init__() self._camera = camera self._hass = hass self._name = self._camera.name self._ffmpeg = hass.data[DATA_FFMPEG] self._ffmpeg_arguments = device_info.get(CONF_FFMPEG_ARGUMENTS) self._last_video_id = self._camera.last_recording_id self._video_url = self._camera.recording_url(self._last_video_id) self._utcnow = dt_util.utcnow() self._expires_at = FORCE_REFRESH_INTERVAL + self._utcnow @property def name(self): """Return the name of this camera.""" return self._name @property def unique_id(self): """Return a unique ID.""" return self._camera.id @property def device_state_attributes(self): """Return the state attributes.""" return { ATTR_ATTRIBUTION: ATTRIBUTION, 'device_id': self._camera.id, 'firmware': self._camera.firmware, 'kind': self._camera.kind, 'timezone': self._camera.timezone, 'type': self._camera.family, 'video_url': self._video_url, } async def async_camera_image(self): """Return a still image response from the camera.""" from haffmpeg.tools import ImageFrame, IMAGE_JPEG ffmpeg = ImageFrame(self._ffmpeg.binary, loop=self.hass.loop) if self._video_url is None: return image = await asyncio.shield(ffmpeg.get_image( self._video_url, output_format=IMAGE_JPEG, extra_cmd=self._ffmpeg_arguments)) return image async def handle_async_mjpeg_stream(self, request): """Generate an HTTP MJPEG stream from the camera.""" from haffmpeg.camera import CameraMjpeg if self._video_url is None: return stream = CameraMjpeg(self._ffmpeg.binary, loop=self.hass.loop) await stream.open_camera( self._video_url, extra_cmd=self._ffmpeg_arguments) try: stream_reader = await stream.get_reader() return await async_aiohttp_proxy_stream( self.hass, request, stream_reader, 
self._ffmpeg.ffmpeg_stream_content_type) finally: await stream.close() @property def should_poll(self): """Update the image periodically.""" return True def update(self): """Update camera entity and refresh attributes.""" _LOGGER.debug("Checking if Ring DoorBell needs to refresh video_url") self._camera.update() self._utcnow = dt_util.utcnow() try: last_event = self._camera.history(limit=1)[0] except (IndexError, TypeError): return last_recording_id = last_event['id'] video_status = last_event['recording']['status'] if video_status == 'ready' and \ (self._last_video_id != last_recording_id or self._utcnow >= self._expires_at): video_url = self._camera.recording_url(last_recording_id) if video_url: _LOGGER.info("Ring DoorBell properties refreshed") # update attributes if new video or if URL has expired self._last_video_id = last_recording_id self._video_url = video_url self._expires_at = FORCE_REFRESH_INTERVAL + self._utcnow
jabesq/home-assistant
homeassistant/components/ring/camera.py
Python
apache-2.0
5,739
/** * Copyright 2014 * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. * * @project loon * @author cping * @email:javachenpeng@yahoo.com * @version 0.4.2 */ package loon.core.graphics.component.table; import loon.core.graphics.LComponent; import loon.core.graphics.LContainer; import loon.core.graphics.device.LColor; import loon.core.graphics.opengl.GLEx; import loon.core.graphics.opengl.LTexture; import loon.utils.collection.ArrayList; public class TableLayout extends LContainer { private TableLayoutRow[] tableRows; private boolean grid = true; public TableLayout(int x, int y, int w, int h) { this(x, y, w, h, 4, 4); } public TableLayout(int x, int y, int w, int h, int cols, int rows) { super(x, y, w, h); prepareTable(cols, rows); } protected void renderComponents(GLEx g) { for (int i = 0; i < getComponentCount(); i++) { getComponents()[i].createUI(g); } if (grid) { for (int i = 0; i < tableRows.length; i++) { tableRows[i].paint(g); } g.drawRect(getX(), getY(), getWidth(), getHeight(), LColor.gray); } } @Override public void createUI(GLEx g, int x, int y, LComponent component, LTexture[] buttonImage) { } private void prepareTable(int cols, int rows) { tableRows = new TableLayoutRow[rows]; if (rows > 0 && cols > 0) { int rowHeight = getHeight() / rows; for (int i = 0; i < rows; i++) { tableRows[i] = new TableLayoutRow(x(), y() + (i * rowHeight), getWidth(), rowHeight, cols); } } } public void setComponent(LComponent component, int col, int row) { add(component); 
remove(tableRows[row].getComponent(col)); tableRows[row].setComponent(component, col); } public void removeComponent(int col, int row) { remove(tableRows[row].getComponent(col)); tableRows[row].setComponent(null, col); } public void addRow(int column, int position) { ArrayList newRows = new ArrayList(); int newRowHeight = getHeight() / (tableRows.length + 1); if (canAddRow(newRowHeight)) { if (position == 0) { newRows.add(new TableLayoutRow(x(), y(), getWidth(), newRowHeight, column)); } for (int i = 0; i < tableRows.length; i++) { if (i == position && position != 0) { newRows.add(new TableLayoutRow(x(), y(), getWidth(), newRowHeight, column)); } newRows.add(tableRows[i]); } if (position == tableRows.length && position != 0) { newRows.add(new TableLayoutRow(x(), y(), getWidth(), newRowHeight, column)); } for (int i = 0; i < newRows.size(); i++) { ((TableLayoutRow) newRows.get(i)) .setY(y() + (i * newRowHeight)); ((TableLayoutRow) newRows.get(i)).setHeight(newRowHeight); } tableRows = (TableLayoutRow[]) newRows.toArray(); } } public void addRow(int column) { addRow(column, tableRows.length); } private boolean canAddRow(int newRowHeight) { if (tableRows != null && tableRows.length > 0) { return tableRows[0].canSetHeight(newRowHeight); } return true; } public boolean setColumnWidth(int width, int col, int row) { return tableRows[row].setColumnWidth(width, col); } public boolean setColumnHeight(int height, int row) { if (!tableRows[row].canSetHeight(height)) { return false; } tableRows[row].setHeight(height); return true; } public void setMargin(int leftMargin, int rightMargin, int topMargin, int bottomMargin, int col, int row) { tableRows[row].getColumn(col).setMargin(leftMargin, rightMargin, topMargin, bottomMargin); } public void setAlignment(int horizontalAlignment, int verticalAlignment, int col, int row) { tableRows[row].getColumn(col).setHorizontalAlignment( horizontalAlignment); tableRows[row].getColumn(col).setVerticalAlignment(verticalAlignment); } public int 
getRows() { return tableRows.length; } public int getColumns(int row) { return tableRows[row].getCoulumnSize(); } @Override public void setWidth(int width) { boolean couldShrink = true; for (int i = 0; i < tableRows.length; i++) { if (!tableRows[i].setWidth(width)) { couldShrink = false; } } if (couldShrink) { super.setWidth(width); } } @Override public void setHeight(int height) { super.setHeight(height); for (int i = 0; i < tableRows.length; i++) { tableRows[i].setHeight(height); } } public boolean isGrid() { return grid; } public void setGrid(boolean grid) { this.grid = grid; } @Override public String getUIName() { return "TableLayout"; } }
cping/LGame
Java/old/OpenGL-1.0(old_ver)/Loon-backend-JavaSE/src/loon/core/graphics/component/table/TableLayout.java
Java
apache-2.0
4,975
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.debugger.engine.evaluation.expression; import com.intellij.debugger.DebuggerBundle; import com.intellij.debugger.engine.evaluation.EvaluateException; import com.intellij.debugger.engine.evaluation.EvaluateExceptionUtil; import com.intellij.debugger.engine.evaluation.EvaluateRuntimeException; import com.intellij.debugger.jdi.VirtualMachineProxyImpl; import com.intellij.openapi.diagnostic.Logger; import java.util.HashMap; import com.sun.jdi.Value; import java.util.Map; /** * @author lex */ public class CodeFragmentEvaluator extends BlockStatementEvaluator{ private static final Logger LOG = Logger.getInstance("#com.intellij.debugger.engine.evaluation.expression.CodeFragmentEvaluator"); private final CodeFragmentEvaluator myParentFragmentEvaluator; private final Map<String, Object> mySyntheticLocals = new HashMap<>(); public CodeFragmentEvaluator(CodeFragmentEvaluator parentFragmentEvaluator) { super(null); myParentFragmentEvaluator = parentFragmentEvaluator; } public void setStatements(Evaluator[] evaluators) { myStatements = evaluators; } public Value getValue(String localName, VirtualMachineProxyImpl vm) throws EvaluateException { if(!mySyntheticLocals.containsKey(localName)) { if(myParentFragmentEvaluator != null){ return myParentFragmentEvaluator.getValue(localName, vm); } else { throw 
EvaluateExceptionUtil.createEvaluateException(DebuggerBundle.message("evaluation.error.variable.not.declared", localName)); } } Object value = mySyntheticLocals.get(localName); if(value instanceof Value) { return (Value)value; } else if(value == null) { return null; } else if(value instanceof Boolean) { return vm.mirrorOf(((Boolean)value).booleanValue()); } else if(value instanceof Byte) { return vm.mirrorOf(((Byte)value).byteValue()); } else if(value instanceof Character) { return vm.mirrorOf(((Character)value).charValue()); } else if(value instanceof Short) { return vm.mirrorOf(((Short)value).shortValue()); } else if(value instanceof Integer) { return vm.mirrorOf(((Integer)value).intValue()); } else if(value instanceof Long) { return vm.mirrorOf(((Long)value).longValue()); } else if(value instanceof Float) { return vm.mirrorOf(((Float)value).floatValue()); } else if(value instanceof Double) { return vm.mirrorOf(((Double)value).doubleValue()); } else if(value instanceof String) { return vm.mirrorOf((String)value); } else { LOG.error("unknown default initializer type " + value.getClass().getName()); return null; } } private boolean hasValue(String localName) { if(!mySyntheticLocals.containsKey(localName)) { if(myParentFragmentEvaluator != null){ return myParentFragmentEvaluator.hasValue(localName); } else { return false; } } else { return true; } } public void setInitialValue(String localName, Object value) { LOG.assertTrue(!(value instanceof Value), "use setValue for jdi values"); if(hasValue(localName)) { throw new EvaluateRuntimeException( EvaluateExceptionUtil.createEvaluateException(DebuggerBundle.message("evaluation.error.variable.already.declared", localName))); } mySyntheticLocals.put(localName, value); } public void setValue(String localName, Value value) throws EvaluateException { if(!mySyntheticLocals.containsKey(localName)) { if(myParentFragmentEvaluator != null){ myParentFragmentEvaluator.setValue(localName, value); } else { throw 
EvaluateExceptionUtil.createEvaluateException(DebuggerBundle.message("evaluation.error.variable.not.declared", localName)); } } else { mySyntheticLocals.put(localName, value); } } }
mglukhikh/intellij-community
java/debugger/impl/src/com/intellij/debugger/engine/evaluation/expression/CodeFragmentEvaluator.java
Java
apache-2.0
4,502
package com.kit.imagelib.imagelooker; public interface OnPageSelectedListener { public void onPageSelected(); }
BigAppOS/BigApp_Discuz_Android
libs/ImageLib/src/com/kit/imagelib/imagelooker/OnPageSelectedListener.java
Java
apache-2.0
126
package matchers import ( "fmt" "github.com/onsi/gomega/format" "math" ) type BeNumericallyMatcher struct { Comparator string CompareTo []interface{} } func (matcher *BeNumericallyMatcher) Match(actual interface{}) (success bool, message string, err error) { if len(matcher.CompareTo) == 0 || len(matcher.CompareTo) > 2 { return false, "", fmt.Errorf("BeNumerically requires 1 or 2 CompareTo arguments. Got:\n%s", format.Object(matcher.CompareTo, 1)) } if !isNumber(actual) { return false, "", fmt.Errorf("Expected a number. Got:\n%s", format.Object(actual, 1)) } if !isNumber(matcher.CompareTo[0]) { return false, "", fmt.Errorf("Expected a number. Got:\n%s", format.Object(matcher.CompareTo[0], 1)) } if len(matcher.CompareTo) == 2 && !isNumber(matcher.CompareTo[1]) { return false, "", fmt.Errorf("Expected a number. Got:\n%s", format.Object(matcher.CompareTo[0], 1)) } switch matcher.Comparator { case "==", "~", ">", ">=", "<", "<=": default: return false, "", fmt.Errorf("Unknown comparator: %s", matcher.Comparator) } if isFloat(actual) || isFloat(matcher.CompareTo[0]) { var secondOperand float64 = 1e-8 if len(matcher.CompareTo) == 2 { secondOperand = toFloat(matcher.CompareTo[1]) } success = matcher.matchFloats(toFloat(actual), toFloat(matcher.CompareTo[0]), secondOperand) } else if isInteger(actual) { var secondOperand int64 = 0 if len(matcher.CompareTo) == 2 { secondOperand = toInteger(matcher.CompareTo[1]) } success = matcher.matchIntegers(toInteger(actual), toInteger(matcher.CompareTo[0]), secondOperand) } else if isUnsignedInteger(actual) { var secondOperand uint64 = 0 if len(matcher.CompareTo) == 2 { secondOperand = toUnsignedInteger(matcher.CompareTo[1]) } success = matcher.matchUnsignedIntegers(toUnsignedInteger(actual), toUnsignedInteger(matcher.CompareTo[0]), secondOperand) } else { return false, "", fmt.Errorf("Failed to compare:\n%s\n%s:\n%s", format.Object(actual, 1), matcher.Comparator, format.Object(matcher.CompareTo[0], 1)) } if success { return true, 
format.Message(actual, fmt.Sprintf("not to be %s", matcher.Comparator), matcher.CompareTo[0]), nil } else { return false, format.Message(actual, fmt.Sprintf("to be %s", matcher.Comparator), matcher.CompareTo[0]), nil } } func (matcher *BeNumericallyMatcher) matchIntegers(actual, compareTo, threshold int64) (success bool) { switch matcher.Comparator { case "==", "~": diff := actual - compareTo return -threshold <= diff && diff <= threshold case ">": return (actual > compareTo) case ">=": return (actual >= compareTo) case "<": return (actual < compareTo) case "<=": return (actual <= compareTo) } return false } func (matcher *BeNumericallyMatcher) matchUnsignedIntegers(actual, compareTo, threshold uint64) (success bool) { switch matcher.Comparator { case "==", "~": if actual < compareTo { actual, compareTo = compareTo, actual } return actual-compareTo <= threshold case ">": return (actual > compareTo) case ">=": return (actual >= compareTo) case "<": return (actual < compareTo) case "<=": return (actual <= compareTo) } return false } func (matcher *BeNumericallyMatcher) matchFloats(actual, compareTo, threshold float64) (success bool) { switch matcher.Comparator { case "~": return math.Abs(actual-compareTo) <= threshold case "==": return (actual == compareTo) case ">": return (actual > compareTo) case ">=": return (actual >= compareTo) case "<": return (actual < compareTo) case "<=": return (actual <= compareTo) } return false }
aminjam/onememberoauth2
Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_numerically_matcher.go
GO
apache-2.0
3,596
/******************************************************************************* * Copyright 2015 Unicon (R) Licensed under the * Educational Community License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may * obtain a copy of the License at * * http://www.osedu.org/licenses/ECL-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing * permissions and limitations under the License. *******************************************************************************/ /** * */ package org.apereo.lai; import java.io.Serializable; /** * @author ggilbert * */ public interface Institution extends Serializable { String getName(); String getKey(); String getSecret(); }
ccooper1/OpenDash
src/main/java/org/apereo/lai/Institution.java
Java
apache-2.0
967
var spawn = require('child_process').spawn var port = exports.port = 1337 exports.registry = "http://localhost:" + port exports.run = run function run (cmd, t, opts, cb) { if (!opts) opts = {} if (!Array.isArray(cmd)) throw new Error("cmd must be an Array") if (!t || !t.end) throw new Error("node-tap instance is missing") var stdout = "" , stderr = "" , node = process.execPath , child = spawn(node, cmd, opts) child.stderr.on("data", function (chunk) { stderr += chunk }) child.stdout.on("data", function (chunk) { stdout += chunk }) child.on("close", function (code) { if (cb) cb(t, stdout, stderr, code, { cmd: cmd, opts: opts }) else t.end() }) }
jplusui/xfly
xfly/node/node_modules/npm/test/common-tap.js
JavaScript
apache-2.0
731
# # Author:: Adam Jacob (<adam@chef.io>) # Copyright:: Copyright 2008-2017, Chef Software Inc. # License:: Apache License, Version 2.0 # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # require "spec_helper" require "ostruct" describe Chef::Provider::Package::Apt do # XXX: sorry this is ugly and was done quickly to get 12.0.2 out, this file needs a rewrite to use # let blocks and shared examples [ Chef::Resource::Package, Chef::Resource::AptPackage ].each do |resource_klass| describe "when the new_resource is a #{resource_klass}" do before(:each) do @node = Chef::Node.new @events = Chef::EventDispatch::Dispatcher.new @run_context = Chef::RunContext.new(@node, {}, @events) @new_resource = resource_klass.new("irssi", @run_context) @status = double("Status", :exitstatus => 0) @provider = Chef::Provider::Package::Apt.new(@new_resource, @run_context) @stdin = StringIO.new @stdout = <<-PKG_STATUS irssi: Installed: (none) Candidate: 0.8.14-1ubuntu4 Version table: 0.8.14-1ubuntu4 0 500 http://us.archive.ubuntu.com/ubuntu/ lucid/main Packages PKG_STATUS @stderr = "" @shell_out = OpenStruct.new(:stdout => @stdout, :stdin => @stdin, :stderr => @stderr, :status => @status, :exitstatus => 0) @timeout = 900 end describe "when loading current resource" do it "should create a current resource with the name of the new_resource" do expect(@provider).to receive(:shell_out!).with( "apt-cache", "policy", @new_resource.package_name, :env => { "DEBIAN_FRONTEND" => "noninteractive" }, :timeout => @timeout 
).and_return(@shell_out) @provider.load_current_resource current_resource = @provider.current_resource expect(current_resource).to be_a(Chef::Resource::Package) expect(current_resource.name).to eq("irssi") expect(current_resource.package_name).to eq("irssi") expect(current_resource.version).to eql([nil]) end it "should set the installed version if package has one" do @stdout.replace(<<-INSTALLED) sudo: Installed: 1.7.2p1-1ubuntu5.3 Candidate: 1.7.2p1-1ubuntu5.3 Version table: *** 1.7.2p1-1ubuntu5.3 0 500 http://us.archive.ubuntu.com/ubuntu/ lucid-updates/main Packages 500 http://security.ubuntu.com/ubuntu/ lucid-security/main Packages 100 /var/lib/dpkg/status 1.7.2p1-1ubuntu5 0 500 http://us.archive.ubuntu.com/ubuntu/ lucid/main Packages INSTALLED expect(@provider).to receive(:shell_out!).and_return(@shell_out) @provider.load_current_resource expect(@provider.current_resource.version).to eq(["1.7.2p1-1ubuntu5.3"]) expect(@provider.candidate_version).to eql(["1.7.2p1-1ubuntu5.3"]) end # it is the superclasses responsibility to throw most exceptions it "if the package does not exist in the cache sets installed + candidate version to nil" do @new_resource.package_name("conic-smarms") policy_out = <<-POLICY_STDOUT N: Unable to locate package conic-smarms POLICY_STDOUT policy = double(:stdout => policy_out, :exitstatus => 0) expect(@provider).to receive(:shell_out!).with( "apt-cache", "policy", "conic-smarms", :env => { "DEBIAN_FRONTEND" => "noninteractive" }, :timeout => @timeout ).and_return(policy) showpkg_out = <<-SHOWPKG_STDOUT N: Unable to locate package conic-smarms SHOWPKG_STDOUT showpkg = double(:stdout => showpkg_out, :exitstatus => 0) expect(@provider).to receive(:shell_out!).with( "apt-cache", "showpkg", "conic-smarms", :env => { "DEBIAN_FRONTEND" => "noninteractive" }, :timeout => @timeout ).and_return(showpkg) @provider.load_current_resource end # libmysqlclient-dev is a real package in newer versions of debian + ubuntu # list of virtual packages: 
http://www.debian.org/doc/packaging-manuals/virtual-package-names-list.txt it "should not install the virtual package there is a single provider package and it is installed" do @new_resource.package_name("libmysqlclient15-dev") virtual_package_out = <<-VPKG_STDOUT libmysqlclient15-dev: Installed: (none) Candidate: (none) Version table: VPKG_STDOUT virtual_package = double(:stdout => virtual_package_out, :exitstatus => 0) expect(@provider).to receive(:shell_out!).with( "apt-cache", "policy", "libmysqlclient15-dev", :env => { "DEBIAN_FRONTEND" => "noninteractive" }, :timeout => @timeout ).and_return(virtual_package) showpkg_out = <<-SHOWPKG_STDOUT Package: libmysqlclient15-dev Versions: Reverse Depends: libmysqlclient-dev,libmysqlclient15-dev libmysqlclient-dev,libmysqlclient15-dev libmysqlclient-dev,libmysqlclient15-dev libmysqlclient-dev,libmysqlclient15-dev libmysqlclient-dev,libmysqlclient15-dev libmysqlclient-dev,libmysqlclient15-dev Dependencies: Provides: Reverse Provides: libmysqlclient-dev 5.1.41-3ubuntu12.7 libmysqlclient-dev 5.1.41-3ubuntu12.10 libmysqlclient-dev 5.1.41-3ubuntu12 SHOWPKG_STDOUT showpkg = double(:stdout => showpkg_out, :exitstatus => 0) expect(@provider).to receive(:shell_out!).with( "apt-cache", "showpkg", "libmysqlclient15-dev", :env => { "DEBIAN_FRONTEND" => "noninteractive" }, :timeout => @timeout ).and_return(showpkg) real_package_out = <<-RPKG_STDOUT libmysqlclient-dev: Installed: 5.1.41-3ubuntu12.10 Candidate: 5.1.41-3ubuntu12.10 Version table: *** 5.1.41-3ubuntu12.10 0 500 http://us.archive.ubuntu.com/ubuntu/ lucid-updates/main Packages 100 /var/lib/dpkg/status 5.1.41-3ubuntu12.7 0 500 http://security.ubuntu.com/ubuntu/ lucid-security/main Packages 5.1.41-3ubuntu12 0 500 http://us.archive.ubuntu.com/ubuntu/ lucid/main Packages RPKG_STDOUT real_package = double(:stdout => real_package_out, :exitstatus => 0) expect(@provider).to receive(:shell_out!).with( "apt-cache", "policy", "libmysqlclient-dev", :env => { "DEBIAN_FRONTEND" => 
"noninteractive" }, :timeout => @timeout ).and_return(real_package) @provider.load_current_resource end it "should raise an exception if you specify a virtual package with multiple provider packages" do @new_resource.package_name("mp3-decoder") virtual_package_out = <<-VPKG_STDOUT mp3-decoder: Installed: (none) Candidate: (none) Version table: VPKG_STDOUT virtual_package = double(:stdout => virtual_package_out, :exitstatus => 0) expect(@provider).to receive(:shell_out!).with( "apt-cache", "policy", "mp3-decoder", :env => { "DEBIAN_FRONTEND" => "noninteractive" }, :timeout => @timeout ).and_return(virtual_package) showpkg_out = <<-SHOWPKG_STDOUT Package: mp3-decoder Versions: Reverse Depends: nautilus,mp3-decoder vux,mp3-decoder plait,mp3-decoder ecasound,mp3-decoder nautilus,mp3-decoder Dependencies: Provides: Reverse Provides: vlc-nox 1.0.6-1ubuntu1.8 vlc 1.0.6-1ubuntu1.8 vlc-nox 1.0.6-1ubuntu1 vlc 1.0.6-1ubuntu1 opencubicplayer 1:0.1.17-2 mpg321 0.2.10.6 mpg123 1.12.1-0ubuntu1 SHOWPKG_STDOUT showpkg = double(:stdout => showpkg_out, :exitstatus => 0) expect(@provider).to receive(:shell_out!).with( "apt-cache", "showpkg", "mp3-decoder", :env => { "DEBIAN_FRONTEND" => "noninteractive" }, :timeout => @timeout ).and_return(showpkg) expect { @provider.load_current_resource }.to raise_error(Chef::Exceptions::Package) end it "should run apt-cache policy with the default_release option, if there is one on the resource" do @new_resource = Chef::Resource::AptPackage.new("irssi", @run_context) @provider = Chef::Provider::Package::Apt.new(@new_resource, @run_context) @new_resource.default_release("lenny-backports") @new_resource.provider(nil) expect(@provider).to receive(:shell_out!).with( "apt-cache", "-o", "APT::Default-Release=lenny-backports", "policy", "irssi", :env => { "DEBIAN_FRONTEND" => "noninteractive" }, :timeout => @timeout ).and_return(@shell_out) @provider.load_current_resource end it "raises an exception if a source is specified (CHEF-5113)" do 
@new_resource.source "pluto" expect(@provider).to receive(:shell_out!).with( "apt-cache", "policy", @new_resource.package_name, :env => { "DEBIAN_FRONTEND" => "noninteractive" } , :timeout => @timeout ).and_return(@shell_out) expect { @provider.run_action(:install) }.to raise_error(Chef::Exceptions::Package) end end context "after loading the current resource" do before do @current_resource = resource_klass.new("irssi", @run_context) @provider.current_resource = @current_resource allow(@provider).to receive(:package_data).and_return({ "irssi" => { virtual: false, candidate_version: "0.8.12-7", installed_version: nil, }, "libmysqlclient15-dev" => { virtual: true, candidate_version: nil, installed_version: nil, }, }) end describe "install_package" do it "should run apt-get install with the package name and version" do expect(@provider).to receive(:shell_out!). with( "apt-get", "-q", "-y", "install", "irssi=0.8.12-7", :env => { "DEBIAN_FRONTEND" => "noninteractive" }, :timeout => @timeout ) @provider.install_package(["irssi"], ["0.8.12-7"]) end it "should run apt-get install with the package name and version and options if specified" do expect(@provider).to receive(:shell_out!).with( "apt-get", "-q", "-y", "--force-yes", "install", "irssi=0.8.12-7", :env => { "DEBIAN_FRONTEND" => "noninteractive" }, :timeout => @timeout ) @new_resource.options("--force-yes") @provider.install_package(["irssi"], ["0.8.12-7"]) end it "should run apt-get install with the package name and version and default_release if there is one and provider is explicitly defined" do @new_resource = nil @new_resource = Chef::Resource::AptPackage.new("irssi", @run_context) @new_resource.default_release("lenny-backports") @new_resource.provider = nil @provider.new_resource = @new_resource expect(@provider).to receive(:shell_out!).with( "apt-get", "-q", "-y", "-o", "APT::Default-Release=lenny-backports", "install", "irssi=0.8.12-7", :env => { "DEBIAN_FRONTEND" => "noninteractive" }, :timeout => @timeout ) 
@provider.install_package(["irssi"], ["0.8.12-7"]) end it "should run apt-get install with the package name and quotes options if specified" do expect(@provider).to receive(:shell_out!).with( "apt-get", "-q", "-y", "--force-yes", "-o", "Dpkg::Options::=--force-confdef", "-o", "Dpkg::Options::=--force-confnew", "install", "irssi=0.8.12-7", :env => { "DEBIAN_FRONTEND" => "noninteractive" }, :timeout => @timeout ) @new_resource.options('--force-yes -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confnew"') @provider.install_package(["irssi"], ["0.8.12-7"]) end end describe resource_klass, "upgrade_package" do it "should run install_package with the name and version" do expect(@provider).to receive(:install_package).with(["irssi"], ["0.8.12-7"]) @provider.upgrade_package(["irssi"], ["0.8.12-7"]) end end describe resource_klass, "remove_package" do it "should run apt-get remove with the package name" do expect(@provider).to receive(:shell_out!).with( "apt-get", "-q", "-y", "remove", "irssi", :env => { "DEBIAN_FRONTEND" => "noninteractive" }, :timeout => @timeout ) @provider.remove_package(["irssi"], ["0.8.12-7"]) end it "should run apt-get remove with the package name and options if specified" do expect(@provider).to receive(:shell_out!).with( "apt-get", "-q", "-y", "--force-yes", "remove", "irssi", :env => { "DEBIAN_FRONTEND" => "noninteractive" }, :timeout => @timeout ) @new_resource.options("--force-yes") @provider.remove_package(["irssi"], ["0.8.12-7"]) end end describe "when purging a package" do it "should run apt-get purge with the package name" do expect(@provider).to receive(:shell_out!).with( "apt-get", "-q", "-y", "purge", "irssi", :env => { "DEBIAN_FRONTEND" => "noninteractive" }, :timeout => @timeout ) @provider.purge_package(["irssi"], ["0.8.12-7"]) end it "should run apt-get purge with the package name and options if specified" do expect(@provider).to receive(:shell_out!).with( "apt-get", "-q", "-y", "--force-yes", "purge", "irssi", :env 
=> { "DEBIAN_FRONTEND" => "noninteractive" }, :timeout => @timeout ) @new_resource.options("--force-yes") @provider.purge_package(["irssi"], ["0.8.12-7"]) end end describe "when preseeding a package" do before(:each) do allow(@provider).to receive(:get_preseed_file).and_return("/tmp/irssi-0.8.12-7.seed") end it "should get the full path to the preseed response file" do file = "/tmp/irssi-0.8.12-7.seed" expect(@provider).to receive(:shell_out!).with( "debconf-set-selections", "/tmp/irssi-0.8.12-7.seed", :env => { "DEBIAN_FRONTEND" => "noninteractive" }, :timeout => @timeout ) @provider.preseed_package(file) end it "should run debconf-set-selections on the preseed file if it has changed" do expect(@provider).to receive(:shell_out!).with( "debconf-set-selections", "/tmp/irssi-0.8.12-7.seed", :env => { "DEBIAN_FRONTEND" => "noninteractive" }, :timeout => @timeout ) file = @provider.get_preseed_file("irssi", "0.8.12-7") @provider.preseed_package(file) end it "should not run debconf-set-selections if the preseed file has not changed" do allow(@provider).to receive(:check_all_packages_state) @current_resource.version "0.8.11" @new_resource.response_file "/tmp/file" allow(@provider).to receive(:get_preseed_file).and_return(false) expect(@provider).not_to receive(:shell_out!) 
@provider.run_action(:reconfig) end end describe "when reconfiguring a package" do it "should run dpkg-reconfigure package" do expect(@provider).to receive(:shell_out!).with( "dpkg-reconfigure", "irssi", :env => { "DEBIAN_FRONTEND" => "noninteractive" }, :timeout => @timeout ) @provider.reconfig_package("irssi", "0.8.12-7") end end describe "when locking a package" do it "should run apt-mark hold package" do expect(@provider).to receive(:shell_out!).with( "apt-mark", "hold", "irssi", :env => { "DEBIAN_FRONTEND" => "noninteractive" }, :timeout => @timeout ) @provider.lock_package("irssi", "0.8.12-7") end end describe "when unlocking a package" do it "should run apt-mark unhold package" do expect(@provider).to receive(:shell_out!).with( "apt-mark", "unhold", "irssi", :env => { "DEBIAN_FRONTEND" => "noninteractive" }, :timeout => @timeout ) @provider.unlock_package("irssi", "0.8.12-7") end end describe "when installing a virtual package" do it "should install the package without specifying a version" do @provider.package_data["libmysqlclient15-dev"][:virtual] = true expect(@provider).to receive(:shell_out!).with( "apt-get", "-q", "-y", "install", "libmysqlclient15-dev", :env => { "DEBIAN_FRONTEND" => "noninteractive" }, :timeout => @timeout ) @provider.install_package(["libmysqlclient15-dev"], ["not_a_real_version"]) end end describe "when removing a virtual package" do it "should remove the resolved name instead of the virtual package name" do expect(@provider).to receive(:resolve_virtual_package_name).with("libmysqlclient15-dev").and_return("libmysqlclient-dev") expect(@provider).to receive(:shell_out!).with( "apt-get", "-q", "-y", "remove", "libmysqlclient-dev", :env => { "DEBIAN_FRONTEND" => "noninteractive" }, :timeout => @timeout ) @provider.remove_package(["libmysqlclient15-dev"], ["not_a_real_version"]) end end describe "when purging a virtual package" do it "should purge the resolved name instead of the virtual package name" do expect(@provider).to 
receive(:resolve_virtual_package_name).with("libmysqlclient15-dev").and_return("libmysqlclient-dev") expect(@provider).to receive(:shell_out!).with( "apt-get", "-q", "-y", "purge", "libmysqlclient-dev", :env => { "DEBIAN_FRONTEND" => "noninteractive" }, :timeout => @timeout ) @provider.purge_package(["libmysqlclient15-dev"], ["not_a_real_version"]) end end describe "when installing multiple packages" do it "can install a virtual package followed by a non-virtual package" do # https://github.com/chef/chef/issues/2914 expect(@provider).to receive(:shell_out!).with( "apt-get", "-q", "-y", "install", "libmysqlclient15-dev", "irssi=0.8.12-7", :env => { "DEBIAN_FRONTEND" => "noninteractive" }, :timeout => @timeout ) @provider.install_package(["libmysqlclient15-dev", "irssi"], ["not_a_real_version", "0.8.12-7"]) end end describe "#action_install" do it "should run dpkg to compare versions if an existing version is installed" do allow(@provider).to receive(:get_current_versions).and_return("1.4.0") allow(@new_resource).to receive(:allow_downgrade).and_return(false) expect(@provider).to receive(:shell_out_compact_timeout).with( "dpkg", "--compare-versions", "1.4.0", "gt", "0.8.12-7" ).and_return(double(error?: false)) @provider.run_action(:upgrade) end it "should install the package if the installed version is older" do allow(@provider).to receive(:get_current_versions).and_return("0.4.0") allow(@new_resource).to receive(:allow_downgrade).and_return(false) expect(@provider).to receive(:version_compare).and_return(-1) expect(@provider).to receive(:shell_out!).with( "apt-get", "-q", "-y", "install", "irssi=0.8.12-7", :env => { "DEBIAN_FRONTEND" => "noninteractive" }, :timeout => @timeout ) @provider.run_action(:upgrade) end it "should not compare versions if an existing version is not installed" do allow(@provider).to receive(:get_current_versions).and_return(nil) allow(@new_resource).to receive(:allow_downgrade).and_return(false) expect(@provider).not_to 
receive(:version_compare) expect(@provider).to receive(:shell_out!).with( "apt-get", "-q", "-y", "install", "irssi=0.8.12-7", :env => { "DEBIAN_FRONTEND" => "noninteractive" }, :timeout => @timeout ) @provider.run_action(:upgrade) end end end end end end
mal/chef
spec/unit/provider/package/apt_spec.rb
Ruby
apache-2.0
21,755
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System; using System.Collections.Generic; using System.Diagnostics; using System.IO; using System.Runtime.Remoting; using System.Threading; using System.Threading.Tasks; using Microsoft.CodeAnalysis; using Microsoft.CodeAnalysis.Remote; using Microsoft.VisualStudio.LanguageServices.Remote; using Nerdbank; using Roslyn.Utilities; using StreamJsonRpc; namespace Roslyn.Test.Utilities.Remote { internal sealed class InProcRemoteHostClient : RemoteHostClient { private readonly InProcRemoteServices _inprocServices; private readonly ReferenceCountedDisposable<RemotableDataJsonRpc> _remotableDataRpc; private readonly JsonRpc _rpc; public static async Task<RemoteHostClient> CreateAsync(Workspace workspace, bool runCacheCleanup) { var inprocServices = new InProcRemoteServices(runCacheCleanup); // Create the RemotableDataJsonRpc before we create the remote host: this call implicitly sets up the remote IExperimentationService so that will be available for later calls var remotableDataRpc = new RemotableDataJsonRpc(workspace, inprocServices.Logger, await inprocServices.RequestServiceAsync(WellKnownServiceHubServices.SnapshotService).ConfigureAwait(false)); var remoteHostStream = await inprocServices.RequestServiceAsync(WellKnownRemoteHostServices.RemoteHostService).ConfigureAwait(false); var current = CreateClientId(Process.GetCurrentProcess().Id.ToString()); var instance = new InProcRemoteHostClient(current, workspace, inprocServices, new ReferenceCountedDisposable<RemotableDataJsonRpc>(remotableDataRpc), remoteHostStream); // make sure connection is done right var telemetrySession = default(string); var uiCultureLCIDE = 0; var cultureLCID = 0; var host = await instance._rpc.InvokeAsync<string>(nameof(IRemoteHostService.Connect), current, uiCultureLCIDE, cultureLCID, telemetrySession).ConfigureAwait(false); // TODO: 
change this to non fatal watson and make VS to use inproc implementation Contract.ThrowIfFalse(host == current.ToString()); instance.Started(); // return instance return instance; } private InProcRemoteHostClient( string clientId, Workspace workspace, InProcRemoteServices inprocServices, ReferenceCountedDisposable<RemotableDataJsonRpc> remotableDataRpc, Stream stream) : base(workspace) { Contract.ThrowIfNull(remotableDataRpc); ClientId = clientId; _inprocServices = inprocServices; _remotableDataRpc = remotableDataRpc; _rpc = stream.CreateStreamJsonRpc(target: this, inprocServices.Logger); // handle disconnected situation _rpc.Disconnected += OnRpcDisconnected; _rpc.StartListening(); } public AssetStorage AssetStorage => _inprocServices.AssetStorage; public void RegisterService(string name, Func<Stream, IServiceProvider, ServiceHubServiceBase> serviceCreator) { _inprocServices.RegisterService(name, serviceCreator); } public override string ClientId { get; } public override async Task<Connection> TryCreateConnectionAsync( string serviceName, object callbackTarget, CancellationToken cancellationToken) { // get stream from service hub to communicate service specific information // this is what consumer actually use to communicate information var serviceStream = await _inprocServices.RequestServiceAsync(serviceName).ConfigureAwait(false); return new JsonRpcConnection(_inprocServices.Logger, callbackTarget, serviceStream, _remotableDataRpc.TryAddReference()); } protected override void OnStarted() { } protected override void OnStopped() { // we are asked to disconnect. 
unsubscribe and dispose to disconnect _rpc.Disconnected -= OnRpcDisconnected; _rpc.Dispose(); _remotableDataRpc.Dispose(); } private void OnRpcDisconnected(object sender, JsonRpcDisconnectedEventArgs e) { Stopped(); } public class ServiceProvider : IServiceProvider { private static readonly TraceSource s_traceSource = new TraceSource("inprocRemoteClient"); private readonly AssetStorage _storage; public ServiceProvider(bool runCacheCleanup) { _storage = runCacheCleanup ? new AssetStorage(cleanupInterval: TimeSpan.FromSeconds(30), purgeAfter: TimeSpan.FromMinutes(1), gcAfter: TimeSpan.FromMinutes(5)) : new AssetStorage(); } public AssetStorage AssetStorage => _storage; public object GetService(Type serviceType) { if (typeof(TraceSource) == serviceType) { return s_traceSource; } if (typeof(AssetStorage) == serviceType) { return _storage; } throw ExceptionUtilities.UnexpectedValue(serviceType); } } private class InProcRemoteServices { private readonly ServiceProvider _serviceProvider; private readonly Dictionary<string, Func<Stream, IServiceProvider, ServiceHubServiceBase>> _creatorMap; public InProcRemoteServices(bool runCacheCleanup) { _serviceProvider = new ServiceProvider(runCacheCleanup); _creatorMap = new Dictionary<string, Func<Stream, IServiceProvider, ServiceHubServiceBase>>(); RegisterService(WellKnownRemoteHostServices.RemoteHostService, (s, p) => new RemoteHostService(s, p)); RegisterService(WellKnownServiceHubServices.CodeAnalysisService, (s, p) => new CodeAnalysisService(s, p)); RegisterService(WellKnownServiceHubServices.SnapshotService, (s, p) => new SnapshotService(s, p)); RegisterService(WellKnownServiceHubServices.RemoteSymbolSearchUpdateEngine, (s, p) => new RemoteSymbolSearchUpdateEngine(s, p)); } public AssetStorage AssetStorage => _serviceProvider.AssetStorage; public TraceSource Logger { get; } = new TraceSource("Default"); public void RegisterService(string name, Func<Stream, IServiceProvider, ServiceHubServiceBase> serviceCreator) { 
_creatorMap.Add(name, serviceCreator); } public Task<Stream> RequestServiceAsync(string serviceName) { if (_creatorMap.TryGetValue(serviceName, out var creator)) { var tuple = FullDuplexStream.CreateStreams(); return Task.FromResult<Stream>(new WrappedStream(creator(tuple.Item1, _serviceProvider), tuple.Item2)); } throw ExceptionUtilities.UnexpectedValue(serviceName); } private class WrappedStream : Stream { private readonly IDisposable _service; private readonly Stream _stream; public WrappedStream(IDisposable service, Stream stream) { // tie service's lifetime with that of stream _service = service; _stream = stream; } public override long Position { get { return _stream.Position; } set { _stream.Position = value; } } public override int ReadTimeout { get { return _stream.ReadTimeout; } set { _stream.ReadTimeout = value; } } public override int WriteTimeout { get { return _stream.WriteTimeout; } set { _stream.WriteTimeout = value; } } public override bool CanRead => _stream.CanRead; public override bool CanSeek => _stream.CanSeek; public override bool CanWrite => _stream.CanWrite; public override long Length => _stream.Length; public override bool CanTimeout => _stream.CanTimeout; public override void Flush() => _stream.Flush(); public override Task FlushAsync(CancellationToken cancellationToken) => _stream.FlushAsync(cancellationToken); public override long Seek(long offset, SeekOrigin origin) => _stream.Seek(offset, origin); public override void SetLength(long value) => _stream.SetLength(value); public override int ReadByte() => _stream.ReadByte(); public override void WriteByte(byte value) => _stream.WriteByte(value); public override int Read(byte[] buffer, int offset, int count) => _stream.Read(buffer, offset, count); public override void Write(byte[] buffer, int offset, int count) => _stream.Write(buffer, offset, count); public override Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) => _stream.ReadAsync(buffer, 
offset, count, cancellationToken); public override Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) => _stream.WriteAsync(buffer, offset, count, cancellationToken); public override IAsyncResult BeginRead(byte[] buffer, int offset, int count, AsyncCallback callback, object state) => _stream.BeginRead(buffer, offset, count, callback, state); public override int EndRead(IAsyncResult asyncResult) => _stream.EndRead(asyncResult); public override IAsyncResult BeginWrite(byte[] buffer, int offset, int count, AsyncCallback callback, object state) => _stream.BeginWrite(buffer, offset, count, callback, state); public override void EndWrite(IAsyncResult asyncResult) => _stream.EndWrite(asyncResult); public override Task CopyToAsync(Stream destination, int bufferSize, CancellationToken cancellationToken) => _stream.CopyToAsync(destination, bufferSize, cancellationToken); public override object InitializeLifetimeService() { throw new NotSupportedException(); } public override ObjRef CreateObjRef(Type requestedType) { throw new NotSupportedException(); } public override void Close() { _service.Dispose(); _stream.Close(); } protected override void Dispose(bool disposing) { base.Dispose(disposing); _service.Dispose(); _stream.Dispose(); } } } } }
aelij/roslyn
src/EditorFeatures/TestUtilities/Remote/InProcRemostHostClient.cs
C#
apache-2.0
11,474
// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Generated code. DO NOT EDIT! namespace Google.Cloud.Monitoring.V3.Snippets { // [START monitoring_v3_generated_UptimeCheckService_CreateUptimeCheckConfig_sync_flattened] using Google.Cloud.Monitoring.V3; public sealed partial class GeneratedUptimeCheckServiceClientSnippets { /// <summary>Snippet for CreateUptimeCheckConfig</summary> /// <remarks> /// This snippet has been automatically generated for illustrative purposes only. /// It may require modifications to work in your environment. /// </remarks> public void CreateUptimeCheckConfig() { // Create client UptimeCheckServiceClient uptimeCheckServiceClient = UptimeCheckServiceClient.Create(); // Initialize request argument(s) string parent = "projects/[PROJECT]"; UptimeCheckConfig uptimeCheckConfig = new UptimeCheckConfig(); // Make the request UptimeCheckConfig response = uptimeCheckServiceClient.CreateUptimeCheckConfig(parent, uptimeCheckConfig); } } // [END monitoring_v3_generated_UptimeCheckService_CreateUptimeCheckConfig_sync_flattened] }
googleapis/google-cloud-dotnet
apis/Google.Cloud.Monitoring.V3/Google.Cloud.Monitoring.V3.GeneratedSnippets/UptimeCheckServiceClient.CreateUptimeCheckConfigSnippet.g.cs
C#
apache-2.0
1,776
package org.goodsManagement.service.impl.PoiUtils; import org.apache.poi.hssf.usermodel.HSSFSheet; import org.apache.poi.hssf.usermodel.HSSFWorkbook; import org.goodsManagement.po.GetGoodsDto; import org.goodsManagement.vo.GetGoodsVO; import org.springframework.stereotype.Component; import java.util.ArrayList; import java.util.List; /** * Created by lifei on 2015/9/23. */ @Component public class GetGoodsToExcel { /*public static void main(String[] args){ List<GetGoodsVO> list = new ArrayList<GetGoodsVO>(); GetGoodsVO a1 = new GetGoodsVO(); a1.setStaffname("大黄"); a1.setGoodname("屎"); a1.setGetnumber(2); a1.setGoodtype("一大坨"); list.add(a1); GetGoodsVO a2 = new GetGoodsVO(); a2.setStaffname("小黄"); a2.setGoodname("屎"); a2.setGetnumber(2); a2.setGoodtype("一桶"); list.add(a2); String path = "C:\\Users\\lifei\\Desktop\\getgood.xls"; GetGoodsToExcel.toExcel(list,path); System.out.println("导出完成"); }*/ /** * * @param list * 数据库表中人员领用记录的集合 * @param path * 要写入的文件的路径 */ public void addtoExcel(List<GetGoodsVO> list,String path){ HSSFWorkbook wb = new HSSFWorkbook(); HSSFSheet sheet = wb.createSheet("Outgoods"); String[] n = { "姓名", "物品名称号", "物品型号", "物品数量" }; Object[][] value = new Object[list.size() + 1][4]; for (int m = 0; m < n.length; m++) { value[0][m] = n[m]; } for (int i = 0; i < list.size(); i++) { GetGoodsVO getGoodsVOg= (GetGoodsVO) list.get(i); value[i + 1][0] = getGoodsVOg.getStaffname(); value[i + 1][1] = getGoodsVOg.getGoodname(); value[i + 1][2] = getGoodsVOg.getGoodtype(); value[i + 1][3] = getGoodsVOg.getGetnumber(); } ExcelUtils.writeArrayToExcel(wb, sheet, list.size() + 1, 4, value); ExcelUtils.writeWorkbook(wb, path); } }
sunshine-life/GoodsManagement
src/main/java/org/goodsManagement/service/impl/PoiUtils/GetGoodsToExcel.java
Java
apache-2.0
2,120
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.bean.validator.springboot; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.annotation.Generated; import org.apache.camel.CamelContext; import org.apache.camel.component.bean.validator.BeanValidatorComponent; import org.apache.camel.spi.ComponentCustomizer; import org.apache.camel.spi.HasId; import org.apache.camel.spring.boot.CamelAutoConfiguration; import org.apache.camel.spring.boot.ComponentConfigurationProperties; import org.apache.camel.spring.boot.util.CamelPropertiesHelper; import org.apache.camel.spring.boot.util.ConditionalOnCamelContextAndAutoConfigurationBeans; import org.apache.camel.spring.boot.util.GroupCondition; import org.apache.camel.spring.boot.util.HierarchicalPropertiesEvaluator; import org.apache.camel.support.IntrospectionSupport; import org.apache.camel.util.ObjectHelper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.autoconfigure.AutoConfigureAfter; import org.springframework.boot.autoconfigure.condition.ConditionalOnBean; import 
org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Conditional; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Lazy; /** * Generated by camel-package-maven-plugin - do not edit this file! */ @Generated("org.apache.camel.maven.packaging.SpringBootAutoConfigurationMojo") @Configuration @Conditional({ConditionalOnCamelContextAndAutoConfigurationBeans.class, BeanValidatorComponentAutoConfiguration.GroupConditions.class}) @AutoConfigureAfter(CamelAutoConfiguration.class) @EnableConfigurationProperties({ComponentConfigurationProperties.class, BeanValidatorComponentConfiguration.class}) public class BeanValidatorComponentAutoConfiguration { private static final Logger LOGGER = LoggerFactory .getLogger(BeanValidatorComponentAutoConfiguration.class); @Autowired private ApplicationContext applicationContext; @Autowired private CamelContext camelContext; @Autowired private BeanValidatorComponentConfiguration configuration; @Autowired(required = false) private List<ComponentCustomizer<BeanValidatorComponent>> customizers; static class GroupConditions extends GroupCondition { public GroupConditions() { super("camel.component", "camel.component.bean-validator"); } } @Lazy @Bean(name = "bean-validator-component") @ConditionalOnMissingBean(BeanValidatorComponent.class) public BeanValidatorComponent configureBeanValidatorComponent() throws Exception { BeanValidatorComponent component = new BeanValidatorComponent(); component.setCamelContext(camelContext); Map<String, Object> parameters = new HashMap<>(); IntrospectionSupport.getProperties(configuration, parameters, null, false); for (Map.Entry<String, Object> entry : parameters.entrySet()) { Object value = 
entry.getValue(); Class<?> paramClass = value.getClass(); if (paramClass.getName().endsWith("NestedConfiguration")) { Class nestedClass = null; try { nestedClass = (Class) paramClass.getDeclaredField( "CAMEL_NESTED_CLASS").get(null); HashMap<String, Object> nestedParameters = new HashMap<>(); IntrospectionSupport.getProperties(value, nestedParameters, null, false); Object nestedProperty = nestedClass.newInstance(); CamelPropertiesHelper.setCamelProperties(camelContext, nestedProperty, nestedParameters, false); entry.setValue(nestedProperty); } catch (NoSuchFieldException e) { } } } CamelPropertiesHelper.setCamelProperties(camelContext, component, parameters, false); if (ObjectHelper.isNotEmpty(customizers)) { for (ComponentCustomizer<BeanValidatorComponent> customizer : customizers) { boolean useCustomizer = (customizer instanceof HasId) ? HierarchicalPropertiesEvaluator.evaluate( applicationContext.getEnvironment(), "camel.component.customizer", "camel.component.bean-validator.customizer", ((HasId) customizer).getId()) : HierarchicalPropertiesEvaluator.evaluate( applicationContext.getEnvironment(), "camel.component.customizer", "camel.component.bean-validator.customizer"); if (useCustomizer) { LOGGER.debug("Configure component {}, with customizer {}", component, customizer); customizer.customize(component); } } } return component; } }
kevinearls/camel
platforms/spring-boot/components-starter/camel-bean-validator-starter/src/main/java/org/apache/camel/component/bean/validator/springboot/BeanValidatorComponentAutoConfiguration.java
Java
apache-2.0
6,273
//===- WriterUtils.cpp ----------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "WriterUtils.h"
#include "lld/Common/ErrorHandler.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/EndianStream.h"
#include "llvm/Support/LEB128.h"

#define DEBUG_TYPE "lld"

using namespace llvm;
using namespace llvm::wasm;

namespace lld {

// Returns the textual wasm name of a value type, e.g. "i32".
std::string toString(ValType type) {
  switch (type) {
  case ValType::I32:
    return "i32";
  case ValType::I64:
    return "i64";
  case ValType::F32:
    return "f32";
  case ValType::F64:
    return "f64";
  case ValType::V128:
    return "v128";
  case ValType::EXNREF:
    return "exnref";
  case ValType::EXTERNREF:
    return "externref";
  }
  llvm_unreachable("Invalid wasm::ValType");
}

// Renders a signature as "(p1, p2) -> r"; only the first return value is
// shown, and "void" when there are no returns.
std::string toString(const WasmSignature &sig) {
  SmallString<128> s("(");
  for (ValType type : sig.Params) {
    if (s.size() != 1)
      s += ", ";
    s += toString(type);
  }
  s += ") -> ";
  if (sig.Returns.empty())
    s += "void";
  else
    s += toString(sig.Returns[0]);
  return std::string(s.str());
}

// Renders a global type as e.g. "var i32" or "const f64".
std::string toString(const WasmGlobalType &type) {
  return (type.Mutable ? "var " : "const ") +
         toString(static_cast<ValType>(type.Type));
}

std::string toString(const WasmEventType &type) {
  if (type.Attribute == WASM_EVENT_ATTRIBUTE_EXCEPTION)
    return "exception";
  return "unknown";
}

namespace wasm {

// Emits a trace line (offset + message) when -debug is enabled; no-op
// otherwise.
void debugWrite(uint64_t offset, const Twine &msg) {
  LLVM_DEBUG(dbgs() << format(" | %08lld: ", offset) << msg << "\n");
}

// Writes an unsigned LEB128-encoded integer at the current stream position.
void writeUleb128(raw_ostream &os, uint64_t number, const Twine &msg) {
  debugWrite(os.tell(), msg + "[" + utohexstr(number) + "]");
  encodeULEB128(number, os);
}

// Writes a signed LEB128-encoded integer at the current stream position.
void writeSleb128(raw_ostream &os, int64_t number, const Twine &msg) {
  debugWrite(os.tell(), msg + "[" + utohexstr(number) + "]");
  encodeSLEB128(number, os);
}

// Writes `count` raw bytes with no length prefix.
void writeBytes(raw_ostream &os, const char *bytes, size_t count,
                const Twine &msg) {
  debugWrite(os.tell(), msg + " [data[" + Twine(count) + "]]");
  os.write(bytes, count);
}

// Writes a wasm string: ULEB128 byte length followed by the raw bytes.
void writeStr(raw_ostream &os, StringRef string, const Twine &msg) {
  debugWrite(os.tell(),
             msg + " [str[" + Twine(string.size()) + "]: " + string + "]");
  encodeULEB128(string.size(), os);
  os.write(string.data(), string.size());
}

void writeU8(raw_ostream &os, uint8_t byte, const Twine &msg) {
  debugWrite(os.tell(), msg + " [0x" + utohexstr(byte) + "]");
  os << byte;
}

// Fixed-width little-endian 32-bit write (not LEB128).
void writeU32(raw_ostream &os, uint32_t number, const Twine &msg) {
  debugWrite(os.tell(), msg + "[0x" + utohexstr(number) + "]");
  support::endian::write(os, number, support::little);
}

// Fixed-width little-endian 64-bit write (not LEB128).
void writeU64(raw_ostream &os, uint64_t number, const Twine &msg) {
  debugWrite(os.tell(), msg + "[0x" + utohexstr(number) + "]");
  support::endian::write(os, number, support::little);
}

// Writes a value type as its single-byte binary encoding.
void writeValueType(raw_ostream &os, ValType type, const Twine &msg) {
  writeU8(os, static_cast<uint8_t>(type),
          msg + "[type: " + toString(type) + "]");
}

// Writes a function signature: the func type marker, then the param count
// and types, then the result count and types.
void writeSig(raw_ostream &os, const WasmSignature &sig) {
  writeU8(os, WASM_TYPE_FUNC, "signature type");
  writeUleb128(os, sig.Params.size(), "param Count");
  for (ValType paramType : sig.Params) {
    writeValueType(os, paramType, "param type");
  }
  writeUleb128(os, sig.Returns.size(), "result Count");
  for (ValType returnType : sig.Returns) {
    writeValueType(os, returnType, "result type");
  }
}

// Emits an `i32.const <number>` instruction.
void writeI32Const(raw_ostream &os, int32_t number, const Twine &msg) {
  writeU8(os, WASM_OPCODE_I32_CONST, "i32.const");
  writeSleb128(os, number, msg);
}

// Emits an `i64.const <number>` instruction.
void writeI64Const(raw_ostream &os, int64_t number, const Twine &msg) {
  writeU8(os, WASM_OPCODE_I64_CONST, "i64.const");
  writeSleb128(os, number, msg);
}

// Emits the alignment/offset immediate pair used by memory instructions.
void writeMemArg(raw_ostream &os, uint32_t alignment, uint64_t offset) {
  writeUleb128(os, alignment, "alignment");
  writeUleb128(os, offset, "offset");
}

// Emits a constant initializer expression (opcode + literal + end opcode).
// Unknown opcodes are a fatal link error.
void writeInitExpr(raw_ostream &os, const WasmInitExpr &initExpr) {
  writeU8(os, initExpr.Opcode, "opcode");
  switch (initExpr.Opcode) {
  case WASM_OPCODE_I32_CONST:
    writeSleb128(os, initExpr.Value.Int32, "literal (i32)");
    break;
  case WASM_OPCODE_I64_CONST:
    writeSleb128(os, initExpr.Value.Int64, "literal (i64)");
    break;
  case WASM_OPCODE_F32_CONST:
    // Float payloads are stored as raw bit patterns, hence the fixed-width
    // integer writers.
    writeU32(os, initExpr.Value.Float32, "literal (f32)");
    break;
  case WASM_OPCODE_F64_CONST:
    writeU64(os, initExpr.Value.Float64, "literal (f64)");
    break;
  case WASM_OPCODE_GLOBAL_GET:
    writeUleb128(os, initExpr.Value.Global, "literal (global index)");
    break;
  case WASM_OPCODE_REF_NULL:
    writeValueType(os, ValType::EXTERNREF, "literal (externref type)");
    break;
  default:
    fatal("unknown opcode in init expr: " + Twine(initExpr.Opcode));
  }
  writeU8(os, WASM_OPCODE_END, "opcode:end");
}

// Writes table/memory limits; the maximum is present only when the
// HAS_MAX flag bit is set.
void writeLimits(raw_ostream &os, const WasmLimits &limits) {
  writeU8(os, limits.Flags, "limits flags");
  writeUleb128(os, limits.Initial, "limits initial");
  if (limits.Flags & WASM_LIMITS_FLAG_HAS_MAX)
    writeUleb128(os, limits.Maximum, "limits max");
}

void writeGlobalType(raw_ostream &os, const WasmGlobalType &type) {
  // TODO: Update WasmGlobalType to use ValType and remove this cast.
  writeValueType(os, ValType(type.Type), "global type");
  writeU8(os, type.Mutable, "global mutable");
}

// Writes a global definition: its type followed by its init expression.
void writeGlobal(raw_ostream &os, const WasmGlobal &global) {
  writeGlobalType(os, global.Type);
  writeInitExpr(os, global.InitExpr);
}

void writeEventType(raw_ostream &os, const WasmEventType &type) {
  writeUleb128(os, type.Attribute, "event attribute");
  writeUleb128(os, type.SigIndex, "sig index");
}

void writeEvent(raw_ostream &os, const WasmEvent &event) {
  writeEventType(os, event.Type);
}

// Writes a table type; element type is always funcref here.
void writeTableType(raw_ostream &os, const llvm::wasm::WasmTable &type) {
  writeU8(os, WASM_TYPE_FUNCREF, "table type");
  writeLimits(os, type.Limits);
}

// Writes one entry of the import section; the payload depends on the
// import kind. Unsupported kinds are a fatal link error.
void writeImport(raw_ostream &os, const WasmImport &import) {
  writeStr(os, import.Module, "import module name");
  writeStr(os, import.Field, "import field name");
  writeU8(os, import.Kind, "import kind");
  switch (import.Kind) {
  case WASM_EXTERNAL_FUNCTION:
    writeUleb128(os, import.SigIndex, "import sig index");
    break;
  case WASM_EXTERNAL_GLOBAL:
    writeGlobalType(os, import.Global);
    break;
  case WASM_EXTERNAL_EVENT:
    writeEventType(os, import.Event);
    break;
  case WASM_EXTERNAL_MEMORY:
    writeLimits(os, import.Memory);
    break;
  case WASM_EXTERNAL_TABLE:
    writeTableType(os, import.Table);
    break;
  default:
    fatal("unsupported import type: " + Twine(import.Kind));
  }
}

// Writes one entry of the export section: name, kind byte, and the index
// of the exported item. Unsupported kinds are a fatal link error.
void writeExport(raw_ostream &os, const WasmExport &export_) {
  writeStr(os, export_.Name, "export name");
  writeU8(os, export_.Kind, "export kind");
  switch (export_.Kind) {
  case WASM_EXTERNAL_FUNCTION:
    writeUleb128(os, export_.Index, "function index");
    break;
  case WASM_EXTERNAL_GLOBAL:
    writeUleb128(os, export_.Index, "global index");
    break;
  case WASM_EXTERNAL_EVENT:
    writeUleb128(os, export_.Index, "event index");
    break;
  case WASM_EXTERNAL_MEMORY:
    writeUleb128(os, export_.Index, "memory index");
    break;
  case WASM_EXTERNAL_TABLE:
    writeUleb128(os, export_.Index, "table index");
    break;
  default:
    fatal("unsupported export type: " + Twine(export_.Kind));
  }
}

} // namespace wasm
} // namespace lld
google/llvm-propeller
lld/wasm/WriterUtils.cpp
C++
apache-2.0
7,617
# Copyright 2015 Rafe Kaplan # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from . import parser class Rule(parser.Parser): def __init__(self, expected_attr_type=None): self.__expected_attr_type = expected_attr_type @property def attr_type(self): if self.__expected_attr_type: return self.__expected_attr_type else: try: inner_parser = self.__parser except AttributeError: raise NotImplementedError else: return inner_parser.attr_type @property def parser(self): return self.__parser @parser.setter def parser(self, value): if self.__expected_attr_type and self.__expected_attr_type != value.attr_type: raise ValueError('Unexpected attribute type') self.__parser = parser.as_parser(value) def _parse(self, state, *args, **kwargs): with state.open_scope(*args, **kwargs): self.__parser._parse(state) def __imod__(self, other): self.parser = other return self def __call__(self, *args, **kwargs): return RuleCall(self, *args, **kwargs) class RuleCall(parser.Unary): def __init__(self, rule, *args, **kwargs): if not isinstance(rule, Rule): raise TypeError('Expected rule to be type Rule, was {}'.format(type(rule).__name__)) super(RuleCall, self).__init__(rule) self.__args = args self.__kwargs = kwargs @property def args(self): return self.__args @property def kwargs(self): return dict(self.__kwargs) def _parse(self, state): args = [state.invoke(a) for a in self.__args] kwargs = {k: state.invoke(v) for k, v in self.__kwargs.items()} self.parser._parse(state, *args, **kwargs)
slobberchops/booze
src/booze/gin/rule.py
Python
apache-2.0
2,341
/* # Licensed Materials - Property of IBM # Copyright IBM Corp. 2019 */ package com.ibm.streamsx.topology.internal.logging; import java.util.logging.Level; import java.util.logging.Logger; public interface Logging { /** * Set the root logging levels from Python logging integer level. * @param levelS */ public static void setRootLevels(String levelS) { int loggingLevel = Integer.valueOf(levelS); Level level; if (loggingLevel >= 40) { level = Level.SEVERE; } else if (loggingLevel >= 30) { level = Level.WARNING; } else if (loggingLevel >= 20) { level = Level.CONFIG; } else { level = Level.FINE; } Logger.getLogger("").setLevel(level); } }
ddebrunner/streamsx.topology
java/src/com/ibm/streamsx/topology/internal/logging/Logging.java
Java
apache-2.0
800
package wikokit.base.wikt.db; import android.util.Log; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.util.zip.ZipEntry; import java.util.zip.ZipInputStream; /** Decompress ziped file. * * @see http://www.jondev.net/articles/Unzipping_Files_with_Android_%28Programmatically%29 */ public class Decompressor { private String _zipFile; private String _location; public Decompressor(String zipFile, String location) { _zipFile = zipFile; _location = location; _dirChecker(""); } public void unzip() { try { FileInputStream fin = new FileInputStream(_zipFile); ZipInputStream zin = new ZipInputStream(fin); ZipEntry ze = null; while ((ze = zin.getNextEntry()) != null) { Log.v("Decompress", "Unzipping " + ze.getName()); if(ze.isDirectory()) { _dirChecker(ze.getName()); } else { FileOutputStream fout = new FileOutputStream(_location + ze.getName()); for (int c = zin.read(); c != -1; c = zin.read()) { fout.write(c); } zin.closeEntry(); fout.close(); } } zin.close(); } catch(Exception e) { Log.e("Decompress", "unzip", e); } } private void _dirChecker(String dir) { File f = new File(_location + dir); if(!f.isDirectory()) { f.mkdirs(); } } }
componavt/wikokit
android/common_wiki_android/src/wikokit/base/wikt/db/Decompressor.java
Java
apache-2.0
1,533
// --------------------------------------------------------------------------------------------------------------------
// <summary>
//   The chat room controller.
// </summary>
// --------------------------------------------------------------------------------------------------------------------

namespace WebStreams.Sample
{
    using System;
    using System.Collections.Concurrent;
    using System.Reactive.Linq;
    using System.Reactive.Subjects;

    using Dapr.WebStreams.Server;

    /// <summary>
    /// The chat room controller.
    /// </summary>
    [RoutePrefix("/chat")]
    public class ChatRoomController
    {
        /// <summary>
        /// The chat rooms.
        /// </summary>
        // Keyed by room name; ConcurrentDictionary allows concurrent joins.
        private readonly ConcurrentDictionary<string, ISubject<ChatEvent>> rooms =
            new ConcurrentDictionary<string, ISubject<ChatEvent>>();

        /// <summary>
        /// The stream of room updates.
        /// </summary>
        // Fires the room name each time a brand-new room is created.
        private readonly ISubject<string> roomUpdates = new Subject<string>();

        /// <summary>
        /// Joins the calling user to a chat room.
        /// </summary>
        /// <param name="room">
        /// The room name.
        /// </param>
        /// <param name="user">
        /// The joining user's name.
        /// </param>
        /// <param name="messages">
        /// The stream of chat messages from the user.
        /// </param>
        /// <returns>
        /// The stream of chat events.
        /// </returns>
        [Route("join")]
        public IObservable<ChatEvent> JoinRoom(string room, string user, IObservable<string> messages)
        {
            // Get or create the room being requested.
            var roomStream = this.GetOrAddRoom(room);

            // Send a happy little join message.
            roomStream.OnNext(
                new ChatEvent { User = user, Message = "Joined!", Time = DateTime.UtcNow, Type = "presence" });

            // Turn incoming messages into chat events and pipe them into the room.
            // When the user's stream completes, a "Left." presence event is emitted.
            messages.Select(message => new ChatEvent { User = user, Message = message, Time = DateTime.UtcNow })
                .Subscribe(
                    roomStream.OnNext,
                    () =>
                    roomStream.OnNext(
                        new ChatEvent { User = user, Message = "Left.", Time = DateTime.UtcNow, Type = "presence" }));

            return roomStream;
        }

        /// <summary>
        /// Returns the stream of chat rooms.
        /// </summary>
        /// <returns>The stream of chat rooms.</returns>
        [Route("rooms")]
        public IObservable<string> GetRooms()
        {
            var result = new ReplaySubject<string>();

            // Subscribe to future room creations first, then replay the rooms
            // that already exist.
            // NOTE(review): a room created between these two steps may be
            // emitted twice; callers appear expected to tolerate duplicates —
            // confirm.
            this.roomUpdates.Subscribe(result);
            foreach (var channel in this.rooms.Keys)
            {
                result.OnNext(channel);
            }

            return result;
        }

        /// <summary>
        /// Returns the chat room with the provided <paramref name="name"/>.
        /// </summary>
        /// <param name="name">
        /// The room name.
        /// </param>
        /// <returns>
        /// The chat room with the provided <paramref name="name"/>.
        /// </returns>
        private ISubject<ChatEvent> GetOrAddRoom(string name)
        {
            // `added` stays null unless the value factory actually runs.
            var added = default(ISubject<ChatEvent>);
            var result = this.rooms.GetOrAdd(
                name,
                _ => added = new ReplaySubject<ChatEvent>(100));

            // If a new room was actually added, fire an update.
            // (GetOrAdd may invoke the factory and still discard its result
            // when another thread wins the race; the Equals check ensures the
            // update only fires for the subject that was actually stored.)
            if (result.Equals(added))
            {
                this.roomUpdates.OnNext(name);
            }

            return result;
        }
    }
}
daprlabs/WebStreamSamples
ChatRoomController.cs
C#
apache-2.0
4,330
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.jms.tx;

import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.test.spring.CamelSpringTestSupport;
import org.junit.Test;
import org.springframework.context.support.ClassPathXmlApplicationContext;

/**
 * Simple unit test for transaction client EIP pattern and JMS.
 */
public class JMSTransactionalClientWithRollbackTest extends CamelSpringTestSupport {

    // Loads the Spring context containing the route, transaction manager and
    // JMS configuration used by this test.
    protected ClassPathXmlApplicationContext createApplicationContext() {
        return new ClassPathXmlApplicationContext(
            "/org/apache/camel/component/jms/tx/JMSTransactionalClientWithRollbackTest.xml");
    }

    @Test
    public void testTransactionSuccess() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:result");
        mock.expectedMessageCount(1);
        mock.expectedBodiesReceived("Bye World");
        // success at 3rd attempt
        // (MyProcessor's counter survives the transactional redeliveries, so
        // the header shows how many attempts were made before success.)
        mock.message(0).header("count").isEqualTo(3);

        template.sendBody("activemq:queue:okay", "Hello World");

        mock.assertIsSatisfied();
    }

    // Processor used by the route under test; keeps a per-instance attempt
    // counter across redeliveries and stamps it on the message.
    public static class MyProcessor implements Processor {
        private int count;

        public void process(Exchange exchange) throws Exception {
            exchange.getIn().setBody("Bye World");
            exchange.getIn().setHeader("count", ++count);
        }
    }
}
Fabryprog/camel
components/camel-jms/src/test/java/org/apache/camel/component/jms/tx/JMSTransactionalClientWithRollbackTest.java
Java
apache-2.0
2,206
// Dynamically centers the sphinx_rtd_theme page content and, when
// interact.js is present, lets the user drag the content edges to resize it.
(function() {
    var head = document.head || document.getElementsByTagName('head')[0];

    // The <style> element currently injected by centerPage(), if any.
    var style = null;

    var mobileScreenWidth = 768;

    // Make sure this value is equal to the width of .wy-nav-content in overrides.css.
    var initialContentWidth = 960;

    // Make sure this value is equal to the width of .wy-nav-side in theme.css.
    var sideWidth = 300;

    // Keeps the current width of .wy-nav-content.
    var contentWidth = initialContentWidth;

    // Centers the page content dynamically by (re)injecting a <style> element
    // that positions the sidebar and content for the current window width.
    function centerPage() {
        // Remove the previously injected style so stale rules never stack up.
        if (style) {
            head.removeChild(style);
            style = null;
        }

        var windowWidth = window.innerWidth;
        if (windowWidth <= mobileScreenWidth) {
            // Mobile layout: leave positioning to the theme's responsive CSS.
            return;
        }

        var leftMargin = Math.max(0, (windowWidth - sideWidth - contentWidth) / 2);
        // document.body is null before DOMContentLoaded; fall back to 0 then.
        var scrollbarWidth = document.body ? windowWidth - document.body.clientWidth : 0;

        var css = '';
        css += '.wy-nav-side { left: ' + leftMargin + 'px; }';
        css += "\n";
        css += '.wy-nav-content-wrap { margin-left: ' + (sideWidth + leftMargin) + 'px; }';
        css += "\n";
        css += '.github-fork-ribbon { margin-right: ' + (leftMargin - scrollbarWidth) + 'px; }';
        css += "\n";

        var newStyle = document.createElement('style');
        newStyle.type = 'text/css';
        if (newStyle.styleSheet) {
            // Legacy IE exposes styleSheet.cssText instead of text nodes.
            newStyle.styleSheet.cssText = css;
        } else {
            newStyle.appendChild(document.createTextNode(css));
        }

        head.appendChild(newStyle);
        style = newStyle;
    }

    centerPage();
    window.addEventListener('resize', centerPage);

    // Adjust the position of the 'fork me at GitHub' ribbon after document.body is available,
    // so that we can calculate the width of the scroll bar correctly.
    window.addEventListener('DOMContentLoaded', centerPage);

    // Allow a user to drag the left or right edge of the content to resize the content.
    // FIX: the original `if (interact)` throws a ReferenceError when the
    // interact.js script is not loaded at all (undeclared identifier);
    // `typeof` is safe on undeclared names.
    if (typeof interact !== 'undefined' && interact) {
        interact('.wy-nav-content').resizable({
            edges: {left: true, right: true, bottom: false, top: false},
            modifiers: [
                interact.modifiers.restrictEdges({
                    outer: 'parent',
                    endOnly: true
                }),
                interact.modifiers.restrictSize({
                    min: { width: initialContentWidth, height: 0 }
                })
            ]
        }).on('resizemove', function (event) {
            // Renamed from `style` to avoid shadowing the module-level
            // `style` variable above.
            var targetStyle = event.target.style;
            // Double the amount of change because the page is centered.
            contentWidth += event.deltaRect.width * 2;
            targetStyle.maxWidth = contentWidth + 'px';
            centerPage();
        });
    }
})();
trustin/sphinx-gradle-plugin
src/site/sphinx/_static/center_page.js
JavaScript
apache-2.0
2,528