code
stringlengths
3
1.05M
repo_name
stringlengths
4
116
path
stringlengths
4
991
language
stringclasses
9 values
license
stringclasses
15 values
size
int32
3
1.05M
// // Copyright (C) 2016 - present Instructure, Inc. // // This file is part of Canvas. // // Canvas is free software: you can redistribute it and/or modify it under // the terms of the GNU Affero General Public License as published by the Free // Software Foundation, version 3 of the License. // // Canvas is distributed in the hope that it will be useful, but WITHOUT ANY // WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR // A PARTICULAR PURPOSE. See the GNU Affero General Public License for more // details. // // You should have received a copy of the GNU Affero General Public License along // with this program. If not, see <http://www.gnu.org/licenses/>. import _ from 'underscore' import Depaginate from 'jsx/shared/CheatDepaginator' const listUrl = () => ENV.ENROLLMENT_TERMS_URL const deserializeTerms = termGroups => _.flatten( _.map(termGroups, group => _.map(group.enrollment_terms, (term) => { const groupID = term.grading_period_group_id const newGroupID = _.isNumber(groupID) ? groupID.toString() : groupID return { id: term.id.toString(), name: term.name, startAt: term.start_at ? new Date(term.start_at) : null, endAt: term.end_at ? new Date(term.end_at) : null, createdAt: term.created_at ? new Date(term.created_at) : null, gradingPeriodGroupId: newGroupID, } }) ) ) export default { list (terms) { return new Promise((resolve, reject) => { Depaginate(listUrl()) .then(response => resolve(deserializeTerms(response))) .fail(error => reject(error)) }) } }
venturehive/canvas-lms
app/coffeescripts/api/enrollmentTermsApi.js
JavaScript
agpl-3.0
1,662
/*====================================================== ************ Pull To Refresh ************ ======================================================*/ app.initPullToRefresh = function (pageContainer) { var eventsTarget = $(pageContainer); if (!eventsTarget.hasClass('pull-to-refresh-content')) { eventsTarget = eventsTarget.find('.pull-to-refresh-content'); } if (!eventsTarget || eventsTarget.length === 0) return; var touchId, isTouched, isMoved, touchesStart = {}, isScrolling, touchesDiff, touchStartTime, container, refresh = false, useTranslate = false, startTranslate = 0, translate, scrollTop, wasScrolled, layer, triggerDistance, dynamicTriggerDistance, pullStarted; var page = eventsTarget.hasClass('page') ? eventsTarget : eventsTarget.parents('.page'); var hasNavbar = false; if (page.find('.navbar').length > 0 || page.parents('.navbar-fixed, .navbar-through').length > 0 || page.hasClass('navbar-fixed') || page.hasClass('navbar-through')) hasNavbar = true; if (page.hasClass('no-navbar')) hasNavbar = false; if (!hasNavbar) eventsTarget.addClass('pull-to-refresh-no-navbar'); container = eventsTarget; // Define trigger distance if (container.attr('data-ptr-distance')) { dynamicTriggerDistance = true; } else { triggerDistance = 44; } function handleTouchStart(e) { if (isTouched) { if (app.device.os === 'android') { if ('targetTouches' in e && e.targetTouches.length > 1) return; } else return; } /*jshint validthis:true */ container = $(this); if (container.hasClass('refreshing')) { return; } isMoved = false; pullStarted = false; isTouched = true; isScrolling = undefined; wasScrolled = undefined; if (e.type === 'touchstart') touchId = e.targetTouches[0].identifier; touchesStart.x = e.type === 'touchstart' ? e.targetTouches[0].pageX : e.pageX; touchesStart.y = e.type === 'touchstart' ? 
e.targetTouches[0].pageY : e.pageY; touchStartTime = (new Date()).getTime(); } function handleTouchMove(e) { if (!isTouched) return; var pageX, pageY, touch; if (e.type === 'touchmove') { if (touchId && e.touches) { for (var i = 0; i < e.touches.length; i++) { if (e.touches[i].identifier === touchId) { touch = e.touches[i]; } } } if (!touch) touch = e.targetTouches[0]; pageX = touch.pageX; pageY = touch.pageY; } else { pageX = e.pageX; pageY = e.pageY; } if (!pageX || !pageY) return; if (typeof isScrolling === 'undefined') { isScrolling = !!(isScrolling || Math.abs(pageY - touchesStart.y) > Math.abs(pageX - touchesStart.x)); } if (!isScrolling) { isTouched = false; return; } scrollTop = container[0].scrollTop; if (typeof wasScrolled === 'undefined' && scrollTop !== 0) wasScrolled = true; if (!isMoved) { /*jshint validthis:true */ container.removeClass('transitioning'); if (scrollTop > container[0].offsetHeight) { isTouched = false; return; } if (dynamicTriggerDistance) { triggerDistance = container.attr('data-ptr-distance'); if (triggerDistance.indexOf('%') >= 0) triggerDistance = container[0].offsetHeight * parseInt(triggerDistance, 10) / 100; } startTranslate = container.hasClass('refreshing') ? 
triggerDistance : 0; if (container[0].scrollHeight === container[0].offsetHeight || app.device.os !== 'ios') { useTranslate = true; } else { useTranslate = false; } } isMoved = true; touchesDiff = pageY - touchesStart.y; if (touchesDiff > 0 && scrollTop <= 0 || scrollTop < 0) { // iOS 8 fix if (app.device.os === 'ios' && parseInt(app.device.osVersion.split('.')[0], 10) > 7 && scrollTop === 0 && !wasScrolled) useTranslate = true; if (useTranslate) { e.preventDefault(); translate = (Math.pow(touchesDiff, 0.85) + startTranslate); container.transform('translate3d(0,' + translate + 'px,0)'); } if ((useTranslate && Math.pow(touchesDiff, 0.85) > triggerDistance) || (!useTranslate && touchesDiff >= triggerDistance * 2)) { refresh = true; container.addClass('pull-up').removeClass('pull-down'); } else { refresh = false; container.removeClass('pull-up').addClass('pull-down'); } if (!pullStarted) { container.trigger('pullstart'); pullStarted = true; } container.trigger('pullmove', { event: e, scrollTop: scrollTop, translate: translate, touchesDiff: touchesDiff }); } else { pullStarted = false; container.removeClass('pull-up pull-down'); refresh = false; return; } } function handleTouchEnd(e) { if (e.type === 'touchend' && e.changedTouches && e.changedTouches.length > 0 && touchId) { if (e.changedTouches[0].identifier !== touchId) return; } if (!isTouched || !isMoved) { isTouched = false; isMoved = false; return; } if (translate) { container.addClass('transitioning'); translate = 0; } container.transform(''); if (refresh) { container.addClass('refreshing'); container.trigger('refresh', { done: function () { app.pullToRefreshDone(container); } }); } else { container.removeClass('pull-down'); } isTouched = false; isMoved = false; if (pullStarted) container.trigger('pullend'); } // Attach Events var passiveListener = app.touchEvents.start === 'touchstart' && app.support.passiveListener ? 
{passive: true, capture: false} : false; eventsTarget.on(app.touchEvents.start, handleTouchStart, passiveListener); eventsTarget.on(app.touchEvents.move, handleTouchMove); eventsTarget.on(app.touchEvents.end, handleTouchEnd, passiveListener); // Detach Events on page remove if (page.length === 0) return; function destroyPullToRefresh() { eventsTarget.off(app.touchEvents.start, handleTouchStart); eventsTarget.off(app.touchEvents.move, handleTouchMove); eventsTarget.off(app.touchEvents.end, handleTouchEnd); } eventsTarget[0].f7DestroyPullToRefresh = destroyPullToRefresh; function detachEvents() { destroyPullToRefresh(); page.off('pageBeforeRemove', detachEvents); } page.on('pageBeforeRemove', detachEvents); }; app.pullToRefreshDone = function (container) { container = $(container); if (container.length === 0) container = $('.pull-to-refresh-content.refreshing'); container.removeClass('refreshing').addClass('transitioning'); container.transitionEnd(function () { container.removeClass('transitioning pull-up pull-down'); container.trigger('refreshdone'); }); }; app.pullToRefreshTrigger = function (container) { container = $(container); if (container.length === 0) container = $('.pull-to-refresh-content'); if (container.hasClass('refreshing')) return; container.addClass('transitioning refreshing'); container.trigger('refresh', { done: function () { app.pullToRefreshDone(container); } }); }; app.destroyPullToRefresh = function (pageContainer) { pageContainer = $(pageContainer); var pullToRefreshContent = pageContainer.hasClass('pull-to-refresh-content') ? pageContainer : pageContainer.find('.pull-to-refresh-content'); if (pullToRefreshContent.length === 0) return; if (pullToRefreshContent[0].f7DestroyPullToRefresh) pullToRefreshContent[0].f7DestroyPullToRefresh(); };
ayuzhin/web-apps
vendor/framework7/src/js/pull-to-refresh.js
JavaScript
agpl-3.0
8,596
/** * Copyright © MyCollab * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package com.mycollab.vaadin.web.ui; import com.vaadin.ui.CheckBox; import com.vaadin.ui.themes.ValoTheme; /** * @author MyCollab Ltd. * @since 3.0 */ public class CheckBoxDecor extends CheckBox { private static final long serialVersionUID = 1L; public CheckBoxDecor(String title, boolean value) { super(title, value); this.addStyleName(ValoTheme.CHECKBOX_SMALL); } }
aglne/mycollab
mycollab-web/src/main/java/com/mycollab/vaadin/web/ui/CheckBoxDecor.java
Java
agpl-3.0
1,104
/******************************************************************************* * This file is part of OpenNMS(R). * * Copyright (C) 2016-2016 The OpenNMS Group, Inc. * OpenNMS(R) is Copyright (C) 1999-2016 The OpenNMS Group, Inc. * * OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc. * * OpenNMS(R) is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as published * by the Free Software Foundation, either version 3 of the License, * or (at your option) any later version. * * OpenNMS(R) is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with OpenNMS(R). If not, see: * http://www.gnu.org/licenses/ * * For more information contact: * OpenNMS(R) Licensing <license@opennms.org> * http://www.opennms.org/ * http://www.opennms.com/ *******************************************************************************/ package org.opennms.netmgt.poller.remote.metadata; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import org.junit.Before; import org.junit.Test; import org.opennms.netmgt.poller.remote.metadata.MetadataField.Validator; public class EmailValidatorTest { private Validator m_validator; @Before public void setUp() { m_validator = new EmailValidator(); } @Test public void testValid() { assertTrue(m_validator.isValid("ranger@opennms.org")); assertTrue(m_validator.isValid("ranger@monkey.esophagus")); assertTrue(m_validator.isValid("ranger@giant.list.of.sub.domains.com")); } @Test public void testInvalid() { assertFalse(m_validator.isValid("ranger@opennms")); assertFalse(m_validator.isValid("ranger.monkey.esophagus")); assertFalse(m_validator.isValid("ranger@")); 
assertFalse(m_validator.isValid("@foo.com")); assertFalse(m_validator.isValid("@foo.com.")); assertFalse(m_validator.isValid("@foo.com")); assertFalse(m_validator.isValid(".@foo.com")); assertFalse(m_validator.isValid(".e@foo.com")); } }
aihua/opennms
features/poller/remote/src/test/java/org/opennms/netmgt/poller/remote/metadata/EmailValidatorTest.java
Java
agpl-3.0
2,394
/* * ProActive Parallel Suite(TM): * The Open Source library for parallel and distributed * Workflows & Scheduling, Orchestration, Cloud Automation * and Big Data Analysis on Enterprise Grids & Clouds. * * Copyright (c) 2007 - 2017 ActiveEon * Contact: contact@activeeon.com * * This library is free software: you can redistribute it and/or * modify it under the terms of the GNU Affero General Public License * as published by the Free Software Foundation: version 3 of * the License. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. * * If needed, contact us to obtain a release under GPL Version 2 or 3 * or a different license than the AGPL. */ package org.objectweb.proactive.core.jmx.mbean; import java.io.Serializable; /** * This interface is used to add a class loader to the MBean Server repository. * See JMX Specification, version 1.4 ; Chap 8.4.1 : 'A class loader is added to the repository if it is registered as an MBean'. * @author The ProActive Team */ public interface JMXClassLoaderMBean extends Serializable { }
paraita/programming
programming-core/src/main/java/org/objectweb/proactive/core/jmx/mbean/JMXClassLoaderMBean.java
Java
agpl-3.0
1,413
<?php /** * Copyright (C) 2020 Xibo Signage Ltd * * Xibo - Digital Signage - http://www.xibo.org.uk * * This file is part of Xibo. * * Xibo is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as published by * the Free Software Foundation, either version 3 of the License, or * any later version. * * Xibo is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with Xibo. If not, see <http://www.gnu.org/licenses/>. */ namespace Xibo\Widget; use GuzzleHttp\Client; use GuzzleHttp\Exception\RequestException; /** * Class TwitterBase * @package Xibo\Widget */ abstract class TwitterBase extends ModuleWidget { /** * Get a auth token * @return bool|mixed */ protected function getToken() { // Prepare the URL $url = 'https://api.twitter.com/oauth2/token'; // Prepare the consumer key and secret $key = base64_encode(urlencode($this->getSetting('apiKey')) . ':' . urlencode($this->getSetting('apiSecret'))); // Check to see if we have the bearer token already cached $cache = $this->getPool()->getItem($this->makeCacheKey('bearer_' . $key)); $token = $cache->get(); if ($cache->isHit()) { $this->getLog()->debug('Bearer Token served from cache'); return $token; } // We can take up to 30 seconds to request a new token $cache->lock(30); $this->getLog()->debug('Bearer Token served from API'); $client = new Client($this->getConfig()->getGuzzleProxy()); try { $response = $client->request('POST', $url, [ 'form_params' => [ 'grant_type' => 'client_credentials' ], 'headers' => [ 'Authorization' => 'Basic ' . 
$key ] ]); $result = json_decode($response->getBody()->getContents()); if ($result->token_type !== 'bearer') { $this->getLog()->error('Twitter API returned OK, but without a bearer token. ' . var_export($result, true)); return false; } // It is, so lets cache it // long times... $cache->set($result->access_token); $cache->expiresAfter(100000); $this->getPool()->saveDeferred($cache); return $result->access_token; } catch (RequestException $requestException) { $this->getLog()->error('Twitter API returned ' . $requestException->getMessage() . ' status. Unable to proceed.'); return false; } } /** * Search the twitter API * @param $token * @param $term * @param $language * @param string $resultType * @param string $geoCode * @param int $count * @return bool|mixed * @throws \GuzzleHttp\Exception\GuzzleException */ protected function searchApi($token, $term, $language = '', $resultType = 'mixed', $geoCode = '', $count = 15) { $client = new Client($this->getConfig()->getGuzzleProxy()); $query = [ 'q' => trim($term), 'result_type' => $resultType, 'count' => $count, 'include_entities' => true, 'tweet_mode' => 'extended' ]; if ($geoCode != '') $query['geocode'] = $geoCode; if ($language != '') $query['lang'] = $language; $this->getLog()->debug('Query is: ' . json_encode($query)); try { $request = $client->request('GET', 'https://api.twitter.com/1.1/search/tweets.json', [ 'headers' => [ 'Authorization' => 'Bearer ' . $token ], 'query' => $query ]); return json_decode($request->getBody()->getContents()); } catch (RequestException $requestException) { $this->getLog()->error('Unable to reach twitter api. ' . $requestException->getMessage()); return false; } } }
xibosignage/xibo-cms
lib/Widget/TwitterBase.php
PHP
agpl-3.0
4,380
/* * Kuali Coeus, a comprehensive research administration system for higher education. * * Copyright 2005-2016 Kuali, Inc. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.kuali.coeus.propdev.impl.action; import org.apache.commons.lang3.StringUtils; import org.kuali.coeus.sys.framework.rule.KcTransactionalDocumentRuleBase; import org.kuali.rice.core.api.util.RiceKeyConstants; public class ProposalDevelopmentRejectionRule extends KcTransactionalDocumentRuleBase { private static final String ACTION_REASON = "proposalDevelopmentRejectionBean.actionReason"; public boolean proccessProposalDevelopmentRejection(ProposalDevelopmentActionBean bean) { boolean valid = true; if (StringUtils.isEmpty(bean.getActionReason())) { valid = false; String errorParams = ""; reportError(ACTION_REASON, RiceKeyConstants.ERROR_REQUIRED, errorParams); } return valid; } }
kuali/kc
coeus-impl/src/main/java/org/kuali/coeus/propdev/impl/action/ProposalDevelopmentRejectionRule.java
Java
agpl-3.0
1,586
"""Capa's specialized use of codejail.safe_exec.""" import hashlib from codejail.safe_exec import SafeExecException, json_safe from codejail.safe_exec import not_safe_exec as codejail_not_safe_exec from codejail.safe_exec import safe_exec as codejail_safe_exec from edx_django_utils.monitoring import function_trace import six from six import text_type from . import lazymod from .remote_exec import is_codejail_rest_service_enabled, get_remote_exec # Establish the Python environment for Capa. # Capa assumes float-friendly division always. # The name "random" is a properly-seeded stand-in for the random module. CODE_PROLOG = """\ from __future__ import absolute_import, division import os os.environ["OPENBLAS_NUM_THREADS"] = "1" # See TNL-6456 import random2 as random_module import sys from six.moves import xrange random = random_module.Random(%r) random.Random = random_module.Random sys.modules['random'] = random """ ASSUMED_IMPORTS = [ ("numpy", "numpy"), ("math", "math"), ("scipy", "scipy"), ("calc", "calc"), ("eia", "eia"), ("chemcalc", "chem.chemcalc"), ("chemtools", "chem.chemtools"), ("miller", "chem.miller"), ("draganddrop", "verifiers.draganddrop"), ] # We'll need the code from lazymod.py for use in safe_exec, so read it now. lazymod_py_file = lazymod.__file__ if lazymod_py_file.endswith("c"): lazymod_py_file = lazymod_py_file[:-1] with open(lazymod_py_file) as f: lazymod_py = f.read() LAZY_IMPORTS = [lazymod_py] for name, modname in ASSUMED_IMPORTS: LAZY_IMPORTS.append("{} = LazyModule('{}')\n".format(name, modname)) LAZY_IMPORTS = "".join(LAZY_IMPORTS) def update_hash(hasher, obj): """ Update a `hashlib` hasher with a nested object. To properly cache nested structures, we need to compute a hash from the entire structure, canonicalizing at every level. `hasher`'s `.update()` method is called a number of times, touching all of `obj` in the process. Only primitive JSON-safe types are supported. 
""" hasher.update(six.b(str(type(obj)))) if isinstance(obj, (tuple, list)): for e in obj: update_hash(hasher, e) elif isinstance(obj, dict): for k in sorted(obj): update_hash(hasher, k) update_hash(hasher, obj[k]) else: hasher.update(six.b(repr(obj))) @function_trace('safe_exec') def safe_exec( code, globals_dict, random_seed=None, python_path=None, extra_files=None, cache=None, limit_overrides_context=None, slug=None, unsafely=False, ): """ Execute python code safely. `code` is the Python code to execute. It has access to the globals in `globals_dict`, and any changes it makes to those globals are visible in `globals_dict` when this function returns. `random_seed` will be used to see the `random` module available to the code. `python_path` is a list of filenames or directories to add to the Python path before execution. If the name is not in `extra_files`, then it will also be copied into the sandbox. `extra_files` is a list of (filename, contents) pairs. These files are created in the sandbox. `cache` is an object with .get(key) and .set(key, value) methods. It will be used to cache the execution, taking into account the code, the values of the globals, and the random seed. `limit_overrides_context` is an optional string to be used as a key on the `settings.CODE_JAIL['limit_overrides']` dictionary in order to apply context-specific overrides to the codejail execution limits. If `limit_overrides_context` is omitted or not present in limit_overrides, then use the default limits specified insettings.CODE_JAIL['limits']. `slug` is an arbitrary string, a description that's meaningful to the caller, that will be used in log messages. If `unsafely` is true, then the code will actually be executed without sandboxing. """ # Check the cache for a previous result. 
if cache: safe_globals = json_safe(globals_dict) md5er = hashlib.md5() md5er.update(repr(code).encode('utf-8')) update_hash(md5er, safe_globals) key = "safe_exec.%r.%s" % (random_seed, md5er.hexdigest()) cached = cache.get(key) if cached is not None: # We have a cached result. The result is a pair: the exception # message, if any, else None; and the resulting globals dictionary. emsg, cleaned_results = cached globals_dict.update(cleaned_results) if emsg: raise SafeExecException(emsg) return # Create the complete code we'll run. code_prolog = CODE_PROLOG % random_seed if is_codejail_rest_service_enabled(): data = { "code": code_prolog + LAZY_IMPORTS + code, "globals_dict": globals_dict, "python_path": python_path, "limit_overrides_context": limit_overrides_context, "slug": slug, "unsafely": unsafely, "extra_files": extra_files, } emsg, exception = get_remote_exec(data) else: # Decide which code executor to use. if unsafely: exec_fn = codejail_not_safe_exec else: exec_fn = codejail_safe_exec # Run the code! Results are side effects in globals_dict. try: exec_fn( code_prolog + LAZY_IMPORTS + code, globals_dict, python_path=python_path, extra_files=extra_files, limit_overrides_context=limit_overrides_context, slug=slug, ) except SafeExecException as e: # Saving SafeExecException e in exception to be used later. exception = e emsg = text_type(e) else: emsg = None # Put the result back in the cache. This is complicated by the fact that # the globals dict might not be entirely serializable. if cache: cleaned_results = json_safe(globals_dict) cache.set(key, (emsg, cleaned_results)) # If an exception happened, raise it now. if emsg: raise exception
eduNEXT/edx-platform
common/lib/capa/capa/safe_exec/safe_exec.py
Python
agpl-3.0
6,279
<?php $module_name='Cosib_postsale'; $subpanel_layout = array ( 'top_buttons' => array ( 0 => array ( 'widget_class' => 'SubPanelTopCreateButton', ), 1 => array ( 'widget_class' => 'SubPanelTopSelectButton', 'popup_module' => 'Cosib_postsale', ), ), 'where' => '', 'list_fields' => array ( 'date_modified' => array ( 'vname' => 'LBL_DATE_MODIFIED', 'width' => '45%', ), 'edit_button' => array ( 'widget_class' => 'SubPanelEditButton', 'module' => 'Cosib_postsale', 'width' => '4%', ), 'remove_button' => array ( 'widget_class' => 'SubPanelRemoveButton', 'module' => 'Cosib_postsale', 'width' => '5%', ), ), );
yonkon/nedvig
modules/Cosib_postsale/metadata/subpanels/default.php
PHP
agpl-3.0
756
class CreateIdentities < ActiveRecord::Migration def change create_table :identities do |t| t.references :user, index: true t.string :provider t.string :uid t.timestamps null: false end end end
asm-products/pay-it-forward
db/migrate/20141010013730_create_identities.rb
Ruby
agpl-3.0
231
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. import time from odoo import api, fields, models class ProductProduct(models.Model): _inherit = "product.product" date_from = fields.Date(compute='_compute_product_margin_fields_values', string='Margin Date From') date_to = fields.Date(compute='_compute_product_margin_fields_values', string='Margin Date To') invoice_state = fields.Selection(compute='_compute_product_margin_fields_values', selection=[ ('paid', 'Paid'), ('open_paid', 'Open and Paid'), ('draft_open_paid', 'Draft, Open and Paid') ], string='Invoice State', readonly=True) sale_avg_price = fields.Float(compute='_compute_product_margin_fields_values', string='Avg. Sale Unit Price', help="Avg. Price in Customer Invoices.") purchase_avg_price = fields.Float(compute='_compute_product_margin_fields_values', string='Avg. Purchase Unit Price', help="Avg. Price in Vendor Bills ") sale_num_invoiced = fields.Float(compute='_compute_product_margin_fields_values', string='# Invoiced in Sale', help="Sum of Quantity in Customer Invoices") purchase_num_invoiced = fields.Float(compute='_compute_product_margin_fields_values', string='# Invoiced in Purchase', help="Sum of Quantity in Vendor Bills") sales_gap = fields.Float(compute='_compute_product_margin_fields_values', string='Sales Gap', help="Expected Sale - Turn Over") purchase_gap = fields.Float(compute='_compute_product_margin_fields_values', string='Purchase Gap', help="Normal Cost - Total Cost") turnover = fields.Float(compute='_compute_product_margin_fields_values', string='Turnover', help="Sum of Multiplication of Invoice price and quantity of Customer Invoices") total_cost = fields.Float(compute='_compute_product_margin_fields_values', string='Total Cost', help="Sum of Multiplication of Invoice price and quantity of Vendor Bills ") sale_expected = fields.Float(compute='_compute_product_margin_fields_values', string='Expected Sale', help="Sum of Multiplication of 
Sale Catalog price and quantity of Customer Invoices") normal_cost = fields.Float(compute='_compute_product_margin_fields_values', string='Normal Cost', help="Sum of Multiplication of Cost price and quantity of Vendor Bills") total_margin = fields.Float(compute='_compute_product_margin_fields_values', string='Total Margin', help="Turnover - Standard price") expected_margin = fields.Float(compute='_compute_product_margin_fields_values', string='Expected Margin', help="Expected Sale - Normal Cost") total_margin_rate = fields.Float(compute='_compute_product_margin_fields_values', string='Total Margin Rate(%)', help="Total margin * 100 / Turnover") expected_margin_rate = fields.Float(compute='_compute_product_margin_fields_values', string='Expected Margin (%)', help="Expected margin * 100 / Expected Sale") @api.model def read_group(self, domain, fields, groupby, offset=0, limit=None, orderby=False, lazy=True): """ Inherit read_group to calculate the sum of the non-stored fields, as it is not automatically done anymore through the XML. 
""" res = super(ProductProduct, self).read_group(domain, fields, groupby, offset=offset, limit=limit, orderby=orderby, lazy=lazy) fields_list = ['turnover', 'sale_avg_price', 'sale_purchase_price', 'sale_num_invoiced', 'purchase_num_invoiced', 'sales_gap', 'purchase_gap', 'total_cost', 'sale_expected', 'normal_cost', 'total_margin', 'expected_margin', 'total_margin_rate', 'expected_margin_rate'] if any(x in fields for x in fields_list): # Calculate first for every product in which line it needs to be applied re_ind = 0 prod_re = {} tot_products = self.browse([]) for re in res: if re.get('__domain'): products = self.search(re['__domain']) tot_products |= products for prod in products: prod_re[prod.id] = re_ind re_ind += 1 res_val = tot_products._compute_product_margin_fields_values(field_names=[x for x in fields if fields in fields_list]) for key in res_val: for l in res_val[key]: re = res[prod_re[key]] if re.get(l): re[l] += res_val[key][l] else: re[l] = res_val[key][l] return res def _compute_product_margin_fields_values(self, field_names=None): res = {} if field_names is None: field_names = [] for val in self: res[val.id] = {} date_from = self.env.context.get('date_from', time.strftime('%Y-01-01')) date_to = self.env.context.get('date_to', time.strftime('%Y-12-31')) invoice_state = self.env.context.get('invoice_state', 'open_paid') res[val.id]['date_from'] = date_from res[val.id]['date_to'] = date_to res[val.id]['invoice_state'] = invoice_state states = () payment_states = () if invoice_state == 'paid': states = ('posted',) payment_states = ('paid',) elif invoice_state == 'open_paid': states = ('posted',) payment_states = ('not_paid', 'paid') elif invoice_state == 'draft_open_paid': states = ('posted', 'draft') payment_states = ('not_paid', 'paid') company_id = self.env.company.id #Cost price is calculated afterwards as it is a property self.env['account.move.line'].flush(['price_unit', 'quantity', 'balance', 'product_id', 'display_type']) 
self.env['account.move'].flush(['state', 'payment_state', 'move_type', 'invoice_date', 'company_id']) self.env['product.template'].flush(['list_price']) sqlstr = """ WITH currency_rate AS ({}) SELECT SUM(l.price_unit / (CASE COALESCE(cr.rate, 0) WHEN 0 THEN 1.0 ELSE cr.rate END) * l.quantity) / NULLIF(SUM(l.quantity),0) AS avg_unit_price, SUM(l.quantity * (CASE WHEN i.move_type IN ('out_invoice', 'in_invoice') THEN 1 ELSE -1 END)) AS num_qty, SUM(ABS(l.balance) * (CASE WHEN i.move_type IN ('out_invoice', 'in_invoice') THEN 1 ELSE -1 END)) AS total, SUM(l.quantity * pt.list_price * (CASE WHEN i.move_type IN ('out_invoice', 'in_invoice') THEN 1 ELSE -1 END)) AS sale_expected FROM account_move_line l LEFT JOIN account_move i ON (l.move_id = i.id) LEFT JOIN product_product product ON (product.id=l.product_id) LEFT JOIN product_template pt ON (pt.id = product.product_tmpl_id) left join currency_rate cr on (cr.currency_id = i.currency_id and cr.company_id = i.company_id and cr.date_start <= COALESCE(i.invoice_date, NOW()) and (cr.date_end IS NULL OR cr.date_end > COALESCE(i.invoice_date, NOW()))) WHERE l.product_id = %s AND i.state IN %s AND i.payment_state IN %s AND i.move_type IN %s AND i.invoice_date BETWEEN %s AND %s AND i.company_id = %s AND l.display_type IS NULL AND l.exclude_from_invoice_tab = false """.format(self.env['res.currency']._select_companies_rates()) invoice_types = ('out_invoice', 'out_refund') self.env.cr.execute(sqlstr, (val.id, states, payment_states, invoice_types, date_from, date_to, company_id)) result = self.env.cr.fetchall()[0] res[val.id]['sale_avg_price'] = result[0] and result[0] or 0.0 res[val.id]['sale_num_invoiced'] = result[1] and result[1] or 0.0 res[val.id]['turnover'] = result[2] and result[2] or 0.0 res[val.id]['sale_expected'] = result[3] and result[3] or 0.0 res[val.id]['sales_gap'] = res[val.id]['sale_expected'] - res[val.id]['turnover'] invoice_types = ('in_invoice', 'in_refund') self.env.cr.execute(sqlstr, (val.id, states, 
payment_states, invoice_types, date_from, date_to, company_id)) result = self.env.cr.fetchall()[0] res[val.id]['purchase_avg_price'] = result[0] and result[0] or 0.0 res[val.id]['purchase_num_invoiced'] = result[1] and result[1] or 0.0 res[val.id]['total_cost'] = result[2] and result[2] or 0.0 res[val.id]['normal_cost'] = val.standard_price * res[val.id]['purchase_num_invoiced'] res[val.id]['purchase_gap'] = res[val.id]['normal_cost'] - res[val.id]['total_cost'] res[val.id]['total_margin'] = res[val.id]['turnover'] - res[val.id]['total_cost'] res[val.id]['expected_margin'] = res[val.id]['sale_expected'] - res[val.id]['normal_cost'] res[val.id]['total_margin_rate'] = res[val.id]['turnover'] and res[val.id]['total_margin'] * 100 / res[val.id]['turnover'] or 0.0 res[val.id]['expected_margin_rate'] = res[val.id]['sale_expected'] and res[val.id]['expected_margin'] * 100 / res[val.id]['sale_expected'] or 0.0 for k, v in res[val.id].items(): setattr(val, k, v) return res
ygol/odoo
addons/product_margin/models/product_product.py
Python
agpl-3.0
9,711
# clean sequences after alignment, criteria based on sequences # make inline with canonical ordering (no extra gaps) import os, datetime, time, re from itertools import izip from Bio.Align import MultipleSeqAlignment from Bio.Seq import Seq from scipy import stats import numpy as np class virus_clean(object): """docstring for virus_clean""" def __init__(self,n_iqd = 5, **kwargs): ''' parameters n_std -- number of interquartile distances accepted in molecular clock filter ''' self.n_iqd = n_iqd def remove_insertions(self): ''' remove all columns from the alignment in which the outgroup is gapped ''' outgroup_ok = np.array(self.sequence_lookup[self.outgroup['strain']])!='-' for seq in self.viruses: seq.seq = Seq("".join(np.array(seq.seq)[outgroup_ok]).upper()) def clean_gaps(self): ''' remove viruses with gaps -- not part of the standard pipeline ''' self.viruses = filter(lambda x: '-' in x.seq, self.viruses) def clean_ambiguous(self): ''' substitute all ambiguous characters with '-', ancestral inference will interpret this as missing data ''' for v in self.viruses: v.seq = Seq(re.sub(r'[BDEFHIJKLMNOPQRSUVWXYZ]', '-',str(v.seq))) def unique_date(self): ''' add a unique numerical date to each leaf. 
uniqueness is achieved adding a small number ''' from date_util import numerical_date og = self.sequence_lookup[self.outgroup['strain']] if hasattr(og, 'date'): try: og.num_date = numerical_date(og.date) except: print "cannot parse date" og.num_date="undefined"; for ii, v in enumerate(self.viruses): if hasattr(v, 'date'): try: v.num_date = numerical_date(v.date, self.date_format['fields']) + 1e-7*(ii+1) except: print "cannot parse date" v.num_date="undefined"; def times_from_outgroup(self): outgroup_date = self.sequence_lookup[self.outgroup['strain']].num_date return np.array([x.num_date-outgroup_date for x in self.viruses if x.strain]) def distance_from_outgroup(self): from seq_util import hamming_distance outgroup_seq = self.sequence_lookup[self.outgroup['strain']].seq return np.array([hamming_distance(x.seq, outgroup_seq) for x in self.viruses if x.strain]) def clean_distances(self): """Remove viruses that don't follow a loose clock """ times = self.times_from_outgroup() distances = self.distance_from_outgroup() slope, intercept, r_value, p_value, std_err = stats.linregress(times, distances) residuals = slope*times + intercept - distances r_iqd = stats.scoreatpercentile(residuals,75) - stats.scoreatpercentile(residuals,25) if self.verbose: print "\tslope: " + str(slope) print "\tr: " + str(r_value) print "\tresiduals iqd: " + str(r_iqd) new_viruses = [] for (v,r) in izip(self.viruses,residuals): # filter viruses more than n_std standard devitations up or down if np.abs(r)<self.n_iqd * r_iqd or v.id == self.outgroup["strain"]: new_viruses.append(v) else: if self.verbose>1: print "\t\tresidual:", r, "\nremoved ",v.strain self.viruses = MultipleSeqAlignment(new_viruses) def clean_generic(self): print "Number of viruses before cleaning:",len(self.viruses) self.unique_date() self.remove_insertions() self.clean_ambiguous() self.clean_distances() self.viruses.sort(key=lambda x:x.num_date) print "Number of viruses after outlier filtering:",len(self.viruses)
doerlbh/Indie-nextflu
augur/src/virus_clean.py
Python
agpl-3.0
3,403
<?php // Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public License v3.0. // See the LICENCE file in the repository root for full licence text. return [ 'availability' => [ 'disabled' => 'Ez a beatmap jelenleg nem letölthető.', 'parts-removed' => 'Ez a beatmap eltávolításra került a készítő vagy egy jogbirtokos harmadik fél kérésére.', 'more-info' => 'Itt találsz több információt.', 'rule_violation' => 'Ennek a map-nek néhány elemét eltávolítottuk, mert nem találtuk őket megfelelőnek az osu!-ban történő használathoz.', ], 'download' => [ 'limit_exceeded' => 'Lassíts le, játssz többet.', ], 'featured_artist_badge' => [ 'label' => 'Kiemelt előadó', ], 'index' => [ 'title' => 'Beatmap lista', 'guest_title' => 'Beatmap-ek', ], 'panel' => [ 'empty' => 'nincs beatmap', 'download' => [ 'all' => 'letöltés', 'video' => 'letöltés videóval', 'no_video' => 'letöltés videó nélkül', 'direct' => 'megnyitás osu!direct-ben', ], ], 'nominate' => [ 'hybrid_requires_modes' => 'Egy hibrid beatmap szettet legalább egy játékmódra nominálni kell.', 'incorrect_mode' => 'Nincs jogosultságod :mode módban nominálni', 'full_bn_required' => 'Teljes jogú nominátornak kell lenned a kvalifikálásra nomináláshoz.', 'too_many' => 'A nominálási követelmények már teljesültek.', 'dialog' => [ 'confirmation' => 'Biztosan nominálni szeretnéd ezt a Beatmap-et?', 'header' => 'Beatmap Nominálása', 'hybrid_warning' => 'megjegyzés: csak egyszer nominálhatsz, ezért kérlek győződj meg róla, hogy minden játékmódra nominálsz, amire szeretnél', 'which_modes' => 'Mely módokra nominálsz?', ], ], 'nsfw_badge' => [ 'label' => 'Felnőtt', ], 'show' => [ 'discussion' => 'Beszélgetés', 'details' => [ 'by_artist' => ':artist által', 'favourite' => 'A beatmap kedvencek közé tétele', 'favourite_login' => 'Jelentkezz be, hogy kedvencnek jelölt ezt beatmap-et', 'logged-out' => 'Beatmapek letöltéshez be kell jelentkezned!', 'mapped_by' => 'mappolva :mapper által', 'unfavourite' => 'Beatmap 
eltávolitása a kedvencek közül', 'updated_timeago' => 'utóljára frissítve: :timeago', 'download' => [ '_' => 'Letöltés', 'direct' => '', 'no-video' => 'Videó nélkül', 'video' => 'Videóval', ], 'login_required' => [ 'bottom' => 'további funkciók eléréséhez', 'top' => 'Bejelentkezés', ], ], 'details_date' => [ 'approved' => 'jóváhagyva: :timeago', 'loved' => 'szerette: :timeago', 'qualified' => 'kvalifikálva: :timeago', 'ranked' => 'rangsorolva: :timeago', 'submitted' => 'beküldve: :timeago', 'updated' => 'utolsó frissítés: :timeago', ], 'favourites' => [ 'limit_reached' => 'Túl sok beatmap van a kedvenceid között! Kérlek távolíts el néhányat az újrapróbálkozás előtt.', ], 'hype' => [ 'action' => 'Hype-old a beatmapet ha élvezted rajta a játékot, hogy segíthesd a <strong>Rangsorolt</strong> állapot felé jutásban.', 'current' => [ '_' => 'Ez a map :status jelenleg.', 'status' => [ 'pending' => 'függőben', 'qualified' => 'kvalifikált', 'wip' => 'munkálatok alatt', ], ], 'disqualify' => [ '_' => 'Ha találsz javaslatokat, problémákat a térképpel kapcsolatban, kérlek diszkvalifikáld ezen a linken keresztül: :link', ], 'report' => [ '_' => 'Ha találsz javaslatokat, problémákat a térképpel kapcsolatban, kérlek jelentsd az alábbi linken keresztül: :link', 'button' => 'Probléma jelentése', 'link' => 'itt', ], ], 'info' => [ 'description' => 'Leírás', 'genre' => 'Műfaj', 'language' => 'Nyelv', 'no_scores' => 'Az adatok még számítás alatt...', 'nsfw' => 'Felnőtt tartalom', 'points-of-failure' => 'Kibukási Alkalmak', 'source' => 'Forrás', 'storyboard' => 'Ez a beatmap storyboard-ot tartalmaz', 'success-rate' => 'Teljesítési arány', 'tags' => 'Címkék', 'video' => 'Ez a beatmap videót tartalmaz', ], 'nsfw_warning' => [ 'details' => 'Ez a beatmap szókimondó, sértő vagy felkavaró tartalmú. 
Továbbra is meg szeretnéd tekinteni?', 'title' => 'Felnőtt tartalom', 'buttons' => [ 'disable' => 'Figyelmeztetés kikapcsolása', 'listing' => 'Beatmap lista', 'show' => 'Mutassa', ], ], 'scoreboard' => [ 'achieved' => 'elérve: :when', 'country' => 'Országos Ranglista', 'error' => '', 'friend' => 'Baráti Ranglista', 'global' => 'Globális Ranglista', 'supporter-link' => 'Kattints <a href=":link">ide</a>,hogy megtekinthesd azt a sok jó funkciót amit kaphatsz!', 'supporter-only' => 'Támogató kell legyél, hogy elérd a baráti és az országos ranglistát!', 'title' => 'Eredménylista', 'headers' => [ 'accuracy' => 'Pontosság', 'combo' => 'Legmagasabb kombó', 'miss' => 'Miss', 'mods' => 'Modok', 'pin' => '', 'player' => 'Játékos', 'pp' => '', 'rank' => 'Rang', 'score' => 'Pontszám', 'score_total' => 'Összpontszám', 'time' => 'Idő', ], 'no_scores' => [ 'country' => 'Senki sem ért még el eredményt az országodból ezen a map-en!', 'friend' => 'Senki sem ért még el eredményt a barátaid közül ezen a map-en!', 'global' => 'Egyetlen eredmény sincs. Esetleg megpróbálhatnál szerezni párat?', 'loading' => 'Eredmények betöltése...', 'unranked' => 'Rangsorolatlan beatmap.', ], 'score' => [ 'first' => 'Az élen', 'own' => 'A legjobbad', ], 'supporter_link' => [ '_' => '', 'here' => '', ], ], 'stats' => [ 'cs' => 'Kör nagyság', 'cs-mania' => 'Billentyűk száma', 'drain' => 'HP Vesztés', 'accuracy' => 'Pontosság', 'ar' => 'Közelítési sebesség', 'stars' => 'Nehézség', 'total_length' => 'Hossz', 'bpm' => 'BPM', 'count_circles' => 'Körök Száma', 'count_sliders' => 'Sliderek Száma', 'user-rating' => 'Felhasználói Értékelés', 'rating-spread' => 'Értékelési Szórás', 'nominations' => 'Nominálások', 'playcount' => 'Játékszám', ], 'status' => [ 'ranked' => 'Rangsorolt', 'approved' => 'Jóváhagyott', 'loved' => 'Szeretett', 'qualified' => 'Kvalifikálva', 'wip' => 'Készítés alatt', 'pending' => 'Függőben', 'graveyard' => 'Temető', ], ], ];
ppy/osu-web
resources/lang/hu/beatmapsets.php
PHP
agpl-3.0
8,051
/** \file * \author John Bridgman * \brief */ #include <Variant/Blob.h> #include <stdlib.h> #include <new> #include <string.h> #include <algorithm> namespace libvariant { static void MallocFree(void *ptr, void *) { free(ptr); } shared_ptr<Blob> Blob::Create(void *ptr, unsigned len, BlobFreeFunc ffunc, void *context) { struct iovec iov = { ptr, len }; return shared_ptr<Blob>(new Blob(&iov, 1, ffunc, context)); } BlobPtr Blob::Create(struct iovec *iov, unsigned iov_len, BlobFreeFunc ffunc, void *context) { return BlobPtr(new Blob(iov, iov_len, ffunc, context)); } shared_ptr<Blob> Blob::CreateCopy(const void *ptr, unsigned len) { struct iovec iov = { (void*)ptr, len }; return CreateCopy(&iov, 1); } BlobPtr Blob::CreateCopy(const struct iovec *iov, unsigned iov_len) { unsigned len = 0; for (unsigned i = 0; i < iov_len; ++i) { len += iov[i].iov_len; } void *data = 0; #ifdef __APPLE__ // TODO: Remove when apple fixes this error. if (posix_memalign(&data, 64, std::max(len, 1u)) != 0) { throw std::bad_alloc(); } #else if (posix_memalign(&data, 64, len) != 0) { throw std::bad_alloc(); } #endif for (unsigned i = 0, copied = 0; i < iov_len; ++i) { memcpy((char*)data + copied, iov[i].iov_base, iov[i].iov_len); copied += iov[i].iov_len; } struct iovec v = { data, len }; return shared_ptr<Blob>(new Blob(&v, 1, MallocFree, 0)); } shared_ptr<Blob> Blob::CreateFree(void *ptr, unsigned len) { struct iovec iov = { ptr, len }; return CreateFree(&iov, 1); } BlobPtr Blob::CreateFree(struct iovec *iov, unsigned iov_len) { return shared_ptr<Blob>(new Blob(iov, iov_len, MallocFree, 0)); } shared_ptr<Blob> Blob::CreateReferenced(void *ptr, unsigned len) { struct iovec iov = { ptr, len }; return CreateReferenced(&iov, 1); } BlobPtr Blob::CreateReferenced(struct iovec *iov, unsigned iov_len) { return shared_ptr<Blob>(new Blob(iov, iov_len, 0, 0)); } Blob::Blob(struct iovec *v, unsigned l, BlobFreeFunc f, void *c) : iov(v, v+l), free_func(f), ctx(c) { } Blob::~Blob() { if (free_func) { for 
(unsigned i = 0; i < iov.size(); ++i) { free_func(iov[i].iov_base, ctx); } } iov.clear(); free_func = 0; ctx = 0; } shared_ptr<Blob> Blob::Copy() const { return CreateCopy(&iov[0], iov.size()); } unsigned Blob::GetTotalLength() const { unsigned size = 0; for (unsigned i = 0; i < iov.size(); ++i) { size += iov[i].iov_len; } return size; } int Blob::Compare(ConstBlobPtr other) const { unsigned our_offset = 0; unsigned oth_offset = 0; unsigned i = 0, j = 0; while (i < GetNumBuffers() && j < other->GetNumBuffers()) { unsigned len = std::min(GetLength(i) - our_offset, other->GetLength(j) - oth_offset); int res = memcmp((char*)(GetPtr(i)) + our_offset, (char*)(other->GetPtr(j)) + oth_offset, len); if (res != 0) { return res; } our_offset += len; if (our_offset >= GetLength(i)) { our_offset = 0; ++i; } oth_offset += len; if (oth_offset >= other->GetLength(j)) { oth_offset = 0; ++j; } } return 0; } }
telefonicaid/fiware-IoTAgent-Cplusplus
third_party/variant/src/Blob.cc
C++
agpl-3.0
3,102
<?php /********************************************************************************* * Zurmo is a customer relationship management program developed by * Zurmo, Inc. Copyright (C) 2014 Zurmo Inc. * * Zurmo is free software; you can redistribute it and/or modify it under * the terms of the GNU Affero General Public License version 3 as published by the * Free Software Foundation with the addition of the following permission added * to Section 15 as permitted in Section 7(a): FOR ANY PART OF THE COVERED WORK * IN WHICH THE COPYRIGHT IS OWNED BY ZURMO, ZURMO DISCLAIMS THE WARRANTY * OF NON INFRINGEMENT OF THIRD PARTY RIGHTS. * * Zurmo is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS * FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more * details. * * You should have received a copy of the GNU Affero General Public License along with * this program; if not, see http://www.gnu.org/licenses or write to the Free * Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA * 02110-1301 USA. * * You can contact Zurmo, Inc. with a mailing address at 27 North Wacker Drive * Suite 370 Chicago, IL 60606. or at email address contact@zurmo.com. * * The interactive user interfaces in original and modified versions * of this program must display Appropriate Legal Notices, as required under * Section 5 of the GNU Affero General Public License version 3. * * In accordance with Section 7(b) of the GNU Affero General Public License version 3, * these Appropriate Legal Notices must retain the display of the Zurmo * logo and Zurmo copyright notice. If the display of the logo is not reasonably * feasible for technical reasons, the Appropriate Legal Notices must display the words * "Copyright Zurmo Inc. 2014. All rights reserved". 
********************************************************************************/ class MergeTagGuideAjaxLinkActionElement extends AjaxLinkActionElement { public function getActionType() { return 'MergeTagGuide'; } public function render() { $this->registerScript(); return parent::render(); } public function renderMenuItem() { $this->registerScript(); return parent::renderMenuItem(); } protected function getDefaultLabel() { return Zurmo::t('EmailTemplatesModule', 'MergeTag Guide'); } protected function getDefaultRoute() { return Yii::app()->createUrl($this->moduleId . '/' . $this->controllerId . '/mergeTagGuide/'); } protected function getAjaxOptions() { $parentAjaxOptions = parent::getAjaxOptions(); $modalViewAjaxOptions = ModalView::getAjaxOptionsForModalLink($this->getDefaultLabel()); if (!isset($this->params['ajaxOptions'])) { $this->params['ajaxOptions'] = array(); } return CMap::mergeArray($parentAjaxOptions, $modalViewAjaxOptions, $this->params['ajaxOptions']); } protected function getHtmlOptions() { $htmlOptionsInParams = parent::getHtmlOptions(); $defaultHtmlOptions = array('id' => 'mergetag-guide', 'class' => 'simple-link'); return CMap::mergeArray($defaultHtmlOptions, $htmlOptionsInParams); } protected function registerScript() { $eventHandlerName = get_class($this); $ajaxOptions = CMap::mergeArray($this->getAjaxOptions(), array('url' => $this->route)); if (Yii::app()->clientScript->isScriptRegistered($eventHandlerName)) { return; } else { Yii::app()->clientScript->registerScript($eventHandlerName, " function ". $eventHandlerName ."() { " . ZurmoHtml::ajax($ajaxOptions)." } ", CClientScript::POS_HEAD); } return $eventHandlerName; } } ?>
speixoto/zurmo-for-school
app/protected/modules/emailTemplates/elements/actions/MergeTagGuideAjaxLinkActionElement.php
PHP
agpl-3.0
4,382
class AddPublicDiscussionsCount < ActiveRecord::Migration def change add_column :groups, :public_discussions_count, :integer, null: false, default: 0 end end
mhjb/loomio
db/migrate/20160301094551_add_public_discussions_count.rb
Ruby
agpl-3.0
166
<?php use MapasCulturais\i; $section = ''; $groups = $this->getDictGroups(); $editEntity = $this->controller->action === 'create' || $this->controller->action === 'edit'; $texts = \MapasCulturais\Themes\BaseV1\Theme::_dict(); ?> <div id="texts" class="aba-content"> <p class="alert info"> <?php i::_e('Nesta seção você configura os textos utilizados na interface do site. Cada texto tem uma explicação do local em que deverá aparecer a informação. A opção de “exibir opções avançadas” possibilita que outros campos apareçam para definição dos textos.'); ?> </p> <?php foreach($groups as $gname => $group): ?> <section class="filter-section"> <header> <?php echo $group['title']; ?> <label class="show-all"><input class="js-exibir-todos" type="checkbox"> <?php i::_e('exibir opções avançadas'); ?></label> </header> <p class="help"><?php echo $group['description']; ?></p> <?php foreach ($texts as $key => $def): $skey = str_replace(' ', '+', $key); $section = substr($key, 0, strpos($key, ":")); if($section != $gname) continue; ?> <p class="js-text-config <?php if (isset($def['required']) && $def['required']): ?> required<?php else: ?> js-optional hidden<?php endif; ?>"> <span class="label"> <?php echo $def['name'] ?><?php if ($def['description']): ?><span class="info hltip" title="<?= htmlentities($def['description']) ?>"></span><?php endif; ?>: </span> <span class="js-editable js-editable--subsite-text" data-edit="<?php echo "dict:" . $skey ?>" data-original-title="<?php echo htmlentities($def['name']) ?>" data-emptytext="<?php echo isset($entity->dict[$key]) && !empty($entity->dict[$key])? '': 'utilizando valor padrão (clique para definir)';?>" <?php if (isset($def['examples']) && $def['examples']): ?>data-examples="<?= htmlentities(json_encode($def['examples'])) ?>" <?php endif; ?> data-placeholder='<?php echo isset($entity->dict[$key]) && !empty($entity->dict[$key])? $entity->dict[$key]:$def['text'] ; ?>'><?php echo isset($entity->dict[$key]) ? 
$entity->dict[$key] : ''; ?></span> </p> <?php endforeach; ?> </section> <?php endforeach; ?> </div>
secultce/mapasculturais
src/protected/application/themes/BaseV1/layouts/parts/singles/subsite-texts.php
PHP
agpl-3.0
2,527
<?php /********************************************************************************* * Zurmo is a customer relationship management program developed by * Zurmo, Inc. Copyright (C) 2014 Zurmo Inc. * * Zurmo is free software; you can redistribute it and/or modify it under * the terms of the GNU Affero General Public License version 3 as published by the * Free Software Foundation with the addition of the following permission added * to Section 15 as permitted in Section 7(a): FOR ANY PART OF THE COVERED WORK * IN WHICH THE COPYRIGHT IS OWNED BY ZURMO, ZURMO DISCLAIMS THE WARRANTY * OF NON INFRINGEMENT OF THIRD PARTY RIGHTS. * * Zurmo is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS * FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more * details. * * You should have received a copy of the GNU Affero General Public License along with * this program; if not, see http://www.gnu.org/licenses or write to the Free * Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA * 02110-1301 USA. * * You can contact Zurmo, Inc. with a mailing address at 27 North Wacker Drive * Suite 370 Chicago, IL 60606. or at email address contact@zurmo.com. * * The interactive user interfaces in original and modified versions * of this program must display Appropriate Legal Notices, as required under * Section 5 of the GNU Affero General Public License version 3. * * In accordance with Section 7(b) of the GNU Affero General Public License version 3, * these Appropriate Legal Notices must retain the display of the Zurmo * logo and Zurmo copyright notice. If the display of the logo is not reasonably * feasible for technical reasons, the Appropriate Legal Notices must display the words * "Copyright Zurmo Inc. 2014. All rights reserved". 
********************************************************************************/ /** * Default controller for all report actions */ class ReportsDefaultController extends ZurmoBaseController { public function filters() { return array_merge(parent::filters(), array( array( self::getRightsFilterPath() . ' + drillDownDetails', 'moduleClassName' => 'ReportsModule', 'rightName' => ReportsModule::RIGHT_ACCESS_REPORTS, ), array( self::getRightsFilterPath() . ' + selectType', 'moduleClassName' => 'ReportsModule', 'rightName' => ReportsModule::RIGHT_CREATE_REPORTS, ), array( ZurmoModuleController::ZERO_MODELS_CHECK_FILTER_PATH . ' + list, index', 'controller' => $this, ), ) ); } public function actionIndex() { $this->actionList(); } public function actionList() { $pageSize = Yii::app()->pagination->resolveActiveForCurrentUserByType( 'listPageSize', get_class($this->getModule())); $savedReport = new SavedReport(false); $searchForm = new ReportsSearchForm($savedReport); $listAttributesSelector = new ListAttributesSelector('ReportsListView', get_class($this->getModule())); $searchForm->setListAttributesSelector($listAttributesSelector); $dataProvider = $this->resolveSearchDataProvider( $searchForm, $pageSize, null, 'ReportsSearchView' ); $title = Zurmo::t('ReportsModule', 'Reports'); $breadCrumbLinks = array( $title, ); if (isset($_GET['ajax']) && $_GET['ajax'] == 'list-view') { $mixedView = $this->makeListView( $searchForm, $dataProvider ); $view = new ReportsPageView($mixedView); } else { $mixedView = $this->makeActionBarSearchAndListView($searchForm, $dataProvider, 'SecuredActionBarForReportsSearchAndListView'); $view = new ReportsPageView(ZurmoDefaultViewUtil:: makeViewWithBreadcrumbsForCurrentUser( $this, $mixedView, $breadCrumbLinks, 'ReportBreadCrumbView')); } echo $view->render(); } public function actionDetails($id) { $savedReport = static::getModelAndCatchNotFoundAndDisplayError('SavedReport', intval($id)); 
ControllerSecurityUtil::resolveCanCurrentUserAccessModule($savedReport->moduleClassName); ControllerSecurityUtil::resolveAccessCanCurrentUserReadModel($savedReport); AuditEvent::logAuditEvent('ZurmoModule', ZurmoModule::AUDIT_EVENT_ITEM_VIEWED, array(strval($savedReport), 'ReportsModule'), $savedReport); $breadCrumbLinks = array(strval($savedReport)); $breadCrumbView = new ReportBreadCrumbView($this->getId(), $this->getModule()->getId(), $breadCrumbLinks); $detailsAndRelationsView = $this->makeReportDetailsAndRelationsView($savedReport, Yii::app()->request->getRequestUri(), $breadCrumbView); $view = new ReportsPageView(ZurmoDefaultViewUtil:: makeStandardViewForCurrentUser($this, $detailsAndRelationsView)); echo $view->render(); } public function actionSelectType() { $breadCrumbLinks = array(Zurmo::t('ReportsModule', 'Select Report Type')); $view = new ReportsPageView(ZurmoDefaultViewUtil:: makeViewWithBreadcrumbsForCurrentUser( $this, new ReportWizardTypesGridView(), $breadCrumbLinks, 'ReportBreadCrumbView')); echo $view->render(); } public function actionCreate($type = null) { if ($type == null) { $this->actionSelectType(); Yii::app()->end(0, false); } $breadCrumbLinks = array(Zurmo::t('Core', 'Create')); assert('is_string($type)'); $report = new Report(); $report->setType($type); $progressBarAndStepsView = ReportWizardViewFactory::makeStepsAndProgressBarViewFromReport($report); $reportWizardView = ReportWizardViewFactory::makeViewFromReport($report); $view = new ReportsPageView(ZurmoDefaultViewUtil:: makeTwoViewsWithBreadcrumbsForCurrentUser( $this, $progressBarAndStepsView, $reportWizardView, $breadCrumbLinks, 'ReportBreadCrumbView')); echo $view->render(); } public function actionEdit($id, $isBeingCopied = false) { $savedReport = SavedReport::getById((int)$id); ControllerSecurityUtil::resolveCanCurrentUserAccessModule($savedReport->moduleClassName); if (!$isBeingCopied) { ControllerSecurityUtil::resolveAccessCanCurrentUserWriteModel($savedReport); } 
$breadCrumbLinks = array(strval($savedReport)); $report = SavedReportToReportAdapter::makeReportBySavedReport($savedReport); $progressBarAndStepsView = ReportWizardViewFactory::makeStepsAndProgressBarViewFromReport($report); $reportWizardView = ReportWizardViewFactory::makeViewFromReport($report, (bool)$isBeingCopied); $view = new ReportsPageView(ZurmoDefaultViewUtil:: makeTwoViewsWithBreadcrumbsForCurrentUser( $this, $progressBarAndStepsView, $reportWizardView, $breadCrumbLinks, 'ReportBreadCrumbView')); echo $view->render(); } public function actionSave($type, $id = null, $isBeingCopied = false) { $postData = PostUtil::getData(); $savedReport = null; $report = null; $this->resolveSavedReportAndReportByPostData($postData, $savedReport, $report, $type, $id, (bool)$isBeingCopied); $reportToWizardFormAdapter = new ReportToWizardFormAdapter($report); $model = $reportToWizardFormAdapter->makeFormByType(); if (isset($postData['ajax']) && $postData['ajax'] === 'edit-form') { $errorData = ReportUtil::validateReportWizardForm($postData, $model); echo CJSON::encode($errorData); Yii::app()->end(0, false); } $explicitReadWriteModelPermissions = ExplicitReadWriteModelPermissionsUtil:: resolveByPostDataAndModelThenMake($postData[get_class($model)], $savedReport); SavedReportToReportAdapter::resolveReportToSavedReport($report, $savedReport); if ($savedReport->id > 0) { ControllerSecurityUtil::resolveCanCurrentUserAccessModule($savedReport->moduleClassName); } ControllerSecurityUtil::resolveAccessCanCurrentUserWriteModel($savedReport); if ($savedReport->save()) { StickyReportUtil::clearDataByKey($savedReport->id); if ($explicitReadWriteModelPermissions != null) { ExplicitReadWriteModelPermissionsUtil::resolveExplicitReadWriteModelPermissions($savedReport, $explicitReadWriteModelPermissions); } //i can do a safety check on perms, then do flash here, on the jscript we can go to list instead and this should come up... //make sure you add to list of things to test. 
$redirectToList = $this->resolveAfterSaveHasPermissionsProblem($savedReport, $postData[get_class($model)]['name']); echo CJSON::encode(array('id' => $savedReport->id, 'redirectToList' => $redirectToList)); Yii::app()->end(0, false); } else { throw new FailedToSaveModelException(); } } public function actionRelationsAndAttributesTree($type, $treeType, $id = null, $nodeId = null, $isBeingCopied = false) { $postData = PostUtil::getData(); $savedReport = null; $report = null; $this->resolveSavedReportAndReportByPostData($postData, $savedReport, $report, $type, $id, (bool)$isBeingCopied); if ($nodeId != null) { $reportToTreeAdapter = new ReportRelationsAndAttributesToTreeAdapter($report, $treeType); echo ZurmoTreeView::saveDataAsJson($reportToTreeAdapter->getData($nodeId)); Yii::app()->end(0, false); } $view = new ReportRelationsAndAttributesTreeView($type, $treeType, 'edit-form'); $content = $view->render(); Yii::app()->getClientScript()->setToAjaxMode(); Yii::app()->getClientScript()->render($content); echo $content; } public function actionAddAttributeFromTree($type, $treeType, $nodeId, $rowNumber, $trackableStructurePosition = false, $id = null, $isBeingCopied = false) { $postData = PostUtil::getData(); $savedReport = null; $report = null; $this->resolveSavedReportAndReportByPostData($postData, $savedReport, $report, $type, $id, (bool)$isBeingCopied); ReportUtil::processAttributeAdditionFromTree($nodeId, $treeType, $report, $rowNumber, $trackableStructurePosition); } public function actionGetAvailableSeriesAndRangesForChart($type, $id = null, $isBeingCopied = false) { $postData = PostUtil::getData(); $savedReport = null; $report = null; $this->resolveSavedReportAndReportByPostData($postData, $savedReport, $report, $type, $id, (bool)$isBeingCopied); $moduleClassName = $report->getModuleClassName(); $modelClassName = $moduleClassName::getPrimaryModelName(); $modelToReportAdapter = ModelRelationsAndAttributesToReportAdapter:: make($moduleClassName, $modelClassName, 
$report->getType()); if (!$modelToReportAdapter instanceof ModelRelationsAndAttributesToSummationReportAdapter) { throw new NotSupportedException(); } $seriesAttributesData = $modelToReportAdapter-> getAttributesForChartSeries($report->getGroupBys(), $report->getDisplayAttributes()); $rangeAttributesData = $modelToReportAdapter-> getAttributesForChartRange ($report->getDisplayAttributes()); $dataAndLabels = array(); $dataAndLabels['firstSeriesDataAndLabels'] = array('' => Zurmo::t('Core', '(None)')); $dataAndLabels['firstSeriesDataAndLabels'] = array_merge($dataAndLabels['firstSeriesDataAndLabels'], ReportUtil::makeDataAndLabelsForSeriesOrRange($seriesAttributesData)); $dataAndLabels['firstRangeDataAndLabels'] = array('' => Zurmo::t('Core', '(None)')); $dataAndLabels['firstRangeDataAndLabels'] = array_merge($dataAndLabels['firstRangeDataAndLabels'], ReportUtil::makeDataAndLabelsForSeriesOrRange($rangeAttributesData)); $dataAndLabels['secondSeriesDataAndLabels'] = array('' => Zurmo::t('Core', '(None)')); $dataAndLabels['secondSeriesDataAndLabels'] = array_merge($dataAndLabels['secondSeriesDataAndLabels'], ReportUtil::makeDataAndLabelsForSeriesOrRange($seriesAttributesData)); $dataAndLabels['secondRangeDataAndLabels'] = array('' => Zurmo::t('Core', '(None)')); $dataAndLabels['secondRangeDataAndLabels'] = array_merge($dataAndLabels['secondRangeDataAndLabels'], ReportUtil::makeDataAndLabelsForSeriesOrRange($rangeAttributesData)); echo CJSON::encode($dataAndLabels); } public function actionApplyRuntimeFilters($id) { $postData = PostUtil::getData(); $savedReport = SavedReport::getById((int)$id); ControllerSecurityUtil::resolveCanCurrentUserAccessModule($savedReport->moduleClassName); ControllerSecurityUtil::resolveAccessCanCurrentUserReadModel($savedReport); $report = SavedReportToReportAdapter::makeReportBySavedReport($savedReport); $wizardFormClassName = ReportToWizardFormAdapter::getFormClassNameByType($report->getType()); if (!isset($postData[$wizardFormClassName])) 
{ throw new NotSupportedException(); } DataToReportUtil::resolveFilters($postData[$wizardFormClassName], $report, true); if (isset($postData['ajax']) && $postData['ajax'] == 'edit-form') { $adapter = new ReportToWizardFormAdapter($report); $reportWizardForm = $adapter->makeFormByType(); $reportWizardForm->setScenario(reportWizardForm::FILTERS_VALIDATION_SCENARIO); if (!$reportWizardForm->validate()) { $errorData = array(); foreach ($reportWizardForm->getErrors() as $attribute => $errors) { $errorData[ZurmoHtml::activeId($reportWizardForm, $attribute)] = $errors; } echo CJSON::encode($errorData); Yii::app()->end(0, false); } } $filtersData = ArrayUtil::getArrayValue($postData[$wizardFormClassName], ComponentForReportForm::TYPE_FILTERS); $sanitizedFiltersData = DataToReportUtil::sanitizeFiltersData($report->getModuleClassName(), $report->getType(), $filtersData); $stickyData = array(ComponentForReportForm::TYPE_FILTERS => $sanitizedFiltersData); StickyReportUtil::setDataByKeyAndData($report->getId(), $stickyData); } public function actionResetRuntimeFilters($id) { $savedReport = SavedReport::getById((int)$id); ControllerSecurityUtil::resolveCanCurrentUserAccessModule($savedReport->moduleClassName); ControllerSecurityUtil::resolveAccessCanCurrentUserReadModel($savedReport); $report = SavedReportToReportAdapter::makeReportBySavedReport($savedReport); StickyReportUtil::clearDataByKey($report->getId()); } public function actionDelete($id) { $savedReport = SavedReport::GetById(intval($id)); ControllerSecurityUtil::resolveAccessCanCurrentUserDeleteModel($savedReport); $savedReport->delete(); $this->redirect(array($this->getId() . 
'/index')); } public function actionDrillDownDetails($id, $rowId) { $savedReport = SavedReport::getById((int)$id); ControllerSecurityUtil::resolveCanCurrentUserAccessModule($savedReport->moduleClassName); ControllerSecurityUtil::resolveAccessCanCurrentUserReadModel($savedReport, true); $report = SavedReportToReportAdapter::makeReportBySavedReport($savedReport); $report->resolveGroupBysAsFilters(GetUtil::getData()); if (null != $stickyData = StickyReportUtil::getDataByKey($report->id)) { StickyReportUtil::resolveStickyDataToReport($report, $stickyData); } $pageSize = Yii::app()->pagination->resolveActiveForCurrentUserByType( 'reportResultsSubListPageSize', get_class($this->getModule())); $dataProvider = ReportDataProviderFactory::makeForSummationDrillDown($report, $pageSize); $dataProvider->setRunReport(true); $view = new SummationDrillDownReportResultsGridView('default', 'reports', $dataProvider, $rowId); $content = $view->render(); Yii::app()->getClientScript()->setToAjaxMode(); Yii::app()->getClientScript()->render($content); echo $content; } public function actionExport($id, $stickySearchKey = null) { assert('$stickySearchKey == null || is_string($stickySearchKey)'); $savedReport = SavedReport::getById((int)$id); ControllerSecurityUtil::resolveCanCurrentUserAccessModule($savedReport->moduleClassName); ControllerSecurityUtil::resolveAccessCanCurrentUserReadModel($savedReport); $report = SavedReportToReportAdapter::makeReportBySavedReport($savedReport); $dataProvider = $this->getDataProviderForExport($report, $report->getId(), false); $totalItems = intval($dataProvider->calculateTotalItemCount()); $data = array(); if ($totalItems > 0) { if ($totalItems <= ExportModule::$asynchronousThreshold) { // Output csv file directly to user browser if ($dataProvider) { $reportToExportAdapter = ReportToExportAdapterFactory::createReportToExportAdapter($report, $dataProvider); $headerData = $reportToExportAdapter->getHeaderData(); $data = $reportToExportAdapter->getData(); } 
// Output data if (count($data)) { $fileName = $this->getModule()->getName() . ".csv"; ExportItemToCsvFileUtil::export($data, $headerData, $fileName, true); } else { Yii::app()->user->setFlash('notification', Zurmo::t('ZurmoModule', 'There is no data to export.') ); } } else { if ($dataProvider) { $serializedData = ExportUtil::getSerializedDataForExport($dataProvider); } // Create background job $exportItem = new ExportItem(); $exportItem->isCompleted = 0; $exportItem->exportFileType = 'csv'; $exportItem->exportFileName = $this->getModule()->getName(); $exportItem->modelClassName = 'SavedReport'; $exportItem->serializedData = $serializedData; $exportItem->save(); $exportItem->forget(); Yii::app()->user->setFlash('notification', Zurmo::t('ZurmoModule', 'A large amount of data has been requested for export. You will receive ' . 'a notification with the download link when the export is complete.') ); } } else { Yii::app()->user->setFlash('notification', Zurmo::t('ZurmoModule', 'There is no data to export.') ); } $this->redirect(array($this->getId() . 
'/index')); } public function actionModalList($stateMetadataAdapterClassName = null) { $modalListLinkProvider = new SelectFromRelatedEditModalListLinkProvider( $_GET['modalTransferInformation']['sourceIdFieldId'], $_GET['modalTransferInformation']['sourceNameFieldId'], $_GET['modalTransferInformation']['modalId'] ); echo ModalSearchListControllerUtil:: setAjaxModeAndRenderModalSearchList($this, $modalListLinkProvider, $stateMetadataAdapterClassName); } public function actionAutoComplete($term, $moduleClassName = null, $type = null, $autoCompleteOptions = null) { $pageSize = Yii::app()->pagination->resolveActiveForCurrentUserByType( 'autoCompleteListPageSize', get_class($this->getModule())); $autoCompleteResults = ReportAutoCompleteUtil::getByPartialName($term, $pageSize, $moduleClassName, $type, $autoCompleteOptions); echo CJSON::encode($autoCompleteResults); } protected function resolveCanCurrentUserAccessReports() { if (!RightsUtil::doesUserHaveAllowByRightName('ReportsModule', ReportsModule::RIGHT_CREATE_REPORTS, Yii::app()->user->userModel)) { $messageView = new AccessFailureView(); $view = new AccessFailurePageView($messageView); echo $view->render(); Yii::app()->end(0, false); } return true; } protected function resolveSavedReportAndReportByPostData(Array $postData, & $savedReport, & $report, $type, $id = null, $isBeingCopied = false) { if ($id == null) { $this->resolveCanCurrentUserAccessReports(); $savedReport = new SavedReport(); $report = new Report(); $report->setType($type); } elseif ($isBeingCopied) { $savedReport = new SavedReport(); $oldReport = SavedReport::getById(intval($id)); ControllerSecurityUtil::resolveAccessCanCurrentUserReadModel($oldReport); ZurmoCopyModelUtil::copy($oldReport, $savedReport); $report = SavedReportToReportAdapter::makeReportBySavedReport($savedReport); } else { $savedReport = SavedReport::getById(intval($id)); ControllerSecurityUtil::resolveAccessCanCurrentUserWriteModel($savedReport); $report = 
SavedReportToReportAdapter::makeReportBySavedReport($savedReport); } DataToReportUtil::resolveReportByWizardPostData($report, $postData, ReportToWizardFormAdapter::getFormClassNameByType($type)); } protected function resolveAfterSaveHasPermissionsProblem(SavedReport $savedReport, $modelToStringValue) { assert('is_string($modelToStringValue)'); if (ControllerSecurityUtil::doesCurrentUserHavePermissionOnSecurableItem($savedReport, Permission::READ)) { return false; } else { $notificationContent = Zurmo::t( 'ReportsModule', 'You no longer have permissions to access {modelName}.', array('{modelName}' => $modelToStringValue) ); Yii::app()->user->setFlash('notification', $notificationContent); return true; } } protected function makeReportDetailsAndRelationsView(SavedReport $savedReport, $redirectUrl, ReportBreadCrumbView $breadCrumbView) { $reportDetailsAndRelationsView = ReportDetailsAndResultsViewFactory::makeView($savedReport, $this->getId(), $this->getModule()->getId(), $redirectUrl); $gridView = new GridView(2, 1); $gridView->setView($breadCrumbView, 0, 0); $gridView->setView($reportDetailsAndRelationsView, 1, 0); return $gridView; } protected function getDataProviderForExport(Report $report, $stickyKey, $runReport) { assert('is_string($stickyKey) || is_int($stickyKey)'); assert('is_bool($runReport)'); if (null != $stickyData = StickyReportUtil::getDataByKey($stickyKey)) { StickyReportUtil::resolveStickyDataToReport($report, $stickyData); } $pageSize = Yii::app()->pagination->resolveActiveForCurrentUserByType( 'reportResultsListPageSize', get_class($this->getModule())); $dataProvider = ReportDataProviderFactory::makeByReport($report, $pageSize); if (!($dataProvider instanceof MatrixReportDataProvider)) { $totalItems = intval($dataProvider->calculateTotalItemCount()); $dataProvider->getPagination()->setPageSize($totalItems); } if ($runReport) { $dataProvider->setRunReport($runReport); } return $dataProvider; } protected function 
resolveMetadataBeforeMakingDataProvider(& $metadata) { $metadata = SavedReportUtil::resolveSearchAttributeDataByModuleClassNames($metadata, Report::getReportableModulesClassNamesCurrentUserHasAccessTo()); } } ?>
maruthisivaprasad/zurmo
app/protected/modules/reports/controllers/DefaultController1.php
PHP
agpl-3.0
30,234
/* * This library is part of OpenCms - * the Open Source Content Management System * * Copyright (c) Alkacon Software GmbH & Co. KG (http://www.alkacon.com) * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * For further information about Alkacon Software, please see the * company website: http://www.alkacon.com * * For further information about OpenCms, please see the * project website: http://www.opencms.org * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package org.opencms.gwt.shared.sort; import java.util.Comparator; /** * Comparator for objects with a type property.<p> * * @see I_CmsHasType * * @since 8.0.0 */ public class CmsComparatorType implements Comparator<I_CmsHasType> { /** Sort order flag. */ private boolean m_ascending; /** * Constructor.<p> * * @param ascending if <code>true</code> order is ascending */ public CmsComparatorType(boolean ascending) { m_ascending = ascending; } /** * @see java.util.Comparator#compare(java.lang.Object, java.lang.Object) */ public int compare(I_CmsHasType o1, I_CmsHasType o2) { int result = o1.getType().compareTo(o2.getType()); return m_ascending ? result : -result; } }
ggiudetti/opencms-core
src/org/opencms/gwt/shared/sort/CmsComparatorType.java
Java
lgpl-2.1
1,889
/* eXokernel Development Kit (XDK) Based on code by Samsung Research America Copyright (C) 2013 The GNU C Library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. The GNU C Library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with the GNU C Library; if not, see <http://www.gnu.org/licenses/>. As a special exception, if you link the code in this file with files compiled with a GNU compiler to produce an executable, that does not cause the resulting executable to be covered by the GNU Lesser General Public License. This exception does not however invalidate any other reasons why the executable file might be covered by the GNU Lesser General Public License. This exception applies to code released by its copyright holders in files containing the exception. */ #include <stdio.h> #include <stdarg.h> #include <common/logging.h> #include "nvme_common.h" #ifdef NVME_VERBOSE void NVME_INFO(const char *fmt, ...) { printf(NORMAL_MAGENTA); va_list list; va_start(list, fmt); printf("[NVME]:"); vprintf(fmt, list); va_end(list); printf(RESET); } #endif
dwaddington/xdk
drivers/nvme-ssd/nvme_common.cc
C++
lgpl-2.1
1,621
package railo.runtime.search.lucene2.query; import railo.commons.lang.StringUtil; public final class Concator implements Op { private Op left; private Op right; public Concator(Op left,Op right) { this.left=left; this.right=right; } @Override public String toString() { if(left instanceof Literal && right instanceof Literal) { String str=((Literal)left).literal+" "+((Literal)right).literal; return "\""+StringUtil.replace(str, "\"", "\"\"", false)+"\""; } return left+" "+right; } }
modius/railo
railo-java/railo-core/src/railo/runtime/search/lucene2/query/Concator.java
Java
lgpl-2.1
516
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * class XorgServer(AutotoolsPackage, XorgPackage): """X.Org Server is the free and open source implementation of the display server for the X Window System stewarded by the X.Org Foundation.""" homepage = "http://cgit.freedesktop.org/xorg/xserver" xorg_mirror_path = "xserver/xorg-server-1.18.99.901.tar.gz" version('1.18.99.901', sha256='c8425163b588de2ee7e5c8e65b0749f2710f55a7e02a8d1dc83b3630868ceb21') depends_on('pixman@0.27.2:') depends_on('font-util') depends_on('libxshmfence@1.1:') depends_on('libdrm@2.3.0:') depends_on('libx11') depends_on('dri2proto@2.8:', type='build') depends_on('dri3proto@1.0:', type='build') depends_on('glproto@1.4.17:', type='build') depends_on('flex', type='build') depends_on('bison', type='build') depends_on('pkgconfig', type='build') depends_on('util-macros', type='build') depends_on('fixesproto@5.0:') depends_on('damageproto@1.1:') depends_on('xcmiscproto@1.2.0:') depends_on('xtrans@1.3.5:') depends_on('bigreqsproto@1.1.0:') depends_on('xproto@7.0.28:') depends_on('randrproto@1.5.0:') depends_on('renderproto@0.11:') depends_on('xextproto@7.2.99.901:') depends_on('inputproto@2.3:') depends_on('kbproto@1.0.3:') depends_on('fontsproto@2.1.3:') depends_on('pixman@0.27.2:') depends_on('videoproto') depends_on('compositeproto@0.4:') depends_on('recordproto@1.13.99.1:') depends_on('scrnsaverproto@1.1:') depends_on('resourceproto@1.2.0:') depends_on('xf86driproto@2.1.0:') depends_on('glproto@1.4.17:') depends_on('presentproto@1.0:') depends_on('xineramaproto') depends_on('libxkbfile') depends_on('libxfont2') depends_on('libxext') depends_on('libxdamage') depends_on('libxfixes') depends_on('libepoxy')
iulian787/spack
var/spack/repos/builtin/packages/xorg-server/package.py
Python
lgpl-2.1
2,055
package org.hivedb.hibernate; import org.hibernate.HibernateException; import org.hibernate.Session; import org.hibernate.shards.session.OpenSessionEvent; import java.sql.SQLException; public class RecordNodeOpenSessionEvent implements OpenSessionEvent { public static ThreadLocal<String> node = new ThreadLocal<String>(); public static String getNode() { return node.get(); } public static void setNode(Session session) { node.set(getNode(session)); } public void onOpenSession(Session session) { setNode(session); } @SuppressWarnings("deprecation") private static String getNode(Session session) { String node = ""; if (session != null) { try { node = session.connection().getMetaData().getURL(); } catch (SQLException ex) { } catch (HibernateException ex) { } } return node; } }
britt/hivedb
src/main/java/org/hivedb/hibernate/RecordNodeOpenSessionEvent.java
Java
lgpl-2.1
834
############################################################################## # Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. # # This file is part of Spack. # Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. # LLNL-CODE-647188 # # For details, see https://github.com/llnl/spack # Please also see the NOTICE and LICENSE files for our notice and the LGPL. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License (as # published by the Free Software Foundation) version 2.1, February 1999. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and # conditions of the GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import spack.cmd.location import spack.modules description = "cd to spack directories in the shell" section = "environment" level = "long" def setup_parser(subparser): """This is for decoration -- spack cd is used through spack's shell support. This allows spack cd to print a descriptive help message when called with -h.""" spack.cmd.location.setup_parser(subparser) def cd(parser, args): spack.modules.print_help()
TheTimmy/spack
lib/spack/spack/cmd/cd.py
Python
lgpl-2.1
1,684
/**************************************************************************** ** ** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies). ** Contact: http://www.qt-project.org/legal ** ** This file is part of Qt Creator. ** ** Commercial License Usage ** Licensees holding valid commercial Qt licenses may use this file in ** accordance with the commercial license agreement provided with the ** Software or, alternatively, in accordance with the terms contained in ** a written agreement between you and Digia. For licensing terms and ** conditions see http://qt.digia.com/licensing. For further information ** use the contact form at http://qt.digia.com/contact-us. ** ** GNU Lesser General Public License Usage ** Alternatively, this file may be used under the terms of the GNU Lesser ** General Public License version 2.1 as published by the Free Software ** Foundation and appearing in the file LICENSE.LGPL included in the ** packaging of this file. Please review the following information to ** ensure the GNU Lesser General Public License version 2.1 requirements ** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. ** ** In addition, as a special exception, Digia gives you certain additional ** rights. These rights are described in the Digia Qt LGPL Exception ** version 1.1, included in the file LGPL_EXCEPTION.txt in this package. 
** ****************************************************************************/ #include "qmlprofilertraceview.h" #include "qmlprofilertool.h" #include "qmlprofilerstatemanager.h" #include "qmlprofilerdatamodel.h" // Needed for the load&save actions in the context menu #include <analyzerbase/ianalyzertool.h> // Comunication with the other views (limit events to range) #include "qmlprofilerviewmanager.h" #include <utils/styledbar.h> #include <QDeclarativeContext> #include <QToolButton> #include <QEvent> #include <QVBoxLayout> #include <QGraphicsObject> #include <QScrollBar> #include <QSlider> #include <QMenu> #include <math.h> using namespace QmlDebug; namespace QmlProfiler { namespace Internal { const int sliderTicks = 10000; const qreal sliderExp = 3; ///////////////////////////////////////////////////////// bool MouseWheelResizer::eventFilter(QObject *obj, QEvent *event) { if (event->type() == QEvent::Wheel) { QWheelEvent *ev = static_cast<QWheelEvent *>(event); if (ev->modifiers() & Qt::ControlModifier) { emit mouseWheelMoved(ev->pos().x(), ev->pos().y(), ev->delta()); return true; } } return QObject::eventFilter(obj, event); } ///////////////////////////////////////////////////////// void ZoomControl::setRange(qint64 startTime, qint64 endTime) { if (m_startTime != startTime || m_endTime != endTime) { m_startTime = startTime; m_endTime = endTime; emit rangeChanged(); } } ///////////////////////////////////////////////////////// ScrollableDeclarativeView::ScrollableDeclarativeView(QWidget *parent) : QDeclarativeView(parent) { } ScrollableDeclarativeView::~ScrollableDeclarativeView() { } void ScrollableDeclarativeView::scrollContentsBy(int dx, int dy) { // special workaround to track the scrollbar if (rootObject()) { int scrollY = rootObject()->property("scrollY").toInt(); rootObject()->setProperty("scrollY", QVariant(scrollY - dy)); } QDeclarativeView::scrollContentsBy(dx,dy); } ///////////////////////////////////////////////////////// class 
QmlProfilerTraceView::QmlProfilerTraceViewPrivate { public: QmlProfilerTraceViewPrivate(QmlProfilerTraceView *qq) : q(qq) {} QmlProfilerTraceView *q; QmlProfilerStateManager *m_profilerState; Analyzer::IAnalyzerTool *m_profilerTool; QmlProfilerViewManager *m_viewContainer; QSize m_sizeHint; ScrollableDeclarativeView *m_mainView; QDeclarativeView *m_timebar; QDeclarativeView *m_overview; QmlProfilerDataModel *m_profilerDataModel; ZoomControl *m_zoomControl; QToolButton *m_buttonRange; QToolButton *m_buttonLock; QWidget *m_zoomToolbar; int m_currentZoomLevel; }; QmlProfilerTraceView::QmlProfilerTraceView(QWidget *parent, Analyzer::IAnalyzerTool *profilerTool, QmlProfilerViewManager *container, QmlProfilerDataModel *model, QmlProfilerStateManager *profilerState) : QWidget(parent), d(new QmlProfilerTraceViewPrivate(this)) { setObjectName(QLatin1String("QML Profiler")); d->m_zoomControl = new ZoomControl(this); connect(d->m_zoomControl, SIGNAL(rangeChanged()), this, SLOT(updateRange())); QVBoxLayout *groupLayout = new QVBoxLayout; groupLayout->setContentsMargins(0, 0, 0, 0); groupLayout->setSpacing(0); d->m_mainView = new ScrollableDeclarativeView(this); d->m_mainView->setResizeMode(QDeclarativeView::SizeViewToRootObject); d->m_mainView->setVerticalScrollBarPolicy(Qt::ScrollBarAsNeeded); d->m_mainView->setBackgroundBrush(QBrush(Qt::white)); d->m_mainView->setAlignment(Qt::AlignLeft | Qt::AlignTop); d->m_mainView->setFocus(); MouseWheelResizer *resizer = new MouseWheelResizer(this); connect(resizer,SIGNAL(mouseWheelMoved(int,int,int)), this, SLOT(mouseWheelMoved(int,int,int))); d->m_mainView->viewport()->installEventFilter(resizer); QHBoxLayout *toolsLayout = new QHBoxLayout; d->m_timebar = new QDeclarativeView(this); d->m_timebar->setResizeMode(QDeclarativeView::SizeRootObjectToView); d->m_timebar->setSizePolicy(QSizePolicy::Expanding, QSizePolicy::Fixed); d->m_timebar->setFixedHeight(24); d->m_overview = new QDeclarativeView(this); 
d->m_overview->setResizeMode(QDeclarativeView::SizeRootObjectToView); d->m_overview->setSizePolicy(QSizePolicy::Expanding, QSizePolicy::Fixed); d->m_overview->setMaximumHeight(50); d->m_zoomToolbar = createZoomToolbar(); d->m_zoomToolbar->move(0, d->m_timebar->height()); d->m_zoomToolbar->setVisible(false); toolsLayout->addWidget(createToolbar()); toolsLayout->addWidget(d->m_timebar); emit enableToolbar(false); groupLayout->addLayout(toolsLayout); groupLayout->addWidget(d->m_mainView); groupLayout->addWidget(d->m_overview); setLayout(groupLayout); d->m_profilerTool = profilerTool; d->m_viewContainer = container; d->m_profilerDataModel = model; connect(d->m_profilerDataModel, SIGNAL(stateChanged()), this, SLOT(profilerDataModelStateChanged())); d->m_mainView->rootContext()->setContextProperty(QLatin1String("qmlProfilerDataModel"), d->m_profilerDataModel); d->m_overview->rootContext()->setContextProperty(QLatin1String("qmlProfilerDataModel"), d->m_profilerDataModel); d->m_profilerState = profilerState; connect(d->m_profilerState, SIGNAL(stateChanged()), this, SLOT(profilerStateChanged())); connect(d->m_profilerState, SIGNAL(clientRecordingChanged()), this, SLOT(clientRecordingChanged())); connect(d->m_profilerState, SIGNAL(serverRecordingChanged()), this, SLOT(serverRecordingChanged())); // Minimum height: 5 rows of 20 pixels + scrollbar of 50 pixels + 20 pixels margin setMinimumHeight(170); d->m_currentZoomLevel = 0; } QmlProfilerTraceView::~QmlProfilerTraceView() { delete d; } ///////////////////////////////////////////////////////// // Initialize widgets void QmlProfilerTraceView::reset() { d->m_mainView->rootContext()->setContextProperty(QLatin1String("zoomControl"), d->m_zoomControl); d->m_timebar->rootContext()->setContextProperty(QLatin1String("zoomControl"), d->m_zoomControl); d->m_overview->rootContext()->setContextProperty(QLatin1String("zoomControl"), d->m_zoomControl); d->m_timebar->setSource(QUrl(QLatin1String("qrc:/qmlprofiler/TimeDisplay.qml"))); 
d->m_overview->setSource(QUrl(QLatin1String("qrc:/qmlprofiler/Overview.qml"))); d->m_mainView->setSource(QUrl(QLatin1String("qrc:/qmlprofiler/MainView.qml"))); QGraphicsObject *rootObject = d->m_mainView->rootObject(); rootObject->setProperty("width", QVariant(width())); rootObject->setProperty("candidateHeight", QVariant(height() - d->m_timebar->height() - d->m_overview->height())); connect(rootObject, SIGNAL(updateCursorPosition()), this, SLOT(updateCursorPosition())); connect(rootObject, SIGNAL(updateRangeButton()), this, SLOT(updateRangeButton())); connect(rootObject, SIGNAL(updateLockButton()), this, SLOT(updateLockButton())); connect(this, SIGNAL(jumpToPrev()), rootObject, SLOT(prevEvent())); connect(this, SIGNAL(jumpToNext()), rootObject, SLOT(nextEvent())); connect(rootObject, SIGNAL(selectedEventChanged(int)), this, SIGNAL(selectedEventChanged(int))); connect(rootObject, SIGNAL(changeToolTip(QString)), this, SLOT(updateToolTip(QString))); connect(rootObject, SIGNAL(updateVerticalScroll(int)), this, SLOT(updateVerticalScroll(int))); } QWidget *QmlProfilerTraceView::createToolbar() { Utils::StyledBar *bar = new Utils::StyledBar(this); bar->setStyleSheet(QLatin1String("background: #9B9B9B")); bar->setSingleRow(true); bar->setFixedWidth(150); bar->setFixedHeight(24); QHBoxLayout *toolBarLayout = new QHBoxLayout(bar); toolBarLayout->setMargin(0); toolBarLayout->setSpacing(0); QToolButton *buttonPrev= new QToolButton; buttonPrev->setIcon(QIcon(QLatin1String(":/qmlprofiler/ico_prev.png"))); buttonPrev->setToolTip(tr("Jump to previous event")); connect(buttonPrev, SIGNAL(clicked()), this, SIGNAL(jumpToPrev())); connect(this, SIGNAL(enableToolbar(bool)), buttonPrev, SLOT(setEnabled(bool))); QToolButton *buttonNext= new QToolButton; buttonNext->setIcon(QIcon(QLatin1String(":/qmlprofiler/ico_next.png"))); buttonNext->setToolTip(tr("Jump to next event")); connect(buttonNext, SIGNAL(clicked()), this, SIGNAL(jumpToNext())); connect(this, SIGNAL(enableToolbar(bool)), 
buttonNext, SLOT(setEnabled(bool))); QToolButton *buttonZoomControls = new QToolButton; buttonZoomControls->setIcon(QIcon(QLatin1String(":/qmlprofiler/ico_zoom.png"))); buttonZoomControls->setToolTip(tr("Show zoom slider")); buttonZoomControls->setCheckable(true); buttonZoomControls->setChecked(false); connect(buttonZoomControls, SIGNAL(toggled(bool)), d->m_zoomToolbar, SLOT(setVisible(bool))); connect(this, SIGNAL(enableToolbar(bool)), buttonZoomControls, SLOT(setEnabled(bool))); d->m_buttonRange = new QToolButton; d->m_buttonRange->setIcon(QIcon(QLatin1String(":/qmlprofiler/ico_rangeselection.png"))); d->m_buttonRange->setToolTip(tr("Select range")); d->m_buttonRange->setCheckable(true); d->m_buttonRange->setChecked(false); connect(d->m_buttonRange, SIGNAL(clicked(bool)), this, SLOT(toggleRangeMode(bool))); connect(this, SIGNAL(enableToolbar(bool)), d->m_buttonRange, SLOT(setEnabled(bool))); connect(this, SIGNAL(rangeModeChanged(bool)), d->m_buttonRange, SLOT(setChecked(bool))); d->m_buttonLock = new QToolButton; d->m_buttonLock->setIcon(QIcon(QLatin1String(":/qmlprofiler/ico_selectionmode.png"))); d->m_buttonLock->setToolTip(tr("View event information on mouseover")); d->m_buttonLock->setCheckable(true); d->m_buttonLock->setChecked(false); connect(d->m_buttonLock, SIGNAL(clicked(bool)), this, SLOT(toggleLockMode(bool))); connect(this, SIGNAL(enableToolbar(bool)), d->m_buttonLock, SLOT(setEnabled(bool))); connect(this, SIGNAL(lockModeChanged(bool)), d->m_buttonLock, SLOT(setChecked(bool))); toolBarLayout->addWidget(buttonPrev); toolBarLayout->addWidget(buttonNext); toolBarLayout->addWidget(new Utils::StyledSeparator()); toolBarLayout->addWidget(buttonZoomControls); toolBarLayout->addWidget(new Utils::StyledSeparator()); toolBarLayout->addWidget(d->m_buttonRange); toolBarLayout->addWidget(d->m_buttonLock); return bar; } QWidget *QmlProfilerTraceView::createZoomToolbar() { Utils::StyledBar *bar = new Utils::StyledBar(this); 
bar->setStyleSheet(QLatin1String("background: #9B9B9B")); bar->setSingleRow(true); bar->setFixedWidth(150); bar->setFixedHeight(24); QHBoxLayout *toolBarLayout = new QHBoxLayout(bar); toolBarLayout->setMargin(0); toolBarLayout->setSpacing(0); QSlider *zoomSlider = new QSlider(Qt::Horizontal); zoomSlider->setFocusPolicy(Qt::NoFocus); zoomSlider->setRange(1, sliderTicks); zoomSlider->setInvertedAppearance(true); zoomSlider->setPageStep(sliderTicks/100); connect(this, SIGNAL(enableToolbar(bool)), zoomSlider, SLOT(setEnabled(bool))); connect(zoomSlider, SIGNAL(valueChanged(int)), this, SLOT(setZoomLevel(int))); connect(this, SIGNAL(zoomLevelChanged(int)), zoomSlider, SLOT(setValue(int))); zoomSlider->setStyleSheet(QLatin1String("\ QSlider:horizontal {\ background: qlineargradient(x1: 0, y1: 0, x2: 0, y2: 1, stop: 0 #444444, stop: 1 #5a5a5a);\ border: 1px #313131;\ height: 20px;\ margin: 0px 0px 0px 0px;\ }\ QSlider::add-page:horizontal {\ background: qlineargradient(x1: 0, y1: 0, x2: 0, y2: 1, stop: 0 #5a5a5a, stop: 1 #444444);\ border: 1px #313131;\ }\ QSlider::sub-page:horizontal {\ background: qlineargradient(x1: 0, y1: 0, x2: 0, y2: 1, stop: 0 #5a5a5a, stop: 1 #444444);\ border: 1px #313131;\ }\ ")); toolBarLayout->addWidget(zoomSlider); return bar; } ///////////////////////////////////////////////////////// bool QmlProfilerTraceView::hasValidSelection() const { QGraphicsObject *rootObject = d->m_mainView->rootObject(); if (rootObject) return rootObject->property("selectionRangeReady").toBool(); return false; } qint64 QmlProfilerTraceView::selectionStart() const { QGraphicsObject *rootObject = d->m_mainView->rootObject(); if (rootObject) return rootObject->property("selectionRangeStart").toLongLong(); return 0; } qint64 QmlProfilerTraceView::selectionEnd() const { QGraphicsObject *rootObject = d->m_mainView->rootObject(); if (rootObject) return rootObject->property("selectionRangeEnd").toLongLong(); return 0; } void QmlProfilerTraceView::clearDisplay() { 
d->m_zoomControl->setRange(0,0); updateVerticalScroll(0); d->m_mainView->rootObject()->setProperty("scrollY", QVariant(0)); QMetaObject::invokeMethod(d->m_mainView->rootObject(), "clearAll"); QMetaObject::invokeMethod(d->m_overview->rootObject(), "clearDisplay"); } void QmlProfilerTraceView::selectNextEventWithId(int eventId) { QGraphicsObject *rootObject = d->m_mainView->rootObject(); if (rootObject) QMetaObject::invokeMethod(rootObject, "selectNextWithId", Q_ARG(QVariant,QVariant(eventId))); } ///////////////////////////////////////////////////////// // Goto source location void QmlProfilerTraceView::updateCursorPosition() { QGraphicsObject *rootObject = d->m_mainView->rootObject(); emit gotoSourceLocation(rootObject->property("fileName").toString(), rootObject->property("lineNumber").toInt(), rootObject->property("columnNumber").toInt()); } ///////////////////////////////////////////////////////// // Toolbar buttons void QmlProfilerTraceView::toggleRangeMode(bool active) { QGraphicsObject *rootObject = d->m_mainView->rootObject(); bool rangeMode = rootObject->property("selectionRangeMode").toBool(); if (active != rangeMode) { if (active) d->m_buttonRange->setIcon(QIcon(QLatin1String(":/qmlprofiler/ico_rangeselected.png"))); else d->m_buttonRange->setIcon(QIcon(QLatin1String(":/qmlprofiler/ico_rangeselection.png"))); rootObject->setProperty("selectionRangeMode", QVariant(active)); } } void QmlProfilerTraceView::updateRangeButton() { bool rangeMode = d->m_mainView->rootObject()->property("selectionRangeMode").toBool(); if (rangeMode) d->m_buttonRange->setIcon(QIcon(QLatin1String(":/qmlprofiler/ico_rangeselected.png"))); else d->m_buttonRange->setIcon(QIcon(QLatin1String(":/qmlprofiler/ico_rangeselection.png"))); emit rangeModeChanged(rangeMode); } void QmlProfilerTraceView::toggleLockMode(bool active) { QGraphicsObject *rootObject = d->m_mainView->rootObject(); bool lockMode = !rootObject->property("selectionLocked").toBool(); if (active != lockMode) { 
rootObject->setProperty("selectionLocked", QVariant(!active)); rootObject->setProperty("selectedItem", QVariant(-1)); } } void QmlProfilerTraceView::updateLockButton() { bool lockMode = !d->m_mainView->rootObject()->property("selectionLocked").toBool(); emit lockModeChanged(lockMode); } //////////////////////////////////////////////////////// // Zoom control void QmlProfilerTraceView::setZoomLevel(int zoomLevel) { if (d->m_currentZoomLevel != zoomLevel && d->m_mainView->rootObject()) { QVariant newFactor = pow(qreal(zoomLevel) / qreal(sliderTicks), sliderExp); d->m_currentZoomLevel = zoomLevel; QMetaObject::invokeMethod(d->m_mainView->rootObject(), "updateWindowLength", Q_ARG(QVariant, newFactor)); } } void QmlProfilerTraceView::updateRange() { if (!d->m_profilerDataModel) return; qreal duration = d->m_zoomControl->endTime() - d->m_zoomControl->startTime(); if (duration <= 0) return; if (d->m_profilerDataModel->traceDuration() <= 0) return; int newLevel = pow(duration / d->m_profilerDataModel->traceDuration(), 1/sliderExp) * sliderTicks; if (d->m_currentZoomLevel != newLevel) { d->m_currentZoomLevel = newLevel; emit zoomLevelChanged(newLevel); } } void QmlProfilerTraceView::mouseWheelMoved(int mouseX, int mouseY, int wheelDelta) { Q_UNUSED(mouseY); QGraphicsObject *rootObject = d->m_mainView->rootObject(); if (rootObject) { QMetaObject::invokeMethod(rootObject, "wheelZoom", Q_ARG(QVariant, QVariant(mouseX)), Q_ARG(QVariant, QVariant(wheelDelta))); } } //////////////////////////////////////////////////////// void QmlProfilerTraceView::updateToolTip(const QString &text) { setToolTip(text); } void QmlProfilerTraceView::updateVerticalScroll(int newPosition) { d->m_mainView->verticalScrollBar()->setValue(newPosition); } void QmlProfilerTraceView::resizeEvent(QResizeEvent *event) { QWidget::resizeEvent(event); QGraphicsObject *rootObject = d->m_mainView->rootObject(); if (rootObject) { rootObject->setProperty("width", QVariant(event->size().width())); int newHeight = 
event->size().height() - d->m_timebar->height() - d->m_overview->height(); rootObject->setProperty("candidateHeight", QVariant(newHeight)); } emit resized(); } //////////////////////////////////////////////////////////////// // Context menu void QmlProfilerTraceView::contextMenuEvent(QContextMenuEvent *ev) { QMenu menu; QAction *viewAllAction = 0; QmlProfilerTool *profilerTool = qobject_cast<QmlProfilerTool *>(d->m_profilerTool); if (profilerTool) menu.addActions(profilerTool->profilerContextMenuActions()); menu.addSeparator(); QAction *getLocalStatsAction = menu.addAction(tr("Limit Events Pane to Current Range")); if (!d->m_viewContainer->hasValidSelection()) getLocalStatsAction->setEnabled(false); QAction *getGlobalStatsAction = menu.addAction(tr("Reset Events Pane")); if (d->m_viewContainer->hasGlobalStats()) getGlobalStatsAction->setEnabled(false); if (d->m_profilerDataModel->count() > 0) { menu.addSeparator(); viewAllAction = menu.addAction(tr("Reset Zoom")); } QAction *selectedAction = menu.exec(ev->globalPos()); if (selectedAction) { if (selectedAction == viewAllAction) { d->m_zoomControl->setRange( d->m_profilerDataModel->traceStartTime(), d->m_profilerDataModel->traceEndTime()); } if (selectedAction == getLocalStatsAction) { d->m_viewContainer->getStatisticsInRange( d->m_viewContainer->selectionStart(), d->m_viewContainer->selectionEnd()); } if (selectedAction == getGlobalStatsAction) { d->m_viewContainer->getStatisticsInRange( d->m_profilerDataModel->traceStartTime(), d->m_profilerDataModel->traceEndTime()); } } } ///////////////////////////////////////////////// // Tell QML the state of the profiler void QmlProfilerTraceView::setRecording(bool recording) { QGraphicsObject *rootObject = d->m_mainView->rootObject(); if (rootObject) rootObject->setProperty("recordingEnabled", QVariant(recording)); } void QmlProfilerTraceView::setAppKilled() { QGraphicsObject *rootObject = d->m_mainView->rootObject(); if (rootObject) 
rootObject->setProperty("appKilled",QVariant(true)); } //////////////////////////////////////////////////////////////// // Profiler State void QmlProfilerTraceView::profilerDataModelStateChanged() { switch (d->m_profilerDataModel->currentState()) { case QmlProfilerDataModel::Empty : emit enableToolbar(false); break; case QmlProfilerDataModel::AcquiringData : // nothing to be done break; case QmlProfilerDataModel::ProcessingData : // nothing to be done break; case QmlProfilerDataModel::Done : emit enableToolbar(true); break; default: break; } } void QmlProfilerTraceView::profilerStateChanged() { switch (d->m_profilerState->currentState()) { case QmlProfilerStateManager::AppKilled : { if (d->m_profilerDataModel->currentState() == QmlProfilerDataModel::AcquiringData) setAppKilled(); break; } default: // no special action needed for other states break; } } void QmlProfilerTraceView::clientRecordingChanged() { // nothing yet } void QmlProfilerTraceView::serverRecordingChanged() { setRecording(d->m_profilerState->serverRecording()); } } // namespace Internal } // namespace QmlProfiler
duythanhphan/qt-creator
src/plugins/qmlprofiler/qmlprofilertraceview.cpp
C++
lgpl-2.1
22,504
package railo.runtime.functions.dateTime;

import java.util.TimeZone;

import railo.runtime.PageContext;
import railo.runtime.exp.ExpressionException;
import railo.runtime.ext.function.Function;
import railo.runtime.tag.util.DeprecatedUtil;
import railo.runtime.type.dt.DateTime;
import railo.runtime.type.dt.DateTimeImpl;

/**
 * Implements the CFML Function now
 * @deprecated removed with no replacement
 */
public final class NowServer implements Function {

	/**
	 * Returns the current instant shifted by the difference between the page
	 * context's time zone offset and the JVM default time zone offset, so the
	 * value represents "server time" from the page's point of view.
	 *
	 * @param pc page context; supplies the request time zone and receives the
	 *           deprecation notice
	 * @return server time
	 * @throws ExpressionException
	 */
	public static DateTime call(PageContext pc) throws ExpressionException {
		// Flag every invocation of this deprecated function.
		DeprecatedUtil.function(pc, "nowServer");

		long currentMillis = System.currentTimeMillis();
		int contextOffset = pc.getTimeZone().getOffset(currentMillis);
		int defaultOffset = TimeZone.getDefault().getOffset(currentMillis);
		long shiftedMillis = currentMillis - (contextOffset - defaultOffset);

		return new DateTimeImpl(pc, shiftedMillis, false);
	}
}
JordanReiter/railo
railo-java/railo-core/src/railo/runtime/functions/dateTime/NowServer.java
Java
lgpl-2.1
870
/****************************************************************/
/* MOOSE - Multiphysics Object Oriented Simulation Environment  */
/*                                                              */
/*          All contents are licensed under LGPL V2.1           */
/*             See LICENSE for full restrictions                */
/****************************************************************/
#include "KKSACBulkF.h"

// Registers the input-file parameters for this kernel on top of the
// shared KKSACBulkBase parameters: the double-well height 'w' and the
// name of the double-well function material property g(eta).
template<>
InputParameters validParams<KKSACBulkF>()
{
  InputParameters params = validParams<KKSACBulkBase>();
  // params.addClassDescription("KKS model kernel for the Bulk Allen-Cahn. This operates on the order parameter 'eta' as the non-linear variable");
  params.addRequiredParam<Real>("w", "Double well height parameter");
  params.addParam<MaterialPropertyName>("g_name", "g", "Base name for the double well function g(eta)");
  return params;
}

// Caches 'w' and the first/second derivatives of g with respect to the
// order parameter eta (provided as material property derivatives).
KKSACBulkF::KKSACBulkF(const InputParameters & parameters) :
    KKSACBulkBase(parameters),
    _w(getParam<Real>("w")),
    _prop_dg(getMaterialPropertyDerivative<Real>("g_name", _eta_name)),
    _prop_d2g(getMaterialPropertyDerivative<Real>("g_name", _eta_name, _eta_name))
{
}

// Contribution of the free-energy term to dF/d(eta):
//   Residual:  -dh/deta * (Fa - Fb) + w * dg/deta
//   Jacobian:  derivative of the above w.r.t. eta, times the shape function.
Real
KKSACBulkF::computeDFDOP(PFFunctionType type)
{
  Real res = 0.0;
  // A1 is the free-energy difference between the two phases at this qp.
  Real A1 = _prop_Fa[_qp] - _prop_Fb[_qp];

  switch (type)
  {
    case Residual:
      return -_prop_dh[_qp] * A1 + _w * _prop_dg[_qp];

    case Jacobian:
    {
      res =   -_prop_d2h[_qp] * A1
            + _w * _prop_d2g[_qp];

      // the -\frac{dh}{d\eta}\left(\frac{dF_a}{d\eta}-\frac{dF_b}{d\eta}\right)
      // term is handled in KKSACBulkC!

      return _phi[_j][_qp] * res;
    }
  }

  // Reached only if an unknown PFFunctionType is passed in.
  mooseError("Invalid type passed in");
}

// Off-diagonal Jacobian w.r.t. a coupled variable 'jvar': the chain-rule
// term dh/deta * (dFa/dv - dFb/dv) for the mapped coupled variable.
Real
KKSACBulkF::computeQpOffDiagJacobian(unsigned int jvar)
{
  // get the coupled variable jvar is referring to; bail out with zero if
  // jvar is not one of the variables this kernel is coupled to.
  unsigned int cvar;
  if (!mapJvarToCvar(jvar, cvar))
    return 0.0;

  Real res =   _prop_dh[_qp]
             * (  (*_derivatives_Fa[cvar])[_qp]
                - (*_derivatives_Fb[cvar])[_qp])
             * _phi[_j][_qp];

  // NOTE(review): MOOSE off-diagonal Jacobians conventionally multiply by
  // the test function _test[_i][_qp]; the use of _test[_j][_qp] here looks
  // suspicious -- confirm against the upstream MOOSE phase_field module
  // before changing, as Jacobian conventions are framework-defined.
  return res * _test[_j][_qp];
}

// DEPRECATED CONSTRUCTOR
// Legacy two-argument constructor kept for backward compatibility with
// older MOOSE object-construction APIs; mirrors the primary constructor.
KKSACBulkF::KKSACBulkF(const std::string & deprecated_name, InputParameters parameters) :
    KKSACBulkBase(deprecated_name, parameters),
    _w(getParam<Real>("w")),
    _prop_dg(getMaterialPropertyDerivative<Real>("g_name", _eta_name)),
    _prop_d2g(getMaterialPropertyDerivative<Real>("g_name", _eta_name, _eta_name))
{
}
raghavaggarwal/moose
modules/phase_field/src/kernels/KKSACBulkF.C
C++
lgpl-2.1
2,419
// Boost.Geometry Index
//
// R-tree spatial query visitor implementation
//
// Copyright (c) 2011-2014 Adam Wulkiewicz, Lodz, Poland.
//
// This file was modified by Oracle on 2019-2021.
// Modifications copyright (c) 2019-2021 Oracle and/or its affiliates.
// Contributed and/or modified by Adam Wulkiewicz, on behalf of Oracle
//
// Use, modification and distribution is subject to the Boost Software License,
// Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)

#ifndef BOOST_GEOMETRY_INDEX_DETAIL_RTREE_VISITORS_SPATIAL_QUERY_HPP
#define BOOST_GEOMETRY_INDEX_DETAIL_RTREE_VISITORS_SPATIAL_QUERY_HPP

#include <boost/geometry/index/detail/rtree/node/node_elements.hpp>
#include <boost/geometry/index/detail/predicates.hpp>
#include <boost/geometry/index/parameters.hpp>

namespace boost { namespace geometry { namespace index {

namespace detail { namespace rtree { namespace visitors {

// Eager (all-at-once) spatial query: recursively walks the tree, pruning
// internal nodes whose bounds fail the predicates, and copies every
// matching value to the output iterator. Returns the number of matches.
template <typename MembersHolder, typename Predicates, typename OutIter>
struct spatial_query
{
    typedef typename MembersHolder::parameters_type parameters_type;
    typedef typename MembersHolder::translator_type translator_type;
    typedef typename MembersHolder::allocators_type allocators_type;

    typedef typename index::detail::strategy_type<parameters_type>::type strategy_type;

    typedef typename MembersHolder::node node;
    typedef typename MembersHolder::internal_node internal_node;
    typedef typename MembersHolder::leaf leaf;

    typedef typename allocators_type::node_pointer node_pointer;
    typedef typename allocators_type::size_type size_type;

    spatial_query(MembersHolder const& members, Predicates const& p, OutIter out_it)
        : m_tr(members.translator())
        , m_strategy(index::detail::get_strategy(members.parameters()))
        , m_pred(p)
        , m_out_iter(out_it)
        , m_found_count(0)
    {}

    // Recursive traversal entry point. 'reverse_level' counts down from the
    // leafs level: > 0 means 'ptr' is an internal node, 0 means it is a leaf.
    size_type apply(node_pointer ptr, size_type reverse_level)
    {
        namespace id = index::detail;
        if (reverse_level > 0)
        {
            internal_node& n = rtree::get<internal_node>(*ptr);
            // traverse nodes meeting predicates
            for (auto const& p : rtree::elements(n))
            {
                // if node meets predicates (0 is dummy value)
                if (id::predicates_check<id::bounds_tag>(m_pred, 0, p.first, m_strategy))
                {
                    apply(p.second, reverse_level - 1);
                }
            }
        }
        else
        {
            leaf& n = rtree::get<leaf>(*ptr);
            // get all values meeting predicates
            for (auto const& v : rtree::elements(n))
            {
                // if value meets predicates
                if (id::predicates_check<id::value_tag>(m_pred, v, m_tr(v), m_strategy))
                {
                    *m_out_iter = v;
                    ++m_out_iter;
                    ++m_found_count;
                }
            }
        }

        return m_found_count;
    }

    // Convenience overload: start at the root, at the stored leafs level.
    size_type apply(MembersHolder const& members)
    {
        return apply(members.root, members.leafs_level);
    }

private:
    translator_type const& m_tr;
    strategy_type m_strategy;

    Predicates const& m_pred;
    OutIter m_out_iter;

    // running count of values emitted so far
    size_type m_found_count;
};

// Incremental (iterator-style) spatial query: keeps an explicit stack of
// internal-node ranges and a cursor into the current leaf, so matches can
// be produced one at a time via initialize()/increment()/dereference().
template <typename MembersHolder, typename Predicates>
class spatial_query_incremental
{
    typedef typename MembersHolder::value_type value_type;
    typedef typename MembersHolder::parameters_type parameters_type;
    typedef typename MembersHolder::translator_type translator_type;
    typedef typename MembersHolder::allocators_type allocators_type;

    typedef typename index::detail::strategy_type<parameters_type>::type strategy_type;

    typedef typename MembersHolder::node node;
    typedef typename MembersHolder::internal_node internal_node;
    typedef typename MembersHolder::leaf leaf;

    typedef typename allocators_type::size_type size_type;
    typedef typename allocators_type::const_reference const_reference;
    typedef typename allocators_type::node_pointer node_pointer;

    typedef typename rtree::elements_type<internal_node>::type::const_iterator internal_iterator;
    typedef typename rtree::elements_type<leaf>::type leaf_elements;
    typedef typename rtree::elements_type<leaf>::type::const_iterator leaf_iterator;

    // One stack frame of the traversal: a half-open child range of an
    // internal node plus the reverse level of those children.
    struct internal_data
    {
        internal_data(internal_iterator f, internal_iterator l, size_type rl)
            : first(f), last(l), reverse_level(rl)
        {}

        internal_iterator first;
        internal_iterator last;
        size_type reverse_level;
    };

public:
    spatial_query_incremental()
        : m_translator(nullptr)
//        , m_strategy()
//        , m_pred()
        , m_values(nullptr)
        , m_current()
    {}

    spatial_query_incremental(Predicates const& p)
        : m_translator(nullptr)
//        , m_strategy()
        , m_pred(p)
        , m_values(nullptr)
        , m_current()
    {}

    spatial_query_incremental(MembersHolder const& members, Predicates const& p)
        : m_translator(::boost::addressof(members.translator()))
        , m_strategy(index::detail::get_strategy(members.parameters()))
        , m_pred(p)
        , m_values(nullptr)
        , m_current()
    {}

    // Returns the current matching value; only valid when not at end.
    const_reference dereference() const
    {
        BOOST_GEOMETRY_INDEX_ASSERT(m_values, "not dereferencable");
        return *m_current;
    }

    // Seeds the traversal from the root and advances to the first match.
    void initialize(MembersHolder const& members)
    {
        apply(members.root, members.leafs_level);
        search_value();
    }

    // Advances past the current value to the next match (or end).
    void increment()
    {
        ++m_current;
        search_value();
    }

    // End is encoded as "no current leaf" (m_values == 0).
    bool is_end() const { return 0 == m_values; }

    friend bool operator==(spatial_query_incremental const& l, spatial_query_incremental const& r)
    {
        return (l.m_values == r.m_values) && (0 == l.m_values || l.m_current == r.m_current);
    }

private:
    // Descends one node: internal nodes push their child range onto the
    // stack, a leaf becomes the current value range.
    void apply(node_pointer ptr, size_type reverse_level)
    {
        namespace id = index::detail;

        if (reverse_level > 0)
        {
            internal_node& n = rtree::get<internal_node>(*ptr);
            auto const& elements = rtree::elements(n);
            m_internal_stack.push_back(internal_data(elements.begin(), elements.end(), reverse_level - 1));
        }
        else
        {
            leaf& n = rtree::get<leaf>(*ptr);
            m_values = ::boost::addressof(rtree::elements(n));
            m_current = rtree::elements(n).begin();
        }
    }

    // Drives the traversal forward until the cursor rests on a value that
    // satisfies the predicates, or until the stack is exhausted (end).
    void search_value()
    {
        namespace id = index::detail;
        for (;;)
        {
            // if leaf is choosen, move to the next value in leaf
            if ( m_values )
            {
                if ( m_current != m_values->end() )
                {
                    // return if next value is found
                    value_type const& v = *m_current;
                    if (id::predicates_check<id::value_tag>(m_pred, v, (*m_translator)(v), m_strategy))
                    {
                        return;
                    }

                    ++m_current;
                }
                // no more values, clear current leaf
                else
                {
                    m_values = 0;
                }
            }
            // if leaf isn't choosen, move to the next leaf
            else
            {
                // return if there is no more nodes to traverse
                if (m_internal_stack.empty())
                {
                    return;
                }

                internal_data& current_data = m_internal_stack.back();

                // no more children in current node, remove it from stack
                if (current_data.first == current_data.last)
                {
                    m_internal_stack.pop_back();
                    continue;
                }

                internal_iterator it = current_data.first;
                ++current_data.first;

                // next node is found, push it to the stack
                if (id::predicates_check<id::bounds_tag>(m_pred, 0, it->first, m_strategy))
                {
                    apply(it->second, current_data.reverse_level);
                }
            }
        }
    }

    const translator_type * m_translator;

    strategy_type m_strategy;

    Predicates m_pred;

    std::vector<internal_data> m_internal_stack;
    const leaf_elements * m_values;
    leaf_iterator m_current;
};

}}} // namespace detail::rtree::visitors

}}} // namespace boost::geometry::index

#endif // BOOST_GEOMETRY_INDEX_DETAIL_RTREE_VISITORS_SPATIAL_QUERY_HPP
qianqians/abelkhan
cpp_component/3rdparty/boost/boost/geometry/index/detail/rtree/visitors/spatial_query.hpp
C++
lgpl-2.1
8,657
#
# Secret Labs' Regular Expression Engine
#
# various symbols used by the regular expression engine.
# run this script to update the _sre include files!
#
# Copyright (c) 1998-2001 by Secret Labs AB.  All rights reserved.
#
# See the sre.py file for information on usage and redistribution.
#

"""Internal support module for sre"""

# update when constants are added or removed
MAGIC = 20031017

# max code word in this release
MAXREPEAT = 65535

# SRE standard exception (access as sre.error)
# should this really be here?

class error(Exception):
    """Exception raised for invalid regular expressions."""
    pass

# operators

FAILURE = "failure"
SUCCESS = "success"

ANY = "any"
ANY_ALL = "any_all"
ASSERT = "assert"
ASSERT_NOT = "assert_not"
AT = "at"
BIGCHARSET = "bigcharset"
BRANCH = "branch"
CALL = "call"
CATEGORY = "category"
CHARSET = "charset"
GROUPREF = "groupref"
GROUPREF_IGNORE = "groupref_ignore"
GROUPREF_EXISTS = "groupref_exists"
IN = "in"
IN_IGNORE = "in_ignore"
INFO = "info"
JUMP = "jump"
LITERAL = "literal"
LITERAL_IGNORE = "literal_ignore"
MARK = "mark"
MAX_REPEAT = "max_repeat"
MAX_UNTIL = "max_until"
MIN_REPEAT = "min_repeat"
MIN_UNTIL = "min_until"
NEGATE = "negate"
NOT_LITERAL = "not_literal"
NOT_LITERAL_IGNORE = "not_literal_ignore"
RANGE = "range"
REPEAT = "repeat"
REPEAT_ONE = "repeat_one"
SUBPATTERN = "subpattern"
MIN_REPEAT_ONE = "min_repeat_one"

# positions
AT_BEGINNING = "at_beginning"
AT_BEGINNING_LINE = "at_beginning_line"
AT_BEGINNING_STRING = "at_beginning_string"
AT_BOUNDARY = "at_boundary"
AT_NON_BOUNDARY = "at_non_boundary"
AT_END = "at_end"
AT_END_LINE = "at_end_line"
AT_END_STRING = "at_end_string"
AT_LOC_BOUNDARY = "at_loc_boundary"
AT_LOC_NON_BOUNDARY = "at_loc_non_boundary"
AT_UNI_BOUNDARY = "at_uni_boundary"
AT_UNI_NON_BOUNDARY = "at_uni_non_boundary"

# categories
CATEGORY_DIGIT = "category_digit"
CATEGORY_NOT_DIGIT = "category_not_digit"
CATEGORY_SPACE = "category_space"
CATEGORY_NOT_SPACE = "category_not_space"
CATEGORY_WORD = "category_word"
CATEGORY_NOT_WORD = "category_not_word"
CATEGORY_LINEBREAK = "category_linebreak"
CATEGORY_NOT_LINEBREAK = "category_not_linebreak"
CATEGORY_LOC_WORD = "category_loc_word"
CATEGORY_LOC_NOT_WORD = "category_loc_not_word"
CATEGORY_UNI_DIGIT = "category_uni_digit"
CATEGORY_UNI_NOT_DIGIT = "category_uni_not_digit"
CATEGORY_UNI_SPACE = "category_uni_space"
CATEGORY_UNI_NOT_SPACE = "category_uni_not_space"
CATEGORY_UNI_WORD = "category_uni_word"
CATEGORY_UNI_NOT_WORD = "category_uni_not_word"
CATEGORY_UNI_LINEBREAK = "category_uni_linebreak"
CATEGORY_UNI_NOT_LINEBREAK = "category_uni_not_linebreak"

# Ordered opcode list; the position of each name becomes its numeric opcode.
OPCODES = [

    # failure=0 success=1 (just because it looks better that way :-)
    FAILURE, SUCCESS,

    ANY, ANY_ALL,
    ASSERT, ASSERT_NOT,
    AT,
    BRANCH,
    CALL,
    CATEGORY,
    CHARSET, BIGCHARSET,
    GROUPREF, GROUPREF_EXISTS, GROUPREF_IGNORE,
    IN, IN_IGNORE,
    INFO,
    JUMP,
    LITERAL, LITERAL_IGNORE,
    MARK,
    MAX_UNTIL,
    MIN_UNTIL,
    NOT_LITERAL, NOT_LITERAL_IGNORE,
    NEGATE,
    RANGE,
    REPEAT,
    REPEAT_ONE,
    SUBPATTERN,
    MIN_REPEAT_ONE

]

ATCODES = [
    AT_BEGINNING, AT_BEGINNING_LINE, AT_BEGINNING_STRING, AT_BOUNDARY,
    AT_NON_BOUNDARY, AT_END, AT_END_LINE, AT_END_STRING,
    AT_LOC_BOUNDARY, AT_LOC_NON_BOUNDARY, AT_UNI_BOUNDARY,
    AT_UNI_NON_BOUNDARY
]

CHCODES = [
    CATEGORY_DIGIT, CATEGORY_NOT_DIGIT, CATEGORY_SPACE,
    CATEGORY_NOT_SPACE, CATEGORY_WORD, CATEGORY_NOT_WORD,
    CATEGORY_LINEBREAK, CATEGORY_NOT_LINEBREAK, CATEGORY_LOC_WORD,
    CATEGORY_LOC_NOT_WORD, CATEGORY_UNI_DIGIT, CATEGORY_UNI_NOT_DIGIT,
    CATEGORY_UNI_SPACE, CATEGORY_UNI_NOT_SPACE, CATEGORY_UNI_WORD,
    CATEGORY_UNI_NOT_WORD, CATEGORY_UNI_LINEBREAK,
    CATEGORY_UNI_NOT_LINEBREAK
]

def makedict(list):
    """Map each item of *list* to its index, preserving list order."""
    return {item: i for i, item in enumerate(list)}

# Replace the name lists with name -> numeric-code dictionaries.
OPCODES = makedict(OPCODES)
ATCODES = makedict(ATCODES)
CHCODES = makedict(CHCODES)

# replacement operations for "ignore case" mode
OP_IGNORE = {
    GROUPREF: GROUPREF_IGNORE,
    IN: IN_IGNORE,
    LITERAL: LITERAL_IGNORE,
    NOT_LITERAL: NOT_LITERAL_IGNORE
}

AT_MULTILINE = {
    AT_BEGINNING: AT_BEGINNING_LINE,
    AT_END: AT_END_LINE
}

AT_LOCALE = {
    AT_BOUNDARY: AT_LOC_BOUNDARY,
    AT_NON_BOUNDARY: AT_LOC_NON_BOUNDARY
}

AT_UNICODE = {
    AT_BOUNDARY: AT_UNI_BOUNDARY,
    AT_NON_BOUNDARY: AT_UNI_NON_BOUNDARY
}

CH_LOCALE = {
    CATEGORY_DIGIT: CATEGORY_DIGIT,
    CATEGORY_NOT_DIGIT: CATEGORY_NOT_DIGIT,
    CATEGORY_SPACE: CATEGORY_SPACE,
    CATEGORY_NOT_SPACE: CATEGORY_NOT_SPACE,
    CATEGORY_WORD: CATEGORY_LOC_WORD,
    CATEGORY_NOT_WORD: CATEGORY_LOC_NOT_WORD,
    CATEGORY_LINEBREAK: CATEGORY_LINEBREAK,
    CATEGORY_NOT_LINEBREAK: CATEGORY_NOT_LINEBREAK
}

CH_UNICODE = {
    CATEGORY_DIGIT: CATEGORY_UNI_DIGIT,
    CATEGORY_NOT_DIGIT: CATEGORY_UNI_NOT_DIGIT,
    CATEGORY_SPACE: CATEGORY_UNI_SPACE,
    CATEGORY_NOT_SPACE: CATEGORY_UNI_NOT_SPACE,
    CATEGORY_WORD: CATEGORY_UNI_WORD,
    CATEGORY_NOT_WORD: CATEGORY_UNI_NOT_WORD,
    CATEGORY_LINEBREAK: CATEGORY_UNI_LINEBREAK,
    CATEGORY_NOT_LINEBREAK: CATEGORY_UNI_NOT_LINEBREAK
}

# flags
SRE_FLAG_TEMPLATE = 1 # template mode (disable backtracking)
SRE_FLAG_IGNORECASE = 2 # case insensitive
SRE_FLAG_LOCALE = 4 # honour system locale
SRE_FLAG_MULTILINE = 8 # treat target as multiline string
SRE_FLAG_DOTALL = 16 # treat target as a single string
SRE_FLAG_UNICODE = 32 # use unicode "locale"
SRE_FLAG_VERBOSE = 64 # ignore whitespace and comments
SRE_FLAG_DEBUG = 128 # debugging
SRE_FLAG_ASCII = 256 # use ascii "locale"

# flags for INFO primitive
SRE_INFO_PREFIX = 1 # has prefix
SRE_INFO_LITERAL = 2 # entire pattern is literal (given by prefix)
SRE_INFO_CHARSET = 4 # pattern starts with character from given set

if __name__ == "__main__":
    def dump(f, d, prefix):
        # Emit one #define per entry, sorted by numeric code.
        # NOTE: dict views have no .sort() on Python 3; sorted() works on
        # both Python 2 and 3 (the original d.items().sort() raised
        # AttributeError under Python 3).
        for k, v in sorted(d.items(), key=lambda a: a[1]):
            f.write("#define %s_%s %s\n" % (prefix, k.upper(), v))

    # Regenerate the C header consumed by _sre.c.
    with open("sre_constants.h", "w") as f:
        f.write("""\
/*
 * Secret Labs' Regular Expression Engine
 *
 * regular expression matching engine
 *
 * NOTE: This file is generated by sre_constants.py.  If you need
 * to change anything in here, edit sre_constants.py and run it.
 *
 * Copyright (c) 1997-2001 by Secret Labs AB.  All rights reserved.
 *
 * See the _sre.c file for information on usage and redistribution.
 */

""")

        f.write("#define SRE_MAGIC %d\n" % MAGIC)

        dump(f, OPCODES, "SRE_OP")
        dump(f, ATCODES, "SRE")
        dump(f, CHCODES, "SRE")

        f.write("#define SRE_FLAG_TEMPLATE %d\n" % SRE_FLAG_TEMPLATE)
        f.write("#define SRE_FLAG_IGNORECASE %d\n" % SRE_FLAG_IGNORECASE)
        f.write("#define SRE_FLAG_LOCALE %d\n" % SRE_FLAG_LOCALE)
        f.write("#define SRE_FLAG_MULTILINE %d\n" % SRE_FLAG_MULTILINE)
        f.write("#define SRE_FLAG_DOTALL %d\n" % SRE_FLAG_DOTALL)
        f.write("#define SRE_FLAG_UNICODE %d\n" % SRE_FLAG_UNICODE)
        f.write("#define SRE_FLAG_VERBOSE %d\n" % SRE_FLAG_VERBOSE)

        f.write("#define SRE_INFO_PREFIX %d\n" % SRE_INFO_PREFIX)
        f.write("#define SRE_INFO_LITERAL %d\n" % SRE_INFO_LITERAL)
        f.write("#define SRE_INFO_CHARSET %d\n" % SRE_INFO_CHARSET)

    print("done")
harmy/kbengine
kbe/res/scripts/common/Lib/sre_constants.py
Python
lgpl-3.0
7,444
<?php

/**
 * Regression coverage for fetching the owning user of an API key struct.
 *
 * @group regression
 * @covers ApiKeys_ApiKeyStruct::validSecret
 * User: dinies
 * Date: 21/06/16
 * Time: 15.50
 */
class GetUserApiKeyTest extends AbstractTest {

    protected $uid;

    // Fixture bag: holds the factory-created user and its API key.
    private $test_data;

    /**
     * Builds a user via the factory and an API key bound to that user's uid.
     */
    function setup() {
        $this->test_data = new StdClass();
        $this->test_data->user = Factory_User::create();
        $this->test_data->api_key = Factory_ApiKey::create(
            [
                'uid' => $this->test_data->user->uid,
            ]
        );
    }

    /**
     * getUser() on a key with a valid uid must return the matching
     * Users_UserStruct with every field equal to the fixture user.
     */
    public function test_getUser_success() {
        $expected = $this->test_data->user;
        $fetched = $this->test_data->api_key->getUser();

        $this->assertTrue($fetched instanceof Users_UserStruct);
        $this->assertEquals("{$expected->uid}", $fetched->uid);
        $this->assertEquals("{$expected->email}", $fetched->email);
        $this->assertEquals("{$expected->salt}", $fetched->salt);
        $this->assertEquals("{$expected->pass}", $fetched->pass);
        // create_date must be a well-formed "YYYY-MM-DD HH:MM:SS" timestamp...
        $this->assertRegExp(
            '/^[0-9]{4}-[0-9]{2}-[0-9]{2} [0-2]?[0-9]:[0-5][0-9]:[0-5][0-9]$/',
            $fetched->create_date
        );
        // ...and identical to the fixture's value.
        $this->assertEquals("{$expected->create_date}", $fetched->create_date);
        $this->assertEquals("{$expected->first_name}", $fetched->first_name);
        $this->assertEquals("{$expected->last_name}", $fetched->last_name);
    }

    /**
     * getUser() on a key whose uid points at no existing user returns null.
     */
    public function test_getUser_failure() {
        // Shift the uid away from the fixture user so no row matches.
        $this->test_data->api_key->uid += 1000;
        $this->assertNull($this->test_data->api_key->getUser());
    }
}
riccio82/MateCat
test/unit/Structs/TestApiKeyStruct/GetUserApiKeyTest.php
PHP
lgpl-3.0
1,600
// // System.Web.UI.HtmlControls.HtmlSelect.cs // // Author: // Dick Porter <dick@ximian.com> // // Copyright (C) 2005-2010 Novell, Inc (http://www.novell.com) // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
// using System.Web.UI.WebControls; using System.Web.Util; using System.ComponentModel; using System.Collections; using System.Collections.Specialized; using System.Globalization; using System.Security.Permissions; namespace System.Web.UI.HtmlControls { // CAS [AspNetHostingPermission (SecurityAction.LinkDemand, Level = AspNetHostingPermissionLevel.Minimal)] [AspNetHostingPermission (SecurityAction.InheritanceDemand, Level = AspNetHostingPermissionLevel.Minimal)] // attributes [DefaultEvent ("ServerChange")] [ValidationProperty ("Value")] [ControlBuilder (typeof (HtmlSelectBuilder))] [SupportsEventValidation] public class HtmlSelect : HtmlContainerControl, IPostBackDataHandler, IParserAccessor { static readonly object EventServerChange = new object (); DataSourceView _boundDataSourceView; bool requiresDataBinding; bool _initialized; object datasource; ListItemCollection items; public HtmlSelect () : base ("select") { } [DefaultValue ("")] [DesignerSerializationVisibility (DesignerSerializationVisibility.Hidden)] [WebSysDescription("")] [WebCategory("Data")] public virtual string DataMember { get { string member = Attributes["datamember"]; if (member == null) { return (String.Empty); } return (member); } set { if (value == null) { Attributes.Remove ("datamember"); } else { Attributes["datamember"] = value; } } } [DefaultValue (null)] [DesignerSerializationVisibility (DesignerSerializationVisibility.Hidden)] [WebSysDescription("")] [WebCategory("Data")] public virtual object DataSource { get { return (datasource); } set { if ((value != null) && !(value is IEnumerable) && !(value is IListSource)) { throw new ArgumentException (); } datasource = value; } } [DefaultValue ("")] public virtual string DataSourceID { get { return ViewState.GetString ("DataSourceID", ""); } set { if (DataSourceID == value) return; ViewState ["DataSourceID"] = value; if (_boundDataSourceView != null) _boundDataSourceView.DataSourceViewChanged -= OnDataSourceViewChanged; _boundDataSourceView = 
null; OnDataPropertyChanged (); } } [DefaultValue ("")] [WebSysDescription("")] [WebCategory("Data")] public virtual string DataTextField { get { string text = Attributes["datatextfield"]; if (text == null) { return (String.Empty); } return (text); } set { if (value == null) { Attributes.Remove ("datatextfield"); } else { Attributes["datatextfield"] = value; } } } [DefaultValue ("")] [WebSysDescription("")] [WebCategory("Data")] public virtual string DataValueField { get { string value = Attributes["datavaluefield"]; if (value == null) { return (String.Empty); } return (value); } set { if (value == null) { Attributes.Remove ("datavaluefield"); } else { Attributes["datavaluefield"] = value; } } } public override string InnerHtml { get { throw new NotSupportedException (); } set { throw new NotSupportedException (); } } public override string InnerText { get { throw new NotSupportedException (); } set { throw new NotSupportedException (); } } protected bool IsBoundUsingDataSourceID { get { return (DataSourceID.Length != 0); } } [DesignerSerializationVisibility (DesignerSerializationVisibility.Hidden)] [Browsable (false)] public ListItemCollection Items { get { if (items == null) { items = new ListItemCollection (); if (IsTrackingViewState) ((IStateManager) items).TrackViewState (); } return (items); } } [DefaultValue ("")] [DesignerSerializationVisibility (DesignerSerializationVisibility.Hidden)] [WebSysDescription("")] [WebCategory("Behavior")] public bool Multiple { get { string multi = Attributes["multiple"]; if (multi == null) { return (false); } return (true); } set { if (value == false) { Attributes.Remove ("multiple"); } else { Attributes["multiple"] = "multiple"; } } } [DefaultValue ("")] [DesignerSerializationVisibility (DesignerSerializationVisibility.Hidden)] [WebSysDescription("")] [WebCategory("Behavior")] public string Name { get { return (UniqueID); } set { /* Do nothing */ } } protected bool RequiresDataBinding { get { return requiresDataBinding; } 
set { requiresDataBinding = value; } } [DesignerSerializationVisibility (DesignerSerializationVisibility.Hidden)] [Browsable (false)] public virtual int SelectedIndex { get { /* Make sure Items has been initialised */ ListItemCollection listitems = Items; for (int i = 0; i < listitems.Count; i++) { if (listitems[i].Selected) { return (i); } } /* There is always a selected item in * non-multiple mode, if the size is * <= 1 */ if (!Multiple && Size <= 1) { /* Select the first item */ if (listitems.Count > 0) { /* And make it stick * if there is * anything in the * list */ listitems[0].Selected = true; } return (0); } return (-1); } set { ClearSelection (); if (value == -1 || items == null) { return; } if (value < 0 || value >= items.Count) { throw new ArgumentOutOfRangeException ("value"); } items[value].Selected = true; } } /* "internal infrastructure" according to the docs, * but has some documentation in 2.0 */ protected virtual int[] SelectedIndices { get { ArrayList selected = new ArrayList (); int count = Items.Count; for (int i = 0; i < count; i++) { if (Items [i].Selected) { selected.Add (i); } } return ((int[])selected.ToArray (typeof (int))); } } [DesignerSerializationVisibility (DesignerSerializationVisibility.Hidden)] public int Size { get { string size = Attributes["size"]; if (size == null) { return (-1); } return (Int32.Parse (size, Helpers.InvariantCulture)); } set { if (value == -1) { Attributes.Remove ("size"); } else { Attributes["size"] = value.ToString (); } } } [DesignerSerializationVisibility (DesignerSerializationVisibility.Hidden)] public string Value { get { int sel = SelectedIndex; if (sel >= 0 && sel < Items.Count) { return (Items[sel].Value); } return (String.Empty); } set { int sel = Items.IndexOf (value); if (sel >= 0) { SelectedIndex = sel; } } } [WebSysDescription("")] [WebCategory("Action")] public event EventHandler ServerChange { add { Events.AddHandler (EventServerChange, value); } remove { Events.RemoveHandler (EventServerChange, 
value); } } protected override void AddParsedSubObject (object obj) { if (!(obj is ListItem)) { throw new HttpException ("HtmlSelect can only contain ListItem"); } Items.Add ((ListItem)obj); base.AddParsedSubObject (obj); } /* "internal infrastructure" according to the docs, * but has some documentation in 2.0 */ protected virtual void ClearSelection () { if (items == null) { return; } int count = items.Count; for (int i = 0; i < count; i++) { items[i].Selected = false; } } protected override ControlCollection CreateControlCollection () { return (base.CreateControlCollection ()); } protected void EnsureDataBound () { if (IsBoundUsingDataSourceID && RequiresDataBinding) DataBind (); } protected virtual IEnumerable GetData () { if (DataSource != null && IsBoundUsingDataSourceID) throw new HttpException ("Control bound using both DataSourceID and DataSource properties."); if (DataSource != null) return DataSourceResolver.ResolveDataSource (DataSource, DataMember); if (!IsBoundUsingDataSourceID) return null; IEnumerable result = null; DataSourceView boundDataSourceView = ConnectToDataSource (); boundDataSourceView.Select (DataSourceSelectArguments.Empty, delegate (IEnumerable data) { result = data; }); return result; } protected override void LoadViewState (object savedState) { object first = null; object second = null; Pair pair = savedState as Pair; if (pair != null) { first = pair.First; second = pair.Second; } base.LoadViewState (first); if (second != null) { IStateManager manager = Items as IStateManager; manager.LoadViewState (second); } } protected override void OnDataBinding (EventArgs e) { base.OnDataBinding (e); /* Make sure Items has been initialised */ ListItemCollection listitems = Items; listitems.Clear (); IEnumerable list = GetData (); if (list == null) return; foreach (object container in list) { string text = null; string value = null; if (DataTextField == String.Empty && DataValueField == String.Empty) { text = container.ToString (); value = text; } 
else { if (DataTextField != String.Empty) { text = DataBinder.Eval (container, DataTextField).ToString (); } if (DataValueField != String.Empty) { value = DataBinder.Eval (container, DataValueField).ToString (); } else { value = text; } if (text == null && value != null) { text = value; } } if (text == null) { text = String.Empty; } if (value == null) { value = String.Empty; } ListItem item = new ListItem (text, value); listitems.Add (item); } RequiresDataBinding = false; IsDataBound = true; } protected virtual void OnDataPropertyChanged () { if (_initialized) RequiresDataBinding = true; } protected virtual void OnDataSourceViewChanged (object sender, EventArgs e) { RequiresDataBinding = true; } protected internal override void OnInit (EventArgs e) { base.OnInit (e); Page.PreLoad += new EventHandler (OnPagePreLoad); } protected virtual void OnPagePreLoad (object sender, EventArgs e) { Initialize (); } protected internal override void OnLoad (EventArgs e) { if (!_initialized) Initialize (); base.OnLoad (e); } void Initialize () { _initialized = true; if (!IsDataBound) RequiresDataBinding = true; if (IsBoundUsingDataSourceID) ConnectToDataSource (); } bool IsDataBound{ get { return ViewState.GetBool ("_DataBound", false); } set { ViewState ["_DataBound"] = value; } } DataSourceView ConnectToDataSource () { if (_boundDataSourceView != null) return _boundDataSourceView; /* verify that the data source exists and is an IDataSource */ object ctrl = null; Page page = Page; if (page != null) ctrl = page.FindControl (DataSourceID); if (ctrl == null || !(ctrl is IDataSource)) { string format; if (ctrl == null) format = "DataSourceID of '{0}' must be the ID of a control of type IDataSource. A control with ID '{1}' could not be found."; else format = "DataSourceID of '{0}' must be the ID of a control of type IDataSource. 
'{1}' is not an IDataSource."; throw new HttpException (String.Format (format, ID, DataSourceID)); } _boundDataSourceView = ((IDataSource)ctrl).GetView (String.Empty); _boundDataSourceView.DataSourceViewChanged += OnDataSourceViewChanged; return _boundDataSourceView; } protected internal override void OnPreRender (EventArgs e) { EnsureDataBound (); base.OnPreRender (e); Page page = Page; if (page != null && !Disabled) { page.RegisterRequiresPostBack (this); page.RegisterEnabledControl (this); } } protected virtual void OnServerChange (EventArgs e) { EventHandler handler = (EventHandler)Events[EventServerChange]; if (handler != null) { handler (this, e); } } protected override void RenderAttributes (HtmlTextWriter w) { Page page = Page; if (page != null) page.ClientScript.RegisterForEventValidation (UniqueID); /* If there is no "name" attribute, * LoadPostData doesn't work... */ w.WriteAttribute ("name", Name); Attributes.Remove ("name"); /* Don't render the databinding attributes */ Attributes.Remove ("datamember"); Attributes.Remove ("datatextfield"); Attributes.Remove ("datavaluefield"); base.RenderAttributes (w); } protected internal override void RenderChildren (HtmlTextWriter w) { base.RenderChildren (w); if (items == null) return; w.WriteLine (); bool done_sel = false; int count = items.Count; for (int i = 0; i < count; i++) { ListItem item = items[i]; w.Indent++; /* Write the <option> elements this * way so that the output HTML matches * the ms version (can't make * HtmlTextWriterTag.Option an inline * element, cos that breaks other * stuff.) 
*/ w.WriteBeginTag ("option"); if (item.Selected && !done_sel) { w.WriteAttribute ("selected", "selected"); if (!Multiple) { done_sel = true; } } w.WriteAttribute ("value", item.Value, true); if (item.HasAttributes) { AttributeCollection attrs = item.Attributes; foreach (string key in attrs.Keys) w.WriteAttribute (key, HttpUtility.HtmlAttributeEncode (attrs [key])); } w.Write (HtmlTextWriter.TagRightChar); w.Write (HttpUtility.HtmlEncode(item.Text)); w.WriteEndTag ("option"); w.WriteLine (); w.Indent--; } } protected override object SaveViewState () { object first = null; object second = null; first = base.SaveViewState (); IStateManager manager = items as IStateManager; if (manager != null) { second = manager.SaveViewState (); } if (first == null && second == null) return (null); return new Pair (first, second); } /* "internal infrastructure" according to the docs, * but has some documentation in 2.0 */ protected virtual void Select (int[] selectedIndices) { if (items == null) { return; } ClearSelection (); int count = items.Count; foreach (int i in selectedIndices) { if (i >= 0 && i < count) { items[i].Selected = true; } } } protected override void TrackViewState () { base.TrackViewState (); IStateManager manager = items as IStateManager; if (manager != null) { manager.TrackViewState (); } } protected virtual void RaisePostDataChangedEvent () { OnServerChange (EventArgs.Empty); } protected virtual bool LoadPostData (string postDataKey, NameValueCollection postCollection) { /* postCollection contains the values that are * selected */ string[] values = postCollection.GetValues (postDataKey); bool changed = false; if (values != null) { if (Multiple) { /* We have a set of * selections. 
We can't just * set the new list, because * we need to know if the set * has changed from last time */ int value_len = values.Length; int[] old_sel = SelectedIndices; int[] new_sel = new int[value_len]; int old_sel_len = old_sel.Length; for (int i = 0; i < value_len; i++) { new_sel[i] = Items.IndexOf (values[i]); if (old_sel_len != value_len || old_sel[i] != new_sel[i]) { changed = true; } } if (changed) { Select (new_sel); } } else { /* Just take the first one */ int sel = Items.IndexOf (values[0]); if (sel != SelectedIndex) { SelectedIndex = sel; changed = true; } } } if (changed) ValidateEvent (postDataKey, String.Empty); return (changed); } bool IPostBackDataHandler.LoadPostData (string postDataKey, NameValueCollection postCollection) { return LoadPostData (postDataKey, postCollection); } void IPostBackDataHandler.RaisePostDataChangedEvent () { RaisePostDataChangedEvent (); } } }
edwinspire/VSharp
class/System.Web/System.Web.UI.HtmlControls/HtmlSelect.cs
C#
lgpl-3.0
17,515
// // ServiceCredentials.cs // // Author: // Atsushi Enomoto <atsushi@ximian.com> // // Copyright (C) 2005 Novell, Inc. http://www.novell.com // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
// using System; using System.Collections.ObjectModel; using System.IdentityModel.Selectors; using System.ServiceModel.Channels; using System.ServiceModel.Security; using System.ServiceModel.Security.Tokens; using System.ServiceModel.Description; using System.ServiceModel.Dispatcher; namespace System.ServiceModel.Description { public class ServiceCredentials : SecurityCredentialsManager, IServiceBehavior { public ServiceCredentials () { } protected ServiceCredentials (ServiceCredentials other) { initiator = other.initiator.Clone (); peer = other.peer.Clone (); recipient = other.recipient.Clone (); userpass = other.userpass.Clone (); windows = other.windows.Clone (); issued_token = other.issued_token.Clone (); secure_conversation = other.secure_conversation.Clone (); } X509CertificateInitiatorServiceCredential initiator = new X509CertificateInitiatorServiceCredential (); PeerCredential peer = new PeerCredential (); X509CertificateRecipientServiceCredential recipient = new X509CertificateRecipientServiceCredential (); UserNamePasswordServiceCredential userpass = new UserNamePasswordServiceCredential (); WindowsServiceCredential windows = new WindowsServiceCredential (); IssuedTokenServiceCredential issued_token = new IssuedTokenServiceCredential (); SecureConversationServiceCredential secure_conversation = new SecureConversationServiceCredential (); public X509CertificateInitiatorServiceCredential ClientCertificate { get { return initiator; } } public IssuedTokenServiceCredential IssuedTokenAuthentication { get { return issued_token; } } public PeerCredential Peer { get { return peer; } } public SecureConversationServiceCredential SecureConversationAuthentication { get { return secure_conversation; } } public X509CertificateRecipientServiceCredential ServiceCertificate { get { return recipient; } } public UserNamePasswordServiceCredential UserNameAuthentication { get { return userpass; } } public WindowsServiceCredential WindowsAuthentication { get { return windows; 
} } public ServiceCredentials Clone () { ServiceCredentials ret = CloneCore (); if (ret.GetType () != GetType ()) throw new NotImplementedException ("CloneCore() must be implemented to return an instance of the same type in this custom ServiceCredentials type."); return ret; } protected virtual ServiceCredentials CloneCore () { return new ServiceCredentials (this); } public override SecurityTokenManager CreateSecurityTokenManager () { return new ServiceCredentialsSecurityTokenManager (this); } void IServiceBehavior.AddBindingParameters ( ServiceDescription description, ServiceHostBase serviceHostBase, Collection<ServiceEndpoint> endpoints, BindingParameterCollection parameters) { parameters.Add (this); } void IServiceBehavior.ApplyDispatchBehavior ( ServiceDescription description, ServiceHostBase serviceHostBase) { // do nothing } [MonoTODO] void IServiceBehavior.Validate ( ServiceDescription description, ServiceHostBase serviceHostBase) { // unlike MSDN description, it does not throw NIE. } } }
edwinspire/VSharp
class/System.ServiceModel/System.ServiceModel.Description/ServiceCredentials.cs
C#
lgpl-3.0
4,458
"use strict"; var express = require('express'); var less = require('less-middleware'); function HttpServer(port, staticServedPath, logRequest) { this.port = port; this.staticServedPath = staticServedPath; this.logRequest = (typeof logRequest === "undefined") ? true : logRequest; } HttpServer.prototype.start = function(fn) { console.log("Starting server"); var self = this; var app = express(); self.app = app; if(self.logRequest) { app.use(function (req, res, next) { console.log(req.method, req.url); next(); }); } app.use('/', express.static(self.staticServedPath)); self.server = app.listen(self.port, function () { console.log("Server started on port", self.port); if (fn !== undefined) fn(); }); }; HttpServer.prototype.stop = function() { console.log("Stopping server"); var self = this; self.server.close(); }; module.exports = HttpServer;
o-schneider/heroesdesk-front-web
src/server/HttpServer.js
JavaScript
lgpl-3.0
917
using System; using System.Collections.Generic; using DoxygenWrapper.Wrappers.Compounds.Types; using System.Xml; namespace DoxygenWrapper.Wrappers.Compounds { public class CompoundMember: Compound { protected override void OnParse(XmlNode _node) { base.OnParse(_node); mCompoundType = new CompoundType(_node["type"], _node["name"].Value); } public CompoundType CompoundType { get { return mCompoundType; } } private CompoundType mCompoundType; } }
blunted2night/MyGUI
Wrappers/DoxygenWrapper/DoxygenWrapper/Wrappers/Compounds/CompoundMember.cs
C#
lgpl-3.0
508
# encoding: utf-8 import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding model 'Result' db.create_table('taxonomy_result', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('title', self.gf('django.db.models.fields.CharField')(max_length=100)), ('content_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['contenttypes.ContentType'])), ('object_id', self.gf('django.db.models.fields.PositiveIntegerField')()), )) db.send_create_signal('taxonomy', ['Result']) # Adding model 'Tag' db.create_table('taxonomy_tag', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('title', self.gf('django.db.models.fields.CharField')(unique=True, max_length=100)), ('slug', self.gf('django.db.models.fields.SlugField')(unique=True, max_length=50, db_index=True)), )) db.send_create_signal('taxonomy', ['Tag']) # Adding model 'Category' db.create_table('taxonomy_category', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('parent', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='children', null=True, to=orm['taxonomy.Category'])), ('title', self.gf('django.db.models.fields.CharField')(unique=True, max_length=100)), ('slug', self.gf('django.db.models.fields.SlugField')(unique=True, max_length=50, db_index=True)), )) db.send_create_signal('taxonomy', ['Category']) # Adding model 'Vote' db.create_table('taxonomy_vote', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('content_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['contenttypes.ContentType'])), ('object_id', self.gf('django.db.models.fields.PositiveIntegerField')()), ('owner', self.gf('django.db.models.fields.related.ForeignKey')(related_name='poll_votes', to=orm['auth.User'])), ('created', 
self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)), )) db.send_create_signal('taxonomy', ['Vote']) # Adding unique constraint on 'Vote', fields ['owner', 'content_type', 'object_id'] db.create_unique('taxonomy_vote', ['owner_id', 'content_type_id', 'object_id']) def backwards(self, orm): # Removing unique constraint on 'Vote', fields ['owner', 'content_type', 'object_id'] db.delete_unique('taxonomy_vote', ['owner_id', 'content_type_id', 'object_id']) # Deleting model 'Result' db.delete_table('taxonomy_result') # Deleting model 'Tag' db.delete_table('taxonomy_tag') # Deleting model 'Category' db.delete_table('taxonomy_category') # Deleting model 'Vote' db.delete_table('taxonomy_vote') models = { 'auth.group': { 'Meta': {'object_name': 'Group'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, 'auth.permission': { 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, 'auth.user': { 'Meta': {'object_name': 'User'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': 
('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, 'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'taxonomy.category': { 'Meta': {'ordering': "('title',)", 'object_name': 'Category'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['taxonomy.Category']"}), 'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}), 'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}) }, 'taxonomy.result': { 
'Meta': {'object_name': 'Result'}, 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'taxonomy.tag': { 'Meta': {'ordering': "('title',)", 'object_name': 'Tag'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}), 'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}) }, 'taxonomy.vote': { 'Meta': {'unique_together': "(('owner', 'content_type', 'object_id'),)", 'object_name': 'Vote'}, 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}), 'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'poll_votes'", 'to': "orm['auth.User']"}) } } complete_apps = ['taxonomy']
zuck/prometeo-erp
core/taxonomy/migrations/0001_initial.py
Python
lgpl-3.0
8,429
/* * World Calendars * https://github.com/alexcjohnson/world-calendars * * Batch-converted from kbwood/calendars * Many thanks to Keith Wood and all of the contributors to the original project! * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ /* http://keith-wood.name/calendars.html Traditional Chinese localisation for Taiwanese calendars for jQuery v2.0.2. Written by Ressol (ressol@gmail.com). */ var main = require('../main'); main.calendars.taiwan.prototype.regionalOptions['zh-TW'] = { name: 'Taiwan', epochs: ['BROC', 'ROC'], monthNames: ['一月','二月','三月','四月','五月','六月', '七月','八月','九月','十月','十一月','十二月'], monthNamesShort: ['一','二','三','四','五','六', '七','八','九','十','十一','十二'], dayNames: ['星期日','星期一','星期二','星期三','星期四','星期五','星期六'], dayNamesShort: ['周日','周一','周二','周三','周四','周五','周六'], dayNamesMin: ['日','一','二','三','四','五','六'], digits: null, dateFormat: 'yyyy/mm/dd', firstDay: 1, isRTL: false };
andrealmeid/ToT
node_modules/world-calendars/dist/calendars/taiwan-zh-TW.js
JavaScript
unlicense
1,220
/*** *mbsupr.c - Convert string upper case (MBCS) * * Copyright (c) 1985-1997, Microsoft Corporation. All rights reserved. * *Purpose: * Convert string upper case (MBCS) * *******************************************************************************/ #ifdef _MBCS #if defined (_WIN32) #include <awint.h> #endif /* defined (_WIN32) */ #include <mtdll.h> #include <cruntime.h> #include <ctype.h> #include <mbdata.h> #include <mbstring.h> #include <mbctype.h> /*** * _mbsupr - Convert string upper case (MBCS) * *Purpose: * Converts all the lower case characters in a string * to upper case in place. Handles MBCS chars correctly. * *Entry: * unsigned char *string = pointer to string * *Exit: * Returns a pointer to the input string; no error return. * *Exceptions: * *******************************************************************************/ unsigned char * __cdecl _mbsupr( unsigned char *string ) { unsigned char *cp; _mlock(_MB_CP_LOCK); for (cp=string; *cp; cp++) { if (_ISLEADBYTE(*cp)) { #if defined (_WIN32) int retval; unsigned char ret[4]; if ((retval = __crtLCMapStringA(__mblcid, LCMAP_UPPERCASE, cp, 2, ret, 2, __mbcodepage, TRUE)) == 0) { _munlock(_MB_CP_LOCK); return NULL; } *cp = ret[0]; if (retval > 1) *(++cp) = ret[1]; #else /* defined (_WIN32) */ int mbval = ((*cp) << 8) + *(cp+1); cp++; if ( mbval >= _MBLOWERLOW1 && mbval <= _MBLOWERHIGH1 ) *cp -= _MBCASEDIFF1; else if (mbval >= _MBLOWERLOW2 && mbval <= _MBLOWERHIGH2 ) *cp -= _MBCASEDIFF2; #endif /* defined (_WIN32) */ } else /* single byte, macro version */ *cp = (unsigned char) _mbbtoupper(*cp); } _munlock(_MB_CP_LOCK); return string ; } #endif /* _MBCS */
hyller/CodeLibrary
The_Standard_C_Library/MBSUPR.C
C++
unlicense
2,615
<!-- To change this template, choose Tools | Templates and open the template in the editor. --> <body> <legend><a href="<?php echo base_url(); ?>" >Home</a> | <a href="<?php echo base_url(); ?>reviewer" >Refresh</a> | <?php echo anchor('reviewer/samples_for_review/'.$reviewer_id,'Worksheets Uploaded For Review'); ?> </legend> <hr /> <!-- Menu Start --> </div> <!-- End Menu --> <div> <table id = "refsubs"> <thead> <tr> <th>File Name</th> <th>Lab Reference No</th> <th>Download </th> <th>Status</th> <th>Upload</th> </tr> </thead> <tbody> <td></td> <td></td> <td></td> <td></td> <td></td> </tbody> </table> <script type="text/javascript"> $('#refsubs').dataTable({ "bJQueryUI": true }).rowGrouping({ iGroupingColumnIndex: 1, sGroupingColumnSortDirection: "asc", iGroupingOrderByColumnIndex: 1, //bExpandableGrouping:true, //bExpandSingleGroup: true, iExpandGroupOffset: -1 }); </script> </div> </body> </html>
johnotaalo/NQCL_LIMS
application/views/reviewer_v_tr.php
PHP
unlicense
1,312
'use strict'; import EventMap from 'eventmap'; import Log from './log'; var audioTypes = { 'mp3': 'audio/mpeg', 'wav': 'audio/wav', 'ogg': 'audio/ogg' }; var imageTypes = { 'png': 'image/png', 'jpg': 'image/jpg', 'gif': 'image/gif' }; class AssetLoader extends EventMap { constructor(assets) { super(); this.assets = assets || {}; this.files = {}; this.maxAssets = 0; this.assetsLoaded = 0; this.percentLoaded = 0; this.cache = {}; } start() { // TODO: Something was wrong here. So it's deleted right now } } export default AssetLoader;
maxwerr/gamebox
src/assetloader.js
JavaScript
unlicense
599
/** * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ #include <aws/docdb/model/DescribePendingMaintenanceActionsResult.h> #include <aws/core/utils/xml/XmlSerializer.h> #include <aws/core/AmazonWebServiceResult.h> #include <aws/core/utils/StringUtils.h> #include <aws/core/utils/logging/LogMacros.h> #include <utility> using namespace Aws::DocDB::Model; using namespace Aws::Utils::Xml; using namespace Aws::Utils::Logging; using namespace Aws::Utils; using namespace Aws; DescribePendingMaintenanceActionsResult::DescribePendingMaintenanceActionsResult() { } DescribePendingMaintenanceActionsResult::DescribePendingMaintenanceActionsResult(const Aws::AmazonWebServiceResult<XmlDocument>& result) { *this = result; } DescribePendingMaintenanceActionsResult& DescribePendingMaintenanceActionsResult::operator =(const Aws::AmazonWebServiceResult<XmlDocument>& result) { const XmlDocument& xmlDocument = result.GetPayload(); XmlNode rootNode = xmlDocument.GetRootElement(); XmlNode resultNode = rootNode; if (!rootNode.IsNull() && (rootNode.GetName() != "DescribePendingMaintenanceActionsResult")) { resultNode = rootNode.FirstChild("DescribePendingMaintenanceActionsResult"); } if(!resultNode.IsNull()) { XmlNode pendingMaintenanceActionsNode = resultNode.FirstChild("PendingMaintenanceActions"); if(!pendingMaintenanceActionsNode.IsNull()) { XmlNode pendingMaintenanceActionsMember = pendingMaintenanceActionsNode.FirstChild("ResourcePendingMaintenanceActions"); while(!pendingMaintenanceActionsMember.IsNull()) { m_pendingMaintenanceActions.push_back(pendingMaintenanceActionsMember); pendingMaintenanceActionsMember = pendingMaintenanceActionsMember.NextNode("ResourcePendingMaintenanceActions"); } } XmlNode markerNode = resultNode.FirstChild("Marker"); if(!markerNode.IsNull()) { m_marker = Aws::Utils::Xml::DecodeEscapedXmlText(markerNode.GetText()); } } if (!rootNode.IsNull()) { XmlNode responseMetadataNode = 
rootNode.FirstChild("ResponseMetadata"); m_responseMetadata = responseMetadataNode; AWS_LOGSTREAM_DEBUG("Aws::DocDB::Model::DescribePendingMaintenanceActionsResult", "x-amzn-request-id: " << m_responseMetadata.GetRequestId() ); } return *this; }
jt70471/aws-sdk-cpp
aws-cpp-sdk-docdb/source/model/DescribePendingMaintenanceActionsResult.cpp
C++
apache-2.0
2,358
package examples.Bricklet.Moisture; import com.tinkerforge.BrickletMoisture; import com.tinkerforge.IPConnection; public class ExampleSimple { private static final String host = "localhost"; private static final int port = 4223; private static final String UID = "XYZ"; // Change to your UID // Note: To make the examples code cleaner we do not handle exceptions. Exceptions you // might normally want to catch are described in the documentation public static void main(String args[]) throws Exception { IPConnection ipcon = new IPConnection(); // Create IP connection BrickletMoisture al = new BrickletMoisture(UID, ipcon); // Create device object ipcon.connect(host, port); // Connect to brickd // Don't use device before ipcon is connected // Get current moisture value int moisture = al.getMoistureValue(); // Can throw com.tinkerforge.TimeoutException System.out.println("Moisture Value: " + moisture); System.console().readLine("Press key to exit\n"); ipcon.disconnect(); } }
jaggr2/ch.bfh.fbi.mobiComp.17herz
com.tinkerforge/src/examples/Bricklet/Moisture/ExampleSimple.java
Java
apache-2.0
1,020
/** * Copyright 2015 LinkedIn Corp. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. */ package wherehows.ingestion.converters; import com.linkedin.events.metadata.DatasetIdentifier; import com.linkedin.events.metadata.DeploymentDetail; import com.linkedin.events.metadata.MetadataChangeEvent; import java.util.Collections; import org.testng.annotations.Test; import static org.testng.Assert.*; public class KafkaLogCompactionConverterTest { @Test public void testConvert() { MetadataChangeEvent event = new MetadataChangeEvent(); event.datasetIdentifier = new DatasetIdentifier(); event.datasetIdentifier.dataPlatformUrn = "urn:li:dataPlatform:kafka"; DeploymentDetail deployment = new DeploymentDetail(); deployment.additionalDeploymentInfo = Collections.singletonMap("EI", "compact"); event.deploymentInfo = Collections.singletonList(deployment); MetadataChangeEvent newEvent = new KafkaLogCompactionConverter().convert(event); assertEquals(newEvent.datasetIdentifier.dataPlatformUrn, "urn:li:dataPlatform:kafka-lc"); } @Test public void testNotConvert() { KafkaLogCompactionConverter converter = new KafkaLogCompactionConverter(); MetadataChangeEvent event = new MetadataChangeEvent(); event.datasetIdentifier = new DatasetIdentifier(); event.datasetIdentifier.dataPlatformUrn = "foo"; DeploymentDetail deployment = new DeploymentDetail(); deployment.additionalDeploymentInfo = Collections.singletonMap("EI", "compact"); event.deploymentInfo = Collections.singletonList(deployment); MetadataChangeEvent newEvent = converter.convert(event); 
assertEquals(newEvent.datasetIdentifier.dataPlatformUrn, "foo"); event.datasetIdentifier.dataPlatformUrn = "urn:li:dataPlatform:kafka"; event.deploymentInfo = null; newEvent = converter.convert(event); assertEquals(newEvent.datasetIdentifier.dataPlatformUrn, "urn:li:dataPlatform:kafka"); event.datasetIdentifier.dataPlatformUrn = "urn:li:dataPlatform:kafka"; deployment.additionalDeploymentInfo = Collections.singletonMap("EI", "delete"); event.deploymentInfo = Collections.singletonList(deployment); newEvent = converter.convert(event); assertEquals(newEvent.datasetIdentifier.dataPlatformUrn, "urn:li:dataPlatform:kafka"); } }
alyiwang/WhereHows
wherehows-ingestion/src/test/java/wherehows/ingestion/converters/KafkaLogCompactionConverterTest.java
Java
apache-2.0
2,728
/* Copyright (C) 2013 Interactive Brokers LLC. All rights reserved. This code is subject to the terms
 * and conditions of the IB API Non-Commercial License or the IB API Commercial License, as applicable. */
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;

namespace IBApi
{
    /// <summary>
    /// Client-side error codes and message prefixes used by the API socket client.
    /// Codes 5xx are raised locally (connection/serialization failures), not by TWS.
    /// </summary>
    public class EClientErrors
    {
        public static readonly CodeMsgPair AlreadyConnected = new CodeMsgPair(501, "Already Connected.");
        public static readonly CodeMsgPair CONNECT_FAIL = new CodeMsgPair(502, "Couldn't connect to TWS. Confirm that \"Enable ActiveX and Socket Clients\" is enabled on the TWS \"Configure->API\" menu.");
        public static readonly CodeMsgPair UPDATE_TWS = new CodeMsgPair(503, "The TWS is out of date and must be upgraded.");
        public static readonly CodeMsgPair NOT_CONNECTED = new CodeMsgPair(504, "Not connected");
        public static readonly CodeMsgPair UNKNOWN_ID = new CodeMsgPair(505, "Fatal Error: Unknown message id.");
        public static readonly CodeMsgPair FAIL_SEND_REQMKT = new CodeMsgPair(510, "Request Market Data Sending Error - ");
        public static readonly CodeMsgPair FAIL_SEND_CANMKT = new CodeMsgPair(511, "Cancel Market Data Sending Error - ");
        public static readonly CodeMsgPair FAIL_SEND_ORDER = new CodeMsgPair(512, "Order Sending Error - ");
        public static readonly CodeMsgPair FAIL_SEND_ACCT = new CodeMsgPair(513, "Account Update Request Sending Error -");
        public static readonly CodeMsgPair FAIL_SEND_EXEC = new CodeMsgPair(514, "Request For Executions Sending Error -");
        public static readonly CodeMsgPair FAIL_SEND_CORDER = new CodeMsgPair(515, "Cancel Order Sending Error -");
        public static readonly CodeMsgPair FAIL_SEND_OORDER = new CodeMsgPair(516, "Request Open Order Sending Error -");
        public static readonly CodeMsgPair UNKNOWN_CONTRACT = new CodeMsgPair(517, "Unknown contract. Verify the contract details supplied.");
        public static readonly CodeMsgPair FAIL_SEND_REQCONTRACT = new CodeMsgPair(518, "Request Contract Data Sending Error - ");
        public static readonly CodeMsgPair FAIL_SEND_REQMKTDEPTH = new CodeMsgPair(519, "Request Market Depth Sending Error - ");
        public static readonly CodeMsgPair FAIL_SEND_CANMKTDEPTH = new CodeMsgPair(520, "Cancel Market Depth Sending Error - ");
        public static readonly CodeMsgPair FAIL_SEND_SERVER_LOG_LEVEL = new CodeMsgPair(521, "Set Server Log Level Sending Error - ");
        public static readonly CodeMsgPair FAIL_SEND_FA_REQUEST = new CodeMsgPair(522, "FA Information Request Sending Error - ");
        public static readonly CodeMsgPair FAIL_SEND_FA_REPLACE = new CodeMsgPair(523, "FA Information Replace Sending Error - ");
        public static readonly CodeMsgPair FAIL_SEND_REQSCANNER = new CodeMsgPair(524, "Request Scanner Subscription Sending Error - ");
        public static readonly CodeMsgPair FAIL_SEND_CANSCANNER = new CodeMsgPair(525, "Cancel Scanner Subscription Sending Error - ");
        public static readonly CodeMsgPair FAIL_SEND_REQSCANNERPARAMETERS = new CodeMsgPair(526, "Request Scanner Parameter Sending Error - ");
        public static readonly CodeMsgPair FAIL_SEND_REQHISTDATA = new CodeMsgPair(527, "Request Historical Data Sending Error - ");
        // BUGFIX: message previously duplicated the 527 "Request Historical Data" text.
        public static readonly CodeMsgPair FAIL_SEND_CANHISTDATA = new CodeMsgPair(528, "Cancel Historical Data Sending Error - ");
        public static readonly CodeMsgPair FAIL_SEND_REQRTBARS = new CodeMsgPair(529, "Request Real-time Bar Data Sending Error - ");
        public static readonly CodeMsgPair FAIL_SEND_CANRTBARS = new CodeMsgPair(530, "Cancel Real-time Bar Data Sending Error - ");
        public static readonly CodeMsgPair FAIL_SEND_REQCURRTIME = new CodeMsgPair(531, "Request Current Time Sending Error - ");
        public static readonly CodeMsgPair FAIL_SEND_REQFUNDDATA = new CodeMsgPair(532, "Request Fundamental Data Sending Error - ");
        public static readonly CodeMsgPair FAIL_SEND_CANFUNDDATA = new CodeMsgPair(533, "Cancel Fundamental Data Sending Error - ");
        public static readonly CodeMsgPair FAIL_SEND_REQCALCIMPLIEDVOLAT = new CodeMsgPair(534, "Request Calculate Implied Volatility Sending Error - ");
        public static readonly CodeMsgPair FAIL_SEND_REQCALCOPTIONPRICE = new CodeMsgPair(535, "Request Calculate Option Price Sending Error - ");
        public static readonly CodeMsgPair FAIL_SEND_CANCALCIMPLIEDVOLAT = new CodeMsgPair(536, "Cancel Calculate Implied Volatility Sending Error - ");
        public static readonly CodeMsgPair FAIL_SEND_CANCALCOPTIONPRICE = new CodeMsgPair(537, "Cancel Calculate Option Price Sending Error - ");
        public static readonly CodeMsgPair FAIL_SEND_REQGLOBALCANCEL = new CodeMsgPair(538, "Request Global Cancel Sending Error - ");
        public static readonly CodeMsgPair FAIL_SEND_REQMARKETDATATYPE = new CodeMsgPair(539, "Request Market Data Type Sending Error - ");
        public static readonly CodeMsgPair FAIL_SEND_REQPOSITIONS = new CodeMsgPair(540, "Request Positions Sending Error - ");
        public static readonly CodeMsgPair FAIL_SEND_CANPOSITIONS = new CodeMsgPair(541, "Cancel Positions Sending Error - ");
        public static readonly CodeMsgPair FAIL_SEND_REQACCOUNTDATA = new CodeMsgPair(542, "Request Account Data Sending Error - ");
        public static readonly CodeMsgPair FAIL_SEND_CANACCOUNTDATA = new CodeMsgPair(543, "Cancel Account Data Sending Error - ");
        public static readonly CodeMsgPair FAIL_SEND_VERIFYREQUEST = new CodeMsgPair(544, "Verify Request Sending Error - ");
        public static readonly CodeMsgPair FAIL_SEND_VERIFYMESSAGE = new CodeMsgPair(545, "Verify Message Sending Error - ");
        public static readonly CodeMsgPair FAIL_SEND_QUERYDISPLAYGROUPS = new CodeMsgPair(546, "Query Display Groups Sending Error - ");
        public static readonly CodeMsgPair FAIL_SEND_SUBSCRIBETOGROUPEVENTS = new CodeMsgPair(547, "Subscribe To Group Events Sending Error - ");
        public static readonly CodeMsgPair FAIL_SEND_UPDATEDISPLAYGROUP = new CodeMsgPair(548, "Update Display Group Sending Error - ");
        public static readonly CodeMsgPair FAIL_SEND_UNSUBSCRIBEFROMGROUPEVENTS = new CodeMsgPair(549, "Unsubscribe From Group Events Sending Error - ");
        public static readonly CodeMsgPair FAIL_GENERIC = new CodeMsgPair(-1, "Specific error message needs to be given for these requests! ");
    }

    /// <summary>
    /// Immutable pairing of a numeric error code with its message text.
    /// </summary>
    public class CodeMsgPair
    {
        /// <param name="code">Numeric error code (see <see cref="EClientErrors"/>).</param>
        /// <param name="message">Message text or prefix associated with the code.</param>
        public CodeMsgPair(int code, string message)
        {
            Code = code;
            Message = message;
        }

        /// <summary>The numeric error code.</summary>
        public int Code { get; private set; }

        /// <summary>The message text for this code.</summary>
        public string Message { get; private set; }
    }
}
AvengersPy/MyPairs
testIBPython/csharpclient/EClientErrors.cs
C#
apache-2.0
6,913
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.fineract.template.api;

import java.io.IOException;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.UriInfo;

import org.apache.fineract.commands.domain.CommandWrapper;
import org.apache.fineract.commands.service.CommandWrapperBuilder;
import org.apache.fineract.commands.service.PortfolioCommandSourceWritePlatformService;
import org.apache.fineract.infrastructure.core.api.ApiRequestParameterHelper;
import org.apache.fineract.infrastructure.core.data.CommandProcessingResult;
import org.apache.fineract.infrastructure.core.serialization.ApiRequestJsonSerializationSettings;
import org.apache.fineract.infrastructure.core.serialization.DefaultToApiJsonSerializer;
import org.apache.fineract.infrastructure.security.service.PlatformSecurityContext;
import org.apache.fineract.template.data.TemplateData;
import org.apache.fineract.template.domain.Template;
import org.apache.fineract.template.domain.TemplateEntity;
import org.apache.fineract.template.domain.TemplateType;
import org.apache.fineract.template.service.TemplateDomainService;
import org.apache.fineract.template.service.TemplateMergeService;
import org.codehaus.jackson.map.ObjectMapper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;

/**
 * JAX-RS resource exposing CRUD and merge operations for {@link Template}s
 * under the "/templates" path. Reads go straight to {@link TemplateDomainService};
 * writes are dispatched through the command-source pipeline
 * ({@link PortfolioCommandSourceWritePlatformService}).
 */
@Path("/templates")
@Consumes({ MediaType.APPLICATION_JSON })
@Produces({ MediaType.APPLICATION_JSON })
@Component
@Scope("singleton")
public class TemplatesApiResource {

    // Whitelisted response fields for list/detail serialization of templates.
    private final Set<String> RESPONSE_TEMPLATES_DATA_PARAMETERS = new HashSet<>(Arrays.asList("id"));
    // Whitelisted response fields for TemplateData serialization.
    private final Set<String> RESPONSE_TEMPLATE_DATA_PARAMETERS = new HashSet<>(Arrays.asList("id", "entities", "types", "template"));
    // Permission resource name checked on every read endpoint.
    private final String RESOURCE_NAME_FOR_PERMISSION = "template";

    private final PlatformSecurityContext context;
    private final DefaultToApiJsonSerializer<Template> toApiJsonSerializer;
    private final DefaultToApiJsonSerializer<TemplateData> templateDataApiJsonSerializer;
    private final ApiRequestParameterHelper apiRequestParameterHelper;
    private final TemplateDomainService templateService;
    private final TemplateMergeService templateMergeService;
    private final PortfolioCommandSourceWritePlatformService commandsSourceWritePlatformService;

    @Autowired
    public TemplatesApiResource(final PlatformSecurityContext context,
            final DefaultToApiJsonSerializer<Template> toApiJsonSerializer,
            final DefaultToApiJsonSerializer<TemplateData> templateDataApiJsonSerializer,
            final ApiRequestParameterHelper apiRequestParameterHelper, final TemplateDomainService templateService,
            final TemplateMergeService templateMergeService,
            final PortfolioCommandSourceWritePlatformService commandsSourceWritePlatformService) {
        this.context = context;
        this.toApiJsonSerializer = toApiJsonSerializer;
        this.templateDataApiJsonSerializer = templateDataApiJsonSerializer;
        this.apiRequestParameterHelper = apiRequestParameterHelper;
        this.templateService = templateService;
        this.templateMergeService = templateMergeService;
        this.commandsSourceWritePlatformService = commandsSourceWritePlatformService;
    }

    /**
     * GET /templates — lists templates. When both {@code typeId} and
     * {@code entityId} are supplied (i.e. not the -1 defaults) the list is
     * filtered by entity and type; note the arguments are ordinal indexes
     * into the {@link TemplateEntity}/{@link TemplateType} enums, not ids.
     */
    @GET
    public String retrieveAll(@DefaultValue("-1") @QueryParam("typeId") final int typeId,
            @DefaultValue("-1") @QueryParam("entityId") final int entityId, @Context final UriInfo uriInfo) {

        this.context.authenticatedUser().validateHasReadPermission(this.RESOURCE_NAME_FOR_PERMISSION);

        // FIXME - we don't use the ORM when doing fetches - we write SQL and
        // fetch through JDBC returning data to be serialized to JSON

        List<Template> templates = new ArrayList<>();

        if (typeId != -1 && entityId != -1) {
            // NOTE(review): values()[x] throws ArrayIndexOutOfBoundsException for
            // out-of-range ordinals — presumably callers pass valid indexes; confirm.
            templates = this.templateService.getAllByEntityAndType(TemplateEntity.values()[entityId], TemplateType.values()[typeId]);
        } else {
            templates = this.templateService.getAll();
        }

        final ApiRequestJsonSerializationSettings settings = this.apiRequestParameterHelper.process(uriInfo.getQueryParameters());
        return this.toApiJsonSerializer.serialize(settings, templates, this.RESPONSE_TEMPLATES_DATA_PARAMETERS);
    }

    /**
     * GET /templates/template — returns the blank "template" payload used to
     * populate creation forms (available entities/types).
     */
    @GET
    @Path("template")
    public String template(@Context final UriInfo uriInfo) {
        this.context.authenticatedUser().validateHasReadPermission(this.RESOURCE_NAME_FOR_PERMISSION);

        final TemplateData templateData = TemplateData.template();

        final ApiRequestJsonSerializationSettings settings = this.apiRequestParameterHelper.process(uriInfo.getQueryParameters());
        // NOTE(review): serializes with the list parameter set ("id") rather than
        // RESPONSE_TEMPLATE_DATA_PARAMETERS — possibly intentional; confirm.
        return this.templateDataApiJsonSerializer.serialize(settings, templateData, this.RESPONSE_TEMPLATES_DATA_PARAMETERS);
    }

    /**
     * POST /templates — creates a template via the command pipeline; returns
     * the serialized {@link CommandProcessingResult}.
     */
    @POST
    public String createTemplate(final String apiRequestBodyAsJson) {
        final CommandWrapper commandRequest = new CommandWrapperBuilder().createTemplate().withJson(apiRequestBodyAsJson).build();

        final CommandProcessingResult result = this.commandsSourceWritePlatformService.logCommandSource(commandRequest);

        return this.toApiJsonSerializer.serialize(result);
    }

    /** GET /templates/{templateId} — fetches a single template by id. */
    @GET
    @Path("{templateId}")
    public String retrieveOne(@PathParam("templateId") final Long templateId, @Context final UriInfo uriInfo) {
        this.context.authenticatedUser().validateHasReadPermission(this.RESOURCE_NAME_FOR_PERMISSION);

        final Template template = this.templateService.findOneById(templateId);

        final ApiRequestJsonSerializationSettings settings = this.apiRequestParameterHelper.process(uriInfo.getQueryParameters());
        return this.toApiJsonSerializer.serialize(settings, template, this.RESPONSE_TEMPLATES_DATA_PARAMETERS);
    }

    /**
     * GET /templates/{templateId}/template — returns the edit-form payload for
     * an existing template (entities/types plus the template itself).
     */
    @GET
    @Path("{templateId}/template")
    public String getTemplateByTemplate(@PathParam("templateId") final Long templateId, @Context final UriInfo uriInfo) {
        this.context.authenticatedUser().validateHasReadPermission(this.RESOURCE_NAME_FOR_PERMISSION);

        final TemplateData template = TemplateData.template(this.templateService.findOneById(templateId));

        final ApiRequestJsonSerializationSettings settings = this.apiRequestParameterHelper.process(uriInfo.getQueryParameters());
        return this.templateDataApiJsonSerializer.serialize(settings, template, this.RESPONSE_TEMPLATE_DATA_PARAMETERS);
    }

    /** PUT /templates/{templateId} — updates a template via the command pipeline. */
    @PUT
    @Path("{templateId}")
    public String saveTemplate(@PathParam("templateId") final Long templateId, final String apiRequestBodyAsJson) {
        final CommandWrapper commandRequest = new CommandWrapperBuilder().updateTemplate(templateId).withJson(apiRequestBodyAsJson).build();

        final CommandProcessingResult result = this.commandsSourceWritePlatformService.logCommandSource(commandRequest);

        return this.toApiJsonSerializer.serialize(result);
    }

    /** DELETE /templates/{templateId} — deletes a template via the command pipeline. */
    @DELETE
    @Path("{templateId}")
    public String deleteTemplate(@PathParam("templateId") final Long templateId) {
        final CommandWrapper commandRequest = new CommandWrapperBuilder().deleteTemplate(templateId).build();

        final CommandProcessingResult result = this.commandsSourceWritePlatformService.logCommandSource(commandRequest);

        return this.toApiJsonSerializer.serialize(result);
    }

    /**
     * POST /templates/{templateId} — renders (mustache-merges) the template as
     * HTML. The scope passed to the merge is built from the JSON request body
     * plus every query parameter (single-valued entries flattened to strings)
     * plus a "BASE_URI" entry; body keys override query parameters.
     *
     * @throws IOException if the request body cannot be parsed as JSON
     */
    @POST
    @Path("{templateId}")
    @Produces({ MediaType.TEXT_HTML })
    public String mergeTemplate(@PathParam("templateId") final Long templateId, @Context final UriInfo uriInfo,
            final String apiRequestBodyAsJson) throws MalformedURLException, IOException {

        final Template template = this.templateService.findOneById(templateId);

        @SuppressWarnings("unchecked")
        final HashMap<String, Object> result = new ObjectMapper().readValue(apiRequestBodyAsJson, HashMap.class);

        final MultivaluedMap<String, String> parameters = uriInfo.getQueryParameters();
        final Map<String, Object> parametersMap = new HashMap<>();
        for (final Map.Entry<String, List<String>> entry : parameters.entrySet()) {
            // Unwrap single-valued query params; keep multi-valued ones as lists.
            if (entry.getValue().size() == 1) {
                parametersMap.put(entry.getKey(), entry.getValue().get(0));
            } else {
                parametersMap.put(entry.getKey(), entry.getValue());
            }
        }

        parametersMap.put("BASE_URI", uriInfo.getBaseUri());
        parametersMap.putAll(result);
        return this.templateMergeService.compile(template, parametersMap);
    }
}
RanjithKumar5550/RanMifos
fineract-provider/src/main/java/org/apache/fineract/template/api/TemplatesApiResource.java
Java
apache-2.0
9,902
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.jsmpp.bean; /** * This is simple DataCoding. Only contains Alphabet (DEFAULT and 8-bit) and * Message Class. * * @author uudashr * */ public class SimpleDataCoding implements DataCoding { private final Alphabet alphabet; private final MessageClass messageClass; /** * Construct Data Coding using default Alphabet and * {@link MessageClass#CLASS1} Message Class. */ public SimpleDataCoding() { this(Alphabet.ALPHA_DEFAULT, MessageClass.CLASS1); } /** * Construct Data Coding using specified Alphabet and Message Class. * * @param alphabet is the alphabet. Only support * {@link Alphabet#ALPHA_DEFAULT} and {@link Alphabet#ALPHA_8_BIT}. * @param messageClass * @throws IllegalArgumentException if alphabet is <tt>null</tt> or using * non {@link Alphabet#ALPHA_DEFAULT} and * {@link Alphabet#ALPHA_8_BIT} alphabet or * <code>messageClass</code> is null. */ public SimpleDataCoding(Alphabet alphabet, MessageClass messageClass) throws IllegalArgumentException { if (alphabet == null) { throw new IllegalArgumentException( "Alphabet is mandatory, can't be null"); } if (alphabet.equals(Alphabet.ALPHA_UCS2) || alphabet.isReserved()) { throw new IllegalArgumentException( "Supported alphabet for SimpleDataCoding does not include " + Alphabet.ALPHA_UCS2 + " or " + "reserved alphabet codes. 
Current alphabet is " + alphabet); } if (messageClass == null) { throw new IllegalArgumentException( "MessageClass is mandatory, can't be null"); } this.alphabet = alphabet; this.messageClass = messageClass; } public Alphabet getAlphabet() { return alphabet; } public MessageClass getMessageClass() { return messageClass; } public byte toByte() { // base byte is 11110xxx or 0xf0, others injected byte value = (byte)0xf0; value |= alphabet.value(); value |= messageClass.value(); return value; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((alphabet == null) ? 0 : alphabet.hashCode()); result = prime * result + ((messageClass == null) ? 0 : messageClass.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; SimpleDataCoding other = (SimpleDataCoding)obj; if (alphabet == null) { if (other.alphabet != null) return false; } else if (!alphabet.equals(other.alphabet)) return false; if (messageClass == null) { if (other.messageClass != null) return false; } else if (!messageClass.equals(other.messageClass)) return false; return true; } @Override public String toString() { return "DataCoding:" + (0xff & toByte()); } }
amdtelecom/jsmpp
jsmpp/src/main/java/org/jsmpp/bean/SimpleDataCoding.java
Java
apache-2.0
3,967
/* * Copyright 2013-2014 Richard M. Hightower * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * __________ _____ __ .__ * \______ \ ____ ____ ____ /\ / \ _____ | | _|__| ____ ____ * | | _// _ \ / _ \ / \ \/ / \ / \\__ \ | |/ / |/ \ / ___\ * | | ( <_> | <_> ) | \ /\ / Y \/ __ \| <| | | \/ /_/ > * |______ /\____/ \____/|___| / \/ \____|__ (____ /__|_ \__|___| /\___ / * \/ \/ \/ \/ \/ \//_____/ * ____. ___________ _____ ______________.___. * | |____ ___ _______ \_ _____/ / _ \ / _____/\__ | | * | \__ \\ \/ /\__ \ | __)_ / /_\ \ \_____ \ / | | * /\__| |/ __ \\ / / __ \_ | \/ | \/ \ \____ | * \________(____ /\_/ (____ / /_______ /\____|__ /_______ / / ______| * \/ \/ \/ \/ \/ \/ */ package org.boon.validation.annotations; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; @Retention ( RetentionPolicy.RUNTIME ) @Target ( { ElementType.METHOD, ElementType.TYPE, ElementType.FIELD } ) public @interface Email { String detailMessage() default ""; String summaryMessage() default ""; }
wprice/boon
boon/src/main/java/org/boon/validation/annotations/Email.java
Java
apache-2.0
2,011
/*
 * Copyright 2000-2015 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.usages.impl.rules;

import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.GeneratedSourcesFilter;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.usageView.UsageInfo;
import com.intellij.usageView.UsageViewBundle;
import com.intellij.usages.*;
import com.intellij.usages.rules.PsiElementUsage;
import com.intellij.usages.rules.SingleParentUsageGroupingRule;
import com.intellij.usages.rules.UsageInFile;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

/**
 * Grouping rule that sorts usages into one of four fixed buckets:
 * code usages, dynamic usages, non-code usages, and usages in generated code.
 * Each bucket is a singleton {@link UsageGroup}; see {@link #getParentGroupFor}.
 *
 * @author max
 */
public class NonCodeUsageGroupingRule extends SingleParentUsageGroupingRule {
  // Project used to evaluate GeneratedSourcesFilter for the generated-code bucket.
  private final Project myProject;

  public NonCodeUsageGroupingRule(Project project) {
    myProject = project;
  }

  /**
   * Bucket for regular code usages. The int passed to UsageGroupBase (0 here)
   * is an ordering index — presumably lower values sort first; confirm against
   * UsageGroupBase.
   */
  private static class CodeUsageGroup extends UsageGroupBase {
    private static final UsageGroup INSTANCE = new CodeUsageGroup();

    private CodeUsageGroup() {
      super(0);
    }

    @Override
    @NotNull
    public String getText(UsageView view) {
      // Without a view, fall back to the generic bundle string.
      return view == null ? UsageViewBundle.message("node.group.code.usages") : view.getPresentation().getCodeUsagesString();
    }

    public String toString() {
      //noinspection HardCodedStringLiteral
      return "CodeUsages";
    }
  }

  /** Bucket for usages located in generated sources (ordering index 3). */
  private static class UsageInGeneratedCodeGroup extends UsageGroupBase {
    public static final UsageGroup INSTANCE = new UsageInGeneratedCodeGroup();

    private UsageInGeneratedCodeGroup() {
      super(3);
    }

    @Override
    @NotNull
    public String getText(UsageView view) {
      return view == null ? UsageViewBundle.message("node.usages.in.generated.code") : view.getPresentation().getUsagesInGeneratedCodeString();
    }

    public String toString() {
      return "UsagesInGeneratedCode";
    }
  }

  /** Bucket for non-code usages, e.g. outside PSI code elements (ordering index 2). */
  private static class NonCodeUsageGroup extends UsageGroupBase {
    public static final UsageGroup INSTANCE = new NonCodeUsageGroup();

    private NonCodeUsageGroup() {
      super(2);
    }

    @Override
    @NotNull
    public String getText(UsageView view) {
      return view == null ? UsageViewBundle.message("node.non.code.usages") : view.getPresentation().getNonCodeUsagesString();
    }

    @Override
    public void update() {
    }

    public String toString() {
      //noinspection HardCodedStringLiteral
      return "NonCodeUsages";
    }
  }

  /** Bucket for dynamic usages (UsageInfo.isDynamicUsage(); ordering index 1). */
  private static class DynamicUsageGroup extends UsageGroupBase {
    public static final UsageGroup INSTANCE = new DynamicUsageGroup();
    // Fallback caption when no view-specific caption is available.
    @NonNls private static final String DYNAMIC_CAPTION = "Dynamic usages";

    public DynamicUsageGroup() {
      super(1);
    }

    @Override
    @NotNull
    public String getText(UsageView view) {
      if (view == null) {
        return DYNAMIC_CAPTION;
      }
      else {
        // The presentation may not define a dynamic-usages caption; fall back.
        final String dynamicCodeUsagesString = view.getPresentation().getDynamicCodeUsagesString();
        return dynamicCodeUsagesString == null ? DYNAMIC_CAPTION : dynamicCodeUsagesString;
      }
    }

    public String toString() {
      //noinspection HardCodedStringLiteral
      return "DynamicUsages";
    }
  }

  /**
   * Picks the bucket for a usage. Precedence: generated-code (file-based check)
   * beats everything; then dynamic; then non-code vs. code. Returns null for
   * usages that are neither file-based nor PSI-element-based.
   */
  @Nullable
  @Override
  protected UsageGroup getParentGroupFor(@NotNull Usage usage, @NotNull UsageTarget[] targets) {
    if (usage instanceof UsageInFile) {
      VirtualFile file = ((UsageInFile)usage).getFile();
      if (file != null && GeneratedSourcesFilter.isGeneratedSourceByAnyFilter(file, myProject)) {
        return UsageInGeneratedCodeGroup.INSTANCE;
      }
    }
    if (usage instanceof PsiElementUsage) {
      if (usage instanceof UsageInfo2UsageAdapter) {
        final UsageInfo usageInfo = ((UsageInfo2UsageAdapter)usage).getUsageInfo();
        if (usageInfo.isDynamicUsage()) {
          return DynamicUsageGroup.INSTANCE;
        }
      }
      if (((PsiElementUsage)usage).isNonCodeUsage()) {
        return NonCodeUsageGroup.INSTANCE;
      }
      else {
        return CodeUsageGroup.INSTANCE;
      }
    }
    return null;
  }
}
signed/intellij-community
platform/usageView/src/com/intellij/usages/impl/rules/NonCodeUsageGroupingRule.java
Java
apache-2.0
4,612
(function () {
    // Linearly map `fromValue` from [fromMin, fromMax] onto [toMin, toMax].
    // Degenerate ranges collapse deterministically instead of dividing by zero.
    function remap(fromValue, fromMin, fromMax, toMin, toMax) {
        var fromRange = fromMax - fromMin;
        var toRange = toMax - toMin;

        // A zero-width source range can only map to the target midpoint.
        if (fromRange === 0) {
            return toMin + toRange / 2;
        }
        // A zero-width target range pins everything to toMin.
        if (toRange === 0) {
            return toMin;
        }

        // Normalize into [0, 1], then scale and translate into the target range.
        var normalized = (fromValue - fromMin) / fromRange;
        return (toRange * normalized) + toMin;
    }

    /**
     * Enhance Filter. Adjusts the colors so that they span the widest
     * possible range (ie 0-255). Performs w*h pixel reads and w*h pixel
     * writes.
     * @function
     * @name Enhance
     * @memberof Kinetic.Filters
     * @param {Object} imageData
     * @author ippo615
     * @example
     * node.cache();
     * node.filters([Kinetic.Filters.Enhance]);
     * node.enhance(0.4);
     */
    Kinetic.Filters.Enhance = function (imageData) {
        var data = imageData.data;
        var nSubPixels = data.length;
        var i;

        // Seed each channel's min/max from the first pixel.
        var rMin = data[0], rMax = rMin;
        var gMin = data[1], gMax = gMin;
        var bMin = data[2], bMax = bMin;

        // Zero strength means no work at all.
        var enhanceAmount = this.enhance();
        if (enhanceAmount === 0) {
            return;
        }

        // Pass 1: find the min and max of the red, green and blue channels
        // (alpha is ignored).
        for (i = 0; i < nSubPixels; i += 4) {
            rMin = Math.min(rMin, data[i]);
            rMax = Math.max(rMax, data[i]);
            gMin = Math.min(gMin, data[i + 1]);
            gMax = Math.max(gMax, data[i + 1]);
            bMin = Math.min(bMin, data[i + 2]);
            bMax = Math.max(bMax, data[i + 2]);
        }

        // A flat channel carries a single level; widen it to the full range
        // so the remap below stays meaningful.
        if (rMax === rMin) { rMax = 255; rMin = 0; }
        if (gMax === gMin) { gMax = 255; gMin = 0; }
        if (bMax === bMin) { bMax = 255; bMin = 0; }

        // Compute the [goalMin, goalMax] pair for one channel:
        // positive strength stretches the histogram toward [0, 255],
        // negative strength compresses it toward the channel midpoint.
        function goals(min, max) {
            if (enhanceAmount > 0) {
                return [min - enhanceAmount * (min - 0), max + enhanceAmount * (255 - max)];
            }
            var mid = (max + min) * 0.5;
            return [min + enhanceAmount * (min - mid), max + enhanceAmount * (max - mid)];
        }

        var rGoal = goals(rMin, rMax);
        var gGoal = goals(gMin, gMax);
        var bGoal = goals(bMin, bMax);

        // Pass 2: remap every channel except alpha.
        for (i = 0; i < nSubPixels; i += 4) {
            data[i] = remap(data[i], rMin, rMax, rGoal[0], rGoal[1]);
            data[i + 1] = remap(data[i + 1], gMin, gMax, gGoal[0], gGoal[1]);
            data[i + 2] = remap(data[i + 2], bMin, bMax, bGoal[0], bGoal[1]);
        }
    };

    /**
     * get/set enhance. Use with {@link Kinetic.Filters.Enhance} filter.
     * @name enhance
     * @method
     * @memberof Kinetic.Node.prototype
     * @param {Float} amount
     * @returns {Float}
     */
    Kinetic.Factory.addGetterSetter(Kinetic.Node, 'enhance', 0, null, Kinetic.Factory.afterSetFilter);
})();
puyanLiu/LPYFramework
前端练习/10canvas/文档/KineticJS-master/src/filters/Enhance.js
JavaScript
apache-2.0
4,121
/*
 * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
 * Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.gamelift.model.transform;

import java.util.Map;
import java.util.Map.Entry;
import java.math.*;
import java.nio.ByteBuffer;

import com.amazonaws.services.gamelift.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;

import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;

/**
 * CreateAliasResult JSON Unmarshaller.
 *
 * Walks the streaming JSON token stream positioned at a CreateAlias response
 * object and extracts the "Alias" member into a {@link CreateAliasResult}.
 * NOTE(review): this class follows the AutoRest/AWS generated-unmarshaller
 * pattern; hand edits may be overwritten on regeneration.
 */
public class CreateAliasResultJsonUnmarshaller implements
        Unmarshaller<CreateAliasResult, JsonUnmarshallerContext> {

    /**
     * Unmarshalls one CreateAliasResult from the context's token stream.
     *
     * @param context streaming unmarshaller context positioned at (or just
     *                before) the result object
     * @return the populated result, or {@code null} if the current value is
     *         a JSON null
     * @throws Exception propagated from the underlying JSON parser
     */
    public CreateAliasResult unmarshall(JsonUnmarshallerContext context)
            throws Exception {
        CreateAliasResult createAliasResult = new CreateAliasResult();

        // Remember where we started so we know when the enclosing object
        // has been fully consumed.
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        // A JSON null value means "no result object".
        if (token == VALUE_NULL)
            return null;

        while (true) {
            // End of the token stream.
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
                // Only fields exactly one level below the result object are
                // members of this result.
                if (context.testExpression("Alias", targetDepth)) {
                    context.nextToken();
                    createAliasResult.setAlias(AliasJsonUnmarshaller
                            .getInstance().unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Stop once we have closed back out to the depth at which
                // we started (i.e. the result object is complete).
                if (context.getLastParsedParentElement() == null
                        || context.getLastParsedParentElement().equals(
                                currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return createAliasResult;
    }

    // Lazily created singleton; not synchronized (consistent with other
    // generated unmarshallers in this SDK).
    private static CreateAliasResultJsonUnmarshaller instance;

    /** Returns the shared unmarshaller instance, creating it on first use. */
    public static CreateAliasResultJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new CreateAliasResultJsonUnmarshaller();
        return instance;
    }
}
flofreud/aws-sdk-java
aws-java-sdk-gamelift/src/main/java/com/amazonaws/services/gamelift/model/transform/CreateAliasResultJsonUnmarshaller.java
Java
apache-2.0
2,842
// Copyright 2009 the Sputnik authors. All rights reserved. // This code is governed by the BSD license found in the LICENSE file. /*--- info: The String.prototype.charAt.length property has the attribute DontEnum es5id: 15.5.4.4_A8 description: > Checking if enumerating the String.prototype.charAt.length property fails ---*/ ////////////////////////////////////////////////////////////////////////////// //CHECK#0 if (!(String.prototype.charAt.hasOwnProperty('length'))) { $ERROR('#0: String.prototype.charAt.hasOwnProperty(\'length\') return true. Actual: '+String.prototype.charAt.hasOwnProperty('length')); } // ////////////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////////// // CHECK#1 if (String.prototype.charAt.propertyIsEnumerable('length')) { $ERROR('#1: String.prototype.charAt.propertyIsEnumerable(\'length\') return false. Actual: '+String.prototype.charAt.propertyIsEnumerable('length')); } // ////////////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////////// // CHECK#2 var count=0; for (var p in String.prototype.charAt){ if (p==="length") count++; } if (count !== 0) { $ERROR('#2: count=0; for (p in String.prototype.charAt){if (p==="length") count++;}; count === 0. Actual: count ==='+count ); } // //////////////////////////////////////////////////////////////////////////////
m0ppers/arangodb
3rdParty/V8/V8-5.0.71.39/test/test262/data/test/built-ins/String/prototype/charAt/S15.5.4.4_A8.js
JavaScript
apache-2.0
1,510
package imagestreamimport

import (
	"fmt"
	"net/http"
	"time"

	"github.com/golang/glog"
	gocontext "golang.org/x/net/context"
	kapierrors "k8s.io/apimachinery/pkg/api/errors"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/runtime"
	"k8s.io/apimachinery/pkg/util/diff"
	utilruntime "k8s.io/apimachinery/pkg/util/runtime"
	"k8s.io/apimachinery/pkg/util/validation/field"
	apirequest "k8s.io/apiserver/pkg/endpoints/request"
	"k8s.io/apiserver/pkg/registry/rest"
	kapi "k8s.io/kubernetes/pkg/api"

	authorizationapi "github.com/openshift/origin/pkg/authorization/api"
	"github.com/openshift/origin/pkg/client"
	serverapi "github.com/openshift/origin/pkg/cmd/server/api"
	"github.com/openshift/origin/pkg/dockerregistry"
	"github.com/openshift/origin/pkg/image/api"
	imageapiv1 "github.com/openshift/origin/pkg/image/api/v1"
	"github.com/openshift/origin/pkg/image/importer"
	"github.com/openshift/origin/pkg/image/registry/imagestream"
	quotautil "github.com/openshift/origin/pkg/quota/util"
)

// ImporterFunc returns an instance of the importer that should be used per invocation.
type ImporterFunc func(r importer.RepositoryRetriever) importer.Interface

// ImporterDockerRegistryFunc returns an instance of a docker client that should be used per invocation of import,
// may be nil if no legacy import capability is required.
type ImporterDockerRegistryFunc func() dockerregistry.Client

// REST implements the RESTStorage interface for ImageStreamImport
type REST struct {
	importFn          ImporterFunc                        // constructs the importer used for each Create call
	streams           imagestream.Registry                // read access to existing image streams
	internalStreams   rest.CreaterUpdater                 // storage used to persist the resulting stream
	images            rest.Creater                        // storage used to persist imported images
	secrets           client.ImageStreamSecretsNamespacer // lazy source of pull secrets
	transport         http.RoundTripper                   // transport for secure registries
	insecureTransport http.RoundTripper                   // transport for registries marked insecure
	clientFn          ImporterDockerRegistryFunc          // optional legacy v1 registry client factory
	strategy          *strategy
	sarClient         client.SubjectAccessReviewInterface // used to check image-creation permissions
}

// NewREST returns a REST storage implementation that handles importing images. The clientFn argument is optional
// if v1 Docker Registry importing is not required. Insecure transport is optional, and both transports should not
// include client certs unless you wish to allow the entire cluster to import using those certs.
func NewREST(importFn ImporterFunc, streams imagestream.Registry, internalStreams rest.CreaterUpdater,
	images rest.Creater, secrets client.ImageStreamSecretsNamespacer,
	transport, insecureTransport http.RoundTripper,
	clientFn ImporterDockerRegistryFunc,
	allowedImportRegistries *serverapi.AllowedRegistries,
	registryFn api.DefaultRegistryFunc,
	sarClient client.SubjectAccessReviewInterface,
) *REST {
	return &REST{
		importFn:          importFn,
		streams:           streams,
		internalStreams:   internalStreams,
		images:            images,
		secrets:           secrets,
		transport:         transport,
		insecureTransport: insecureTransport,
		clientFn:          clientFn,
		strategy:          NewStrategy(allowedImportRegistries, registryFn),
		sarClient:         sarClient,
	}
}

// New is only implemented to make REST implement RESTStorage
func (r *REST) New() runtime.Object {
	return &api.ImageStreamImport{}
}

// Create runs an image stream import: it authorizes the request, invokes the
// importer against the remote registry, folds the results into the target
// image stream, persists the imported images, and finally creates or updates
// the stream itself. The returned object is the ImageStreamImport with its
// Status populated.
func (r *REST) Create(ctx apirequest.Context, obj runtime.Object) (runtime.Object, error) {
	isi, ok := obj.(*api.ImageStreamImport)
	if !ok {
		return nil, kapierrors.NewBadRequest(fmt.Sprintf("obj is not an ImageStreamImport: %#v", obj))
	}
	inputMeta := isi.ObjectMeta

	if err := rest.BeforeCreate(r.strategy, ctx, obj); err != nil {
		return nil, err
	}

	// Check if the user is allowed to create Images or ImageStreamMappings.
	// In case the user is allowed to create them, do not validate the ImageStreamImport
	// registry location against the registry whitelist, but instead allow to create any
	// image from any registry.
	user, ok := apirequest.UserFrom(ctx)
	if !ok {
		return nil, kapierrors.NewBadRequest("unable to get user from context")
	}
	isCreateImage, err := r.sarClient.Create(authorizationapi.AddUserToSAR(user,
		&authorizationapi.SubjectAccessReview{
			Action: authorizationapi.Action{
				Verb:     "create",
				Group:    api.GroupName,
				Resource: "images",
			},
		},
	))
	if err != nil {
		return nil, err
	}
	isCreateImageStreamMapping, err := r.sarClient.Create(authorizationapi.AddUserToSAR(user,
		&authorizationapi.SubjectAccessReview{
			Action: authorizationapi.Action{
				Verb:     "create",
				Group:    api.GroupName,
				Resource: "imagestreammapping",
			},
		},
	))
	if err != nil {
		return nil, err
	}
	if !isCreateImage.Allowed && !isCreateImageStreamMapping.Allowed {
		if errs := r.strategy.ValidateAllowedRegistries(isi); len(errs) != 0 {
			return nil, kapierrors.NewInvalid(api.Kind("ImageStreamImport"), isi.Name, errs)
		}
	}

	namespace, ok := apirequest.NamespaceFrom(ctx)
	if !ok {
		return nil, kapierrors.NewBadRequest("a namespace must be specified to import images")
	}

	// Make the optional legacy v1 registry client available to the importer.
	if r.clientFn != nil {
		if client := r.clientFn(); client != nil {
			ctx = apirequest.WithValue(ctx, importer.ContextKeyV1RegistryClient, client)
		}
	}

	// only load secrets if we need them
	credentials := importer.NewLazyCredentialsForSecrets(func() ([]kapi.Secret, error) {
		secrets, err := r.secrets.ImageStreamSecrets(namespace).Secrets(isi.Name, metav1.ListOptions{})
		if err != nil {
			return nil, err
		}
		return secrets.Items, nil
	})
	importCtx := importer.NewContext(r.transport, r.insecureTransport).WithCredentials(credentials)
	imports := r.importFn(importCtx)
	if err := imports.Import(ctx.(gocontext.Context), isi); err != nil {
		return nil, kapierrors.NewInternalError(err)
	}

	// if we encountered an error loading credentials and any images could not be retrieved with an access
	// related error, modify the message.
	// TODO: set a status cause
	if err := credentials.Err(); err != nil {
		for i, image := range isi.Status.Images {
			switch image.Status.Reason {
			case metav1.StatusReasonUnauthorized, metav1.StatusReasonForbidden:
				isi.Status.Images[i].Status.Message = fmt.Sprintf("Unable to load secrets for this image: %v; (%s)", err, image.Status.Message)
			}
		}
		if r := isi.Status.Repository; r != nil {
			switch r.Status.Reason {
			case metav1.StatusReasonUnauthorized, metav1.StatusReasonForbidden:
				r.Status.Message = fmt.Sprintf("Unable to load secrets for this repository: %v; (%s)", err, r.Status.Message)
			}
		}
	}

	// TODO: perform the transformation of the image stream and return it with the ISI if import is false
	// so that clients can see what the resulting object would look like.
	if !isi.Spec.Import {
		clearManifests(isi)
		return isi, nil
	}

	create := false
	stream, err := r.streams.GetImageStream(ctx, isi.Name, &metav1.GetOptions{})
	if err != nil {
		if !kapierrors.IsNotFound(err) {
			return nil, err
		}
		// consistency check, stream must exist
		if len(inputMeta.ResourceVersion) > 0 || len(inputMeta.UID) > 0 {
			return nil, err
		}
		create = true
		stream = &api.ImageStream{
			ObjectMeta: metav1.ObjectMeta{
				Name:       isi.Name,
				Namespace:  namespace,
				Generation: 0,
			},
		}
	} else {
		// Optimistic-concurrency checks against the caller-supplied metadata.
		if len(inputMeta.ResourceVersion) > 0 && inputMeta.ResourceVersion != stream.ResourceVersion {
			glog.V(4).Infof("DEBUG: mismatch between requested ResourceVersion %s and located ResourceVersion %s", inputMeta.ResourceVersion, stream.ResourceVersion)
			return nil, kapierrors.NewConflict(api.Resource("imagestream"), inputMeta.Name, fmt.Errorf("the image stream was updated from %q to %q", inputMeta.ResourceVersion, stream.ResourceVersion))
		}
		if len(inputMeta.UID) > 0 && inputMeta.UID != stream.UID {
			glog.V(4).Infof("DEBUG: mismatch between requested UID %s and located UID %s", inputMeta.UID, stream.UID)
			return nil, kapierrors.NewNotFound(api.Resource("imagestream"), inputMeta.Name)
		}
	}

	if stream.Annotations == nil {
		stream.Annotations = make(map[string]string)
	}
	now := metav1.Now()
	_, hasAnnotation := stream.Annotations[api.DockerImageRepositoryCheckAnnotation]
	nextGeneration := stream.Generation + 1

	// Keep a deep copy so we can detect whether the import changed anything.
	original, err := kapi.Scheme.DeepCopy(stream)
	if err != nil {
		return nil, err
	}

	// walk the retrieved images, ensuring each one exists in etcd
	importedImages := make(map[string]error)
	updatedImages := make(map[string]*api.Image)

	if spec := isi.Spec.Repository; spec != nil {
		for i, status := range isi.Status.Repository.Images {
			if checkImportFailure(status, stream, status.Tag, nextGeneration, now) {
				continue
			}

			image := status.Image
			ref, err := api.ParseDockerImageReference(image.DockerImageReference)
			if err != nil {
				utilruntime.HandleError(fmt.Errorf("unable to parse image reference during import: %v", err))
				continue
			}
			from, err := api.ParseDockerImageReference(spec.From.Name)
			if err != nil {
				utilruntime.HandleError(fmt.Errorf("unable to parse from reference during import: %v", err))
				continue
			}

			tag := ref.Tag
			if len(status.Tag) > 0 {
				tag = status.Tag
			}
			// we've imported a set of tags, ensure spec tag will point to this for later imports
			from.ID, from.Tag = "", tag

			if updated, ok := r.importSuccessful(ctx, image, stream, tag, from.Exact(), nextGeneration, now, spec.ImportPolicy, spec.ReferencePolicy, importedImages, updatedImages); ok {
				isi.Status.Repository.Images[i].Image = updated
			}
		}
	}

	for i, spec := range isi.Spec.Images {
		if spec.To == nil {
			continue
		}
		tag := spec.To.Name

		// record a failure condition
		status := isi.Status.Images[i]
		if checkImportFailure(status, stream, tag, nextGeneration, now) {
			// ensure that we have a spec tag set
			ensureSpecTag(stream, tag, spec.From.Name, spec.ImportPolicy, spec.ReferencePolicy, false)
			continue
		}

		// record success
		image := status.Image
		if updated, ok := r.importSuccessful(ctx, image, stream, tag, spec.From.Name, nextGeneration, now, spec.ImportPolicy, spec.ReferencePolicy, importedImages, updatedImages); ok {
			isi.Status.Images[i].Image = updated
		}
	}

	// TODO: should we allow partial failure?
	for _, err := range importedImages {
		if err != nil {
			return nil, err
		}
	}

	clearManifests(isi)

	// ensure defaulting is applied by round trip converting
	// TODO: convert to using versioned types.
	external, err := kapi.Scheme.ConvertToVersion(stream, imageapiv1.SchemeGroupVersion)
	if err != nil {
		return nil, err
	}
	kapi.Scheme.Default(external)
	internal, err := kapi.Scheme.ConvertToVersion(external, api.SchemeGroupVersion)
	if err != nil {
		return nil, err
	}
	stream = internal.(*api.ImageStream)

	// if and only if we have changes between the original and the imported stream, trigger
	// an import
	hasChanges := !kapi.Semantic.DeepEqual(original, stream)
	if create {
		stream.Annotations[api.DockerImageRepositoryCheckAnnotation] = now.UTC().Format(time.RFC3339)
		glog.V(4).Infof("create new stream: %#v", stream)
		obj, err = r.internalStreams.Create(ctx, stream)
	} else {
		if hasAnnotation && !hasChanges {
			// Nothing changed and the stream was already checked — skip the write.
			glog.V(4).Infof("stream did not change: %#v", stream)
			obj, err = original.(*api.ImageStream), nil
		} else {
			if glog.V(4) {
				glog.V(4).Infof("updating stream %s", diff.ObjectDiff(original, stream))
			}
			stream.Annotations[api.DockerImageRepositoryCheckAnnotation] = now.UTC().Format(time.RFC3339)
			obj, _, err = r.internalStreams.Update(ctx, stream.Name, rest.DefaultUpdatedObjectInfo(stream, kapi.Scheme))
		}
	}

	if err != nil {
		// if we have an admission limit error then record the conditions on the original stream. Quota errors
		// will be recorded by the importer.
		if quotautil.IsErrorLimitExceeded(err) {
			originalStream := original.(*api.ImageStream)
			recordLimitExceededStatus(originalStream, stream, err, now, nextGeneration)
			var limitErr error
			obj, _, limitErr = r.internalStreams.Update(ctx, stream.Name, rest.DefaultUpdatedObjectInfo(originalStream, kapi.Scheme))
			if limitErr != nil {
				utilruntime.HandleError(fmt.Errorf("failed to record limit exceeded status in image stream %s/%s: %v", stream.Namespace, stream.Name, limitErr))
			}
		}

		return nil, err
	}

	isi.Status.Import = obj.(*api.ImageStream)
	return isi, nil
}

// recordLimitExceededStatus adds the limit err to any new tag.
func recordLimitExceededStatus(originalStream *api.ImageStream, newStream *api.ImageStream, err error, now metav1.Time, nextGeneration int64) {
	for tag := range newStream.Status.Tags {
		if _, ok := originalStream.Status.Tags[tag]; !ok {
			api.SetTagConditions(originalStream, tag, newImportFailedCondition(err, nextGeneration, now))
		}
	}
}

// checkImportFailure returns true when the given import status represents a
// failure and, in that case, records a failed ImportSuccess condition on the
// appropriate tag of the stream (resetting the spec tag generation to 0 so a
// later import retries it).
func checkImportFailure(status api.ImageImportStatus, stream *api.ImageStream, tag string, nextGeneration int64, now metav1.Time) bool {
	if status.Image != nil && status.Status.Status == metav1.StatusSuccess {
		return false
	}
	message := status.Status.Message
	if len(message) == 0 {
		message = "unknown error prevented import"
	}
	condition := api.TagEventCondition{
		Type:               api.ImportSuccess,
		Status:             kapi.ConditionFalse,
		Message:            message,
		Reason:             string(status.Status.Reason),
		Generation:         nextGeneration,
		LastTransitionTime: now,
	}

	// Fall back to the status tag, then to the tag parsed from the image
	// reference, when the caller did not name a tag explicitly.
	if tag == "" {
		if len(status.Tag) > 0 {
			tag = status.Tag
		} else if status.Image != nil {
			if ref, err := api.ParseDockerImageReference(status.Image.DockerImageReference); err == nil {
				tag = ref.Tag
			}
		}
	}
	if !api.HasTagCondition(stream, tag, condition) {
		api.SetTagConditions(stream, tag, condition)
		if tagRef, ok := stream.Spec.Tags[tag]; ok {
			zero := int64(0)
			tagRef.Generation = &zero
			stream.Spec.Tags[tag] = tagRef
		}
	}
	return true
}

// ensureSpecTag guarantees that the spec tag is set with the provided from, importPolicy and referencePolicy.
// If reset is passed, the tag will be overwritten.
func ensureSpecTag(stream *api.ImageStream, tag, from string, importPolicy api.TagImportPolicy, referencePolicy api.TagReferencePolicy, reset bool) api.TagReference {
	if stream.Spec.Tags == nil {
		stream.Spec.Tags = make(map[string]api.TagReference)
	}
	specTag, ok := stream.Spec.Tags[tag]
	if ok && !reset {
		return specTag
	}
	specTag.From = &kapi.ObjectReference{
		Kind: "DockerImage",
		Name: from,
	}
	zero := int64(0)
	specTag.Generation = &zero
	specTag.ImportPolicy = importPolicy
	specTag.ReferencePolicy = referencePolicy
	stream.Spec.Tags[tag] = specTag
	return specTag
}

// importSuccessful records a successful import into an image stream, setting the spec tag, status tag or conditions, and ensuring
// the image is created in etcd. Images are cached so they are not created multiple times in a row (when multiple tags point to the
// same image), and a failure to persist the image will be summarized before we update the stream. If an image was imported by this
// operation, it *replaces* the imported image (from the remote repository) with the updated image.
func (r *REST) importSuccessful( ctx apirequest.Context, image *api.Image, stream *api.ImageStream, tag string, from string, nextGeneration int64, now metav1.Time, importPolicy api.TagImportPolicy, referencePolicy api.TagReferencePolicy, importedImages map[string]error, updatedImages map[string]*api.Image, ) (*api.Image, bool) { r.strategy.PrepareImageForCreate(image) pullSpec, _ := api.MostAccuratePullSpec(image.DockerImageReference, image.Name, "") tagEvent := api.TagEvent{ Created: now, DockerImageReference: pullSpec, Image: image.Name, Generation: nextGeneration, } if stream.Spec.Tags == nil { stream.Spec.Tags = make(map[string]api.TagReference) } // ensure the spec and status tag match the imported image changed := api.DifferentTagEvent(stream, tag, tagEvent) || api.DifferentTagGeneration(stream, tag) specTag, ok := stream.Spec.Tags[tag] if changed || !ok { specTag = ensureSpecTag(stream, tag, from, importPolicy, referencePolicy, true) api.AddTagEventToImageStream(stream, tag, tagEvent) } // always reset the import policy specTag.ImportPolicy = importPolicy stream.Spec.Tags[tag] = specTag // import or reuse the image, and ensure tag conditions are set importErr, alreadyImported := importedImages[image.Name] if importErr != nil { api.SetTagConditions(stream, tag, newImportFailedCondition(importErr, nextGeneration, now)) } else { api.SetTagConditions(stream, tag) } // create the image if it does not exist, otherwise cache the updated status from the store for use by other tags if alreadyImported { if updatedImage, ok := updatedImages[image.Name]; ok { return updatedImage, true } return nil, false } updated, err := r.images.Create(ctx, image) switch { case kapierrors.IsAlreadyExists(err): if err := api.ImageWithMetadata(image); err != nil { glog.V(4).Infof("Unable to update image metadata during image import when image already exists %q: err", image.Name, err) } updated = image fallthrough case err == nil: updatedImage := updated.(*api.Image) 
updatedImages[image.Name] = updatedImage //isi.Status.Repository.Images[i].Image = updatedImage importedImages[image.Name] = nil return updatedImage, true default: importedImages[image.Name] = err } return nil, false } // clearManifests unsets the manifest for each object that does not request it func clearManifests(isi *api.ImageStreamImport) { for i := range isi.Status.Images { if !isi.Spec.Images[i].IncludeManifest { if isi.Status.Images[i].Image != nil { isi.Status.Images[i].Image.DockerImageManifest = "" isi.Status.Images[i].Image.DockerImageConfig = "" } } } if isi.Spec.Repository != nil && !isi.Spec.Repository.IncludeManifest { for i := range isi.Status.Repository.Images { if isi.Status.Repository.Images[i].Image != nil { isi.Status.Repository.Images[i].Image.DockerImageManifest = "" isi.Status.Repository.Images[i].Image.DockerImageConfig = "" } } } } func newImportFailedCondition(err error, gen int64, now metav1.Time) api.TagEventCondition { c := api.TagEventCondition{ Type: api.ImportSuccess, Status: kapi.ConditionFalse, Message: err.Error(), Generation: gen, LastTransitionTime: now, } if status, ok := err.(kapierrors.APIStatus); ok { s := status.Status() c.Reason, c.Message = string(s.Reason), s.Message } return c } func invalidStatus(kind, position string, errs ...*field.Error) metav1.Status { return kapierrors.NewInvalid(api.Kind(kind), position, errs).ErrStatus }
chlunde/origin
pkg/image/registry/imagestreamimport/rest.go
GO
apache-2.0
18,453
/* This file is part of SableCC ( http://sablecc.org ).
 *
 * See the NOTICE file distributed with this work for copyright information.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.sablecc.sablecc.semantics;

/**
 * Semantic-analysis context.
 *
 * NOTE(review): currently a stub — it only holds a reference to the owning
 * {@code Grammar} and exposes no behavior. The field is never read or written
 * in this file; presumably it is assigned by package-private code or is a
 * placeholder for future work — confirm before removing.
 */
public class Context {

    // Owning grammar this context belongs to (unused within this file).
    private Grammar grammar;
}
Herve-M/sablecc
src/org/sablecc/sablecc/semantics/Context.java
Java
apache-2.0
794
package network // Copyright (c) Microsoft and contributors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // // See the License for the specific language governing permissions and // limitations under the License. // // Code generated by Microsoft (R) AutoRest Code Generator. // Changes may cause incorrect behavior and will be lost if the code is regenerated. import ( "github.com/Azure/go-autorest/autorest" "github.com/Azure/go-autorest/autorest/azure" "net/http" ) // InterfacesClient is the network Client type InterfacesClient struct { ManagementClient } // NewInterfacesClient creates an instance of the InterfacesClient client. func NewInterfacesClient(subscriptionID string) InterfacesClient { return NewInterfacesClientWithBaseURI(DefaultBaseURI, subscriptionID) } // NewInterfacesClientWithBaseURI creates an instance of the InterfacesClient client. func NewInterfacesClientWithBaseURI(baseURI string, subscriptionID string) InterfacesClient { return InterfacesClient{NewWithBaseURI(baseURI, subscriptionID)} } // CreateOrUpdate creates or updates a network interface. This method may poll for completion. Polling can be canceled // by passing the cancel channel argument. The channel will be used to cancel polling and any outstanding HTTP // requests. // // resourceGroupName is the name of the resource group. networkInterfaceName is the name of the network interface. // parameters is parameters supplied to the create or update network interface operation. 
func (client InterfacesClient) CreateOrUpdate(resourceGroupName string, networkInterfaceName string, parameters Interface, cancel <-chan struct{}) (<-chan Interface, <-chan error) {
	resultChan := make(chan Interface, 1)
	errChan := make(chan error, 1)
	go func() {
		var err error
		var result Interface
		// The deferred block guarantees exactly one value is sent on
		// resultChan (and an error on errChan only when err != nil) before
		// both channels are closed, regardless of which step failed.
		defer func() {
			if err != nil {
				errChan <- err
			}
			resultChan <- result
			close(resultChan)
			close(errChan)
		}()
		req, err := client.CreateOrUpdatePreparer(resourceGroupName, networkInterfaceName, parameters, cancel)
		if err != nil {
			err = autorest.NewErrorWithError(err, "network.InterfacesClient", "CreateOrUpdate", nil, "Failure preparing request")
			return
		}

		resp, err := client.CreateOrUpdateSender(req)
		if err != nil {
			result.Response = autorest.Response{Response: resp}
			err = autorest.NewErrorWithError(err, "network.InterfacesClient", "CreateOrUpdate", resp, "Failure sending request")
			return
		}

		result, err = client.CreateOrUpdateResponder(resp)
		if err != nil {
			err = autorest.NewErrorWithError(err, "network.InterfacesClient", "CreateOrUpdate", resp, "Failure responding to request")
		}
	}()
	return resultChan, errChan
}

// CreateOrUpdatePreparer prepares the CreateOrUpdate request.
func (client InterfacesClient) CreateOrUpdatePreparer(resourceGroupName string, networkInterfaceName string, parameters Interface, cancel <-chan struct{}) (*http.Request, error) {
	// URL path segments are individually escaped via autorest.Encode.
	pathParameters := map[string]interface{}{
		"networkInterfaceName": autorest.Encode("path", networkInterfaceName),
		"resourceGroupName":    autorest.Encode("path", resourceGroupName),
		"subscriptionId":       autorest.Encode("path", client.SubscriptionID),
	}

	const APIVersion = "2017-03-01"
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}

	// PUT with the Interface body serialized as JSON.
	preparer := autorest.CreatePreparer(
		autorest.AsJSON(),
		autorest.AsPut(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}", pathParameters),
		autorest.WithJSON(parameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare(&http.Request{Cancel: cancel})
}

// CreateOrUpdateSender sends the CreateOrUpdate request. The method will close the
// http.Response Body if it receives an error.
func (client InterfacesClient) CreateOrUpdateSender(req *http.Request) (*http.Response, error) {
	return autorest.SendWithSender(client,
		req,
		azure.DoRetryWithRegistration(client.Client),
		azure.DoPollForAsynchronous(client.PollingDelay))
}

// CreateOrUpdateResponder handles the response to the CreateOrUpdate request. The method always
// closes the http.Response Body.
func (client InterfacesClient) CreateOrUpdateResponder(resp *http.Response) (result Interface, err error) {
	// 200 and 201 are the success codes for this operation.
	err = autorest.Respond(
		resp,
		client.ByInspecting(),
		azure.WithErrorUnlessStatusCode(http.StatusCreated, http.StatusOK),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return
}

// Delete deletes the specified network interface. This method may poll for completion. Polling can be canceled by
// passing the cancel channel argument. The channel will be used to cancel polling and any outstanding HTTP requests.
//
// resourceGroupName is the name of the resource group. networkInterfaceName is the name of the network interface.
func (client InterfacesClient) Delete(resourceGroupName string, networkInterfaceName string, cancel <-chan struct{}) (<-chan autorest.Response, <-chan error) {
	resultChan := make(chan autorest.Response, 1)
	errChan := make(chan error, 1)
	go func() {
		var err error
		var result autorest.Response
		// Same single-send/close contract as CreateOrUpdate above.
		defer func() {
			if err != nil {
				errChan <- err
			}
			resultChan <- result
			close(resultChan)
			close(errChan)
		}()
		req, err := client.DeletePreparer(resourceGroupName, networkInterfaceName, cancel)
		if err != nil {
			err = autorest.NewErrorWithError(err, "network.InterfacesClient", "Delete", nil, "Failure preparing request")
			return
		}

		resp, err := client.DeleteSender(req)
		if err != nil {
			result.Response = resp
			err = autorest.NewErrorWithError(err, "network.InterfacesClient", "Delete", resp, "Failure sending request")
			return
		}

		result, err = client.DeleteResponder(resp)
		if err != nil {
			err = autorest.NewErrorWithError(err, "network.InterfacesClient", "Delete", resp, "Failure responding to request")
		}
	}()
	return resultChan, errChan
}

// DeletePreparer prepares the Delete request.
func (client InterfacesClient) DeletePreparer(resourceGroupName string, networkInterfaceName string, cancel <-chan struct{}) (*http.Request, error) {
	pathParameters := map[string]interface{}{
		"networkInterfaceName": autorest.Encode("path", networkInterfaceName),
		"resourceGroupName":    autorest.Encode("path", resourceGroupName),
		"subscriptionId":       autorest.Encode("path", client.SubscriptionID),
	}

	const APIVersion = "2017-03-01"
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}

	preparer := autorest.CreatePreparer(
		autorest.AsDelete(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}", pathParameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare(&http.Request{Cancel: cancel})
}

// DeleteSender sends the Delete request. The method will close the
// http.Response Body if it receives an error.
func (client InterfacesClient) DeleteSender(req *http.Request) (*http.Response, error) {
	return autorest.SendWithSender(client,
		req,
		azure.DoRetryWithRegistration(client.Client),
		azure.DoPollForAsynchronous(client.PollingDelay))
}

// DeleteResponder handles the response to the Delete request. The method always
// closes the http.Response Body.
func (client InterfacesClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) {
	// Delete accepts 200, 202 (accepted/async) and 204 (no content).
	err = autorest.Respond(
		resp,
		client.ByInspecting(),
		azure.WithErrorUnlessStatusCode(http.StatusNoContent, http.StatusAccepted, http.StatusOK),
		autorest.ByClosing())
	result.Response = resp
	return
}

// Get gets information about the specified network interface.
//
// resourceGroupName is the name of the resource group. networkInterfaceName is the name of the network interface.
// expand is expands referenced resources.
func (client InterfacesClient) Get(resourceGroupName string, networkInterfaceName string, expand string) (result Interface, err error) {
	req, err := client.GetPreparer(resourceGroupName, networkInterfaceName, expand)
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.InterfacesClient", "Get", nil, "Failure preparing request")
		return
	}

	resp, err := client.GetSender(req)
	if err != nil {
		result.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "network.InterfacesClient", "Get", resp, "Failure sending request")
		return
	}

	result, err = client.GetResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.InterfacesClient", "Get", resp, "Failure responding to request")
	}

	return
}

// GetPreparer prepares the Get request.
func (client InterfacesClient) GetPreparer(resourceGroupName string, networkInterfaceName string, expand string) (*http.Request, error) {
	pathParameters := map[string]interface{}{
		"networkInterfaceName": autorest.Encode("path", networkInterfaceName),
		"resourceGroupName":    autorest.Encode("path", resourceGroupName),
		"subscriptionId":       autorest.Encode("path", client.SubscriptionID),
	}

	const APIVersion = "2017-03-01"
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}
	// $expand is optional and only added when the caller supplied a value.
	if len(expand) > 0 {
		queryParameters["$expand"] = autorest.Encode("query", expand)
	}

	preparer := autorest.CreatePreparer(
		autorest.AsGet(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}", pathParameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare(&http.Request{})
}

// GetSender sends the Get request. The method will close the
// http.Response Body if it receives an error.
func (client InterfacesClient) GetSender(req *http.Request) (*http.Response, error) {
	return autorest.SendWithSender(client,
		req,
		azure.DoRetryWithRegistration(client.Client))
}

// GetResponder handles the response to the Get request. The method always
// closes the http.Response Body.
func (client InterfacesClient) GetResponder(resp *http.Response) (result Interface, err error) {
	err = autorest.Respond(
		resp,
		client.ByInspecting(),
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return
}

// GetEffectiveRouteTable gets all route tables applied to a network interface. This method may poll for completion.
// Polling can be canceled by passing the cancel channel argument. The channel will be used to cancel polling and any
// outstanding HTTP requests.
//
// resourceGroupName is the name of the resource group. networkInterfaceName is the name of the network interface.
func (client InterfacesClient) GetEffectiveRouteTable(resourceGroupName string, networkInterfaceName string, cancel <-chan struct{}) (<-chan EffectiveRouteListResult, <-chan error) {
	resultChan := make(chan EffectiveRouteListResult, 1)
	errChan := make(chan error, 1)
	go func() {
		var err error
		var result EffectiveRouteListResult
		// Both channels are buffered (size 1), so the deferred sends cannot
		// block; the (possibly zero-valued) result is always published and
		// both channels are always closed so receivers never hang.
		defer func() {
			if err != nil {
				errChan <- err
			}
			resultChan <- result
			close(resultChan)
			close(errChan)
		}()
		req, err := client.GetEffectiveRouteTablePreparer(resourceGroupName, networkInterfaceName, cancel)
		if err != nil {
			err = autorest.NewErrorWithError(err, "network.InterfacesClient", "GetEffectiveRouteTable", nil, "Failure preparing request")
			return
		}

		resp, err := client.GetEffectiveRouteTableSender(req)
		if err != nil {
			result.Response = autorest.Response{Response: resp}
			err = autorest.NewErrorWithError(err, "network.InterfacesClient", "GetEffectiveRouteTable", resp, "Failure sending request")
			return
		}

		result, err = client.GetEffectiveRouteTableResponder(resp)
		if err != nil {
			err = autorest.NewErrorWithError(err, "network.InterfacesClient", "GetEffectiveRouteTable", resp, "Failure responding to request")
		}
	}()
	return resultChan, errChan
}

// GetEffectiveRouteTablePreparer prepares the GetEffectiveRouteTable request.
func (client InterfacesClient) GetEffectiveRouteTablePreparer(resourceGroupName string, networkInterfaceName string, cancel <-chan struct{}) (*http.Request, error) {
	pathParameters := map[string]interface{}{
		"networkInterfaceName": autorest.Encode("path", networkInterfaceName),
		"resourceGroupName":    autorest.Encode("path", resourceGroupName),
		"subscriptionId":       autorest.Encode("path", client.SubscriptionID),
	}

	const APIVersion = "2017-03-01"
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}

	preparer := autorest.CreatePreparer(
		autorest.AsPost(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/effectiveRouteTable", pathParameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare(&http.Request{Cancel: cancel})
}

// GetEffectiveRouteTableSender sends the GetEffectiveRouteTable request. The method will close the
// http.Response Body if it receives an error.
func (client InterfacesClient) GetEffectiveRouteTableSender(req *http.Request) (*http.Response, error) {
	return autorest.SendWithSender(client,
		req,
		azure.DoRetryWithRegistration(client.Client),
		azure.DoPollForAsynchronous(client.PollingDelay))
}

// GetEffectiveRouteTableResponder handles the response to the GetEffectiveRouteTable request. The method always
// closes the http.Response Body.
func (client InterfacesClient) GetEffectiveRouteTableResponder(resp *http.Response) (result EffectiveRouteListResult, err error) {
	err = autorest.Respond(
		resp,
		client.ByInspecting(),
		azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return
}

// GetVirtualMachineScaleSetNetworkInterface get the specified network interface in a virtual machine scale set.
//
// resourceGroupName is the name of the resource group. virtualMachineScaleSetName is the name of the virtual machine
// scale set. virtualmachineIndex is the virtual machine index. networkInterfaceName is the name of the network
// interface. expand is expands referenced resources.
func (client InterfacesClient) GetVirtualMachineScaleSetNetworkInterface(resourceGroupName string, virtualMachineScaleSetName string, virtualmachineIndex string, networkInterfaceName string, expand string) (result Interface, err error) {
	req, err := client.GetVirtualMachineScaleSetNetworkInterfacePreparer(resourceGroupName, virtualMachineScaleSetName, virtualmachineIndex, networkInterfaceName, expand)
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.InterfacesClient", "GetVirtualMachineScaleSetNetworkInterface", nil, "Failure preparing request")
		return
	}

	resp, err := client.GetVirtualMachineScaleSetNetworkInterfaceSender(req)
	if err != nil {
		result.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "network.InterfacesClient", "GetVirtualMachineScaleSetNetworkInterface", resp, "Failure sending request")
		return
	}

	result, err = client.GetVirtualMachineScaleSetNetworkInterfaceResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.InterfacesClient", "GetVirtualMachineScaleSetNetworkInterface", resp, "Failure responding to request")
	}

	return
}

// GetVirtualMachineScaleSetNetworkInterfacePreparer prepares the GetVirtualMachineScaleSetNetworkInterface request.
func (client InterfacesClient) GetVirtualMachineScaleSetNetworkInterfacePreparer(resourceGroupName string, virtualMachineScaleSetName string, virtualmachineIndex string, networkInterfaceName string, expand string) (*http.Request, error) {
	pathParameters := map[string]interface{}{
		"networkInterfaceName":       autorest.Encode("path", networkInterfaceName),
		"resourceGroupName":          autorest.Encode("path", resourceGroupName),
		"subscriptionId":             autorest.Encode("path", client.SubscriptionID),
		"virtualmachineIndex":        autorest.Encode("path", virtualmachineIndex),
		"virtualMachineScaleSetName": autorest.Encode("path", virtualMachineScaleSetName),
	}

	// NOTE: scale-set NIC operations are routed through the compute resource
	// provider and use its API version, not the 2017-03-01 network version
	// used by the rest of this client.
	const APIVersion = "2017-03-30"
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}
	if len(expand) > 0 {
		queryParameters["$expand"] = autorest.Encode("query", expand)
	}

	preparer := autorest.CreatePreparer(
		autorest.AsGet(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces/{networkInterfaceName}", pathParameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare(&http.Request{})
}

// GetVirtualMachineScaleSetNetworkInterfaceSender sends the GetVirtualMachineScaleSetNetworkInterface request. The method will close the
// http.Response Body if it receives an error.
func (client InterfacesClient) GetVirtualMachineScaleSetNetworkInterfaceSender(req *http.Request) (*http.Response, error) {
	return autorest.SendWithSender(client,
		req,
		azure.DoRetryWithRegistration(client.Client))
}

// GetVirtualMachineScaleSetNetworkInterfaceResponder handles the response to the GetVirtualMachineScaleSetNetworkInterface request. The method always
// closes the http.Response Body.
func (client InterfacesClient) GetVirtualMachineScaleSetNetworkInterfaceResponder(resp *http.Response) (result Interface, err error) {
	err = autorest.Respond(
		resp,
		client.ByInspecting(),
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return
}

// List gets all network interfaces in a resource group.
//
// resourceGroupName is the name of the resource group.
func (client InterfacesClient) List(resourceGroupName string) (result InterfaceListResult, err error) {
	req, err := client.ListPreparer(resourceGroupName)
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.InterfacesClient", "List", nil, "Failure preparing request")
		return
	}

	resp, err := client.ListSender(req)
	if err != nil {
		result.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "network.InterfacesClient", "List", resp, "Failure sending request")
		return
	}

	result, err = client.ListResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.InterfacesClient", "List", resp, "Failure responding to request")
	}

	return
}

// ListPreparer prepares the List request.
func (client InterfacesClient) ListPreparer(resourceGroupName string) (*http.Request, error) {
	pathParameters := map[string]interface{}{
		"resourceGroupName": autorest.Encode("path", resourceGroupName),
		"subscriptionId":    autorest.Encode("path", client.SubscriptionID),
	}

	const APIVersion = "2017-03-01"
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}

	preparer := autorest.CreatePreparer(
		autorest.AsGet(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces", pathParameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare(&http.Request{})
}

// ListSender sends the List request. The method will close the
// http.Response Body if it receives an error.
func (client InterfacesClient) ListSender(req *http.Request) (*http.Response, error) {
	return autorest.SendWithSender(client,
		req,
		azure.DoRetryWithRegistration(client.Client))
}

// ListResponder handles the response to the List request. The method always
// closes the http.Response Body.
func (client InterfacesClient) ListResponder(resp *http.Response) (result InterfaceListResult, err error) {
	err = autorest.Respond(
		resp,
		client.ByInspecting(),
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return
}

// ListNextResults retrieves the next set of results, if any.
func (client InterfacesClient) ListNextResults(lastResults InterfaceListResult) (result InterfaceListResult, err error) {
	req, err := lastResults.InterfaceListResultPreparer()
	if err != nil {
		return result, autorest.NewErrorWithError(err, "network.InterfacesClient", "List", nil, "Failure preparing next results request")
	}
	// A nil request with a nil error means there is no next page; the zero
	// result (no Value, no NextLink) is returned as the pagination sentinel.
	if req == nil {
		return
	}

	resp, err := client.ListSender(req)
	if err != nil {
		result.Response = autorest.Response{Response: resp}
		return result, autorest.NewErrorWithError(err, "network.InterfacesClient", "List", resp, "Failure sending next results request")
	}

	result, err = client.ListResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.InterfacesClient", "List", resp, "Failure responding to next results request")
	}

	return
}

// ListComplete gets all elements from the list without paging.
func (client InterfacesClient) ListComplete(resourceGroupName string, cancel <-chan struct{}) (<-chan Interface, <-chan error) {
	resultChan := make(chan Interface)
	errChan := make(chan error, 1)
	go func() {
		defer func() {
			close(resultChan)
			close(errChan)
		}()
		list, err := client.List(resourceGroupName)
		if err != nil {
			errChan <- err
			return
		}
		if list.Value != nil {
			for _, item := range *list.Value {
				select {
				case <-cancel:
					return
				case resultChan <- item:
					// Intentionally left blank
				}
			}
		}
		// Keep following NextLink until the service reports no further pages.
		for list.NextLink != nil {
			list, err = client.ListNextResults(list)
			if err != nil {
				errChan <- err
				return
			}
			if list.Value != nil {
				for _, item := range *list.Value {
					select {
					case <-cancel:
						return
					case resultChan <- item:
						// Intentionally left blank
					}
				}
			}
		}
	}()
	return resultChan, errChan
}

// ListAll gets all network interfaces in a subscription.
func (client InterfacesClient) ListAll() (result InterfaceListResult, err error) {
	req, err := client.ListAllPreparer()
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.InterfacesClient", "ListAll", nil, "Failure preparing request")
		return
	}

	resp, err := client.ListAllSender(req)
	if err != nil {
		result.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "network.InterfacesClient", "ListAll", resp, "Failure sending request")
		return
	}

	result, err = client.ListAllResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.InterfacesClient", "ListAll", resp, "Failure responding to request")
	}

	return
}

// ListAllPreparer prepares the ListAll request.
func (client InterfacesClient) ListAllPreparer() (*http.Request, error) {
	pathParameters := map[string]interface{}{
		"subscriptionId": autorest.Encode("path", client.SubscriptionID),
	}

	const APIVersion = "2017-03-01"
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}

	preparer := autorest.CreatePreparer(
		autorest.AsGet(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/subscriptions/{subscriptionId}/providers/Microsoft.Network/networkInterfaces", pathParameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare(&http.Request{})
}

// ListAllSender sends the ListAll request. The method will close the
// http.Response Body if it receives an error.
func (client InterfacesClient) ListAllSender(req *http.Request) (*http.Response, error) {
	return autorest.SendWithSender(client,
		req,
		azure.DoRetryWithRegistration(client.Client))
}

// ListAllResponder handles the response to the ListAll request. The method always
// closes the http.Response Body.
func (client InterfacesClient) ListAllResponder(resp *http.Response) (result InterfaceListResult, err error) {
	err = autorest.Respond(
		resp,
		client.ByInspecting(),
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return
}

// ListAllNextResults retrieves the next set of results, if any.
func (client InterfacesClient) ListAllNextResults(lastResults InterfaceListResult) (result InterfaceListResult, err error) {
	req, err := lastResults.InterfaceListResultPreparer()
	if err != nil {
		return result, autorest.NewErrorWithError(err, "network.InterfacesClient", "ListAll", nil, "Failure preparing next results request")
	}
	// A nil request with a nil error means the previous page was the last one.
	if req == nil {
		return
	}

	resp, err := client.ListAllSender(req)
	if err != nil {
		result.Response = autorest.Response{Response: resp}
		return result, autorest.NewErrorWithError(err, "network.InterfacesClient", "ListAll", resp, "Failure sending next results request")
	}

	result, err = client.ListAllResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.InterfacesClient", "ListAll", resp, "Failure responding to next results request")
	}

	return
}

// ListAllComplete gets all elements from the list without paging.
func (client InterfacesClient) ListAllComplete(cancel <-chan struct{}) (<-chan Interface, <-chan error) {
	resultChan := make(chan Interface)
	errChan := make(chan error, 1)
	go func() {
		defer func() {
			close(resultChan)
			close(errChan)
		}()
		list, err := client.ListAll()
		if err != nil {
			errChan <- err
			return
		}
		if list.Value != nil {
			for _, item := range *list.Value {
				select {
				case <-cancel:
					return
				case resultChan <- item:
					// Intentionally left blank
				}
			}
		}
		for list.NextLink != nil {
			list, err = client.ListAllNextResults(list)
			if err != nil {
				errChan <- err
				return
			}
			if list.Value != nil {
				for _, item := range *list.Value {
					select {
					case <-cancel:
						return
					case resultChan <- item:
						// Intentionally left blank
					}
				}
			}
		}
	}()
	return resultChan, errChan
}

// ListEffectiveNetworkSecurityGroups gets all network security groups applied to a network interface. This method may
// poll for completion. Polling can be canceled by passing the cancel channel argument. The channel will be used to
// cancel polling and any outstanding HTTP requests.
//
// resourceGroupName is the name of the resource group.
// networkInterfaceName is the name of the network interface.
func (client InterfacesClient) ListEffectiveNetworkSecurityGroups(resourceGroupName string, networkInterfaceName string, cancel <-chan struct{}) (<-chan EffectiveNetworkSecurityGroupListResult, <-chan error) {
	resultChan := make(chan EffectiveNetworkSecurityGroupListResult, 1)
	errChan := make(chan error, 1)
	go func() {
		var err error
		var result EffectiveNetworkSecurityGroupListResult
		// Buffered channels make the deferred sends non-blocking; a result
		// (zero-valued on failure) is always published before both channels
		// are closed.
		defer func() {
			if err != nil {
				errChan <- err
			}
			resultChan <- result
			close(resultChan)
			close(errChan)
		}()
		req, err := client.ListEffectiveNetworkSecurityGroupsPreparer(resourceGroupName, networkInterfaceName, cancel)
		if err != nil {
			err = autorest.NewErrorWithError(err, "network.InterfacesClient", "ListEffectiveNetworkSecurityGroups", nil, "Failure preparing request")
			return
		}

		resp, err := client.ListEffectiveNetworkSecurityGroupsSender(req)
		if err != nil {
			result.Response = autorest.Response{Response: resp}
			err = autorest.NewErrorWithError(err, "network.InterfacesClient", "ListEffectiveNetworkSecurityGroups", resp, "Failure sending request")
			return
		}

		result, err = client.ListEffectiveNetworkSecurityGroupsResponder(resp)
		if err != nil {
			err = autorest.NewErrorWithError(err, "network.InterfacesClient", "ListEffectiveNetworkSecurityGroups", resp, "Failure responding to request")
		}
	}()
	return resultChan, errChan
}

// ListEffectiveNetworkSecurityGroupsPreparer prepares the ListEffectiveNetworkSecurityGroups request.
func (client InterfacesClient) ListEffectiveNetworkSecurityGroupsPreparer(resourceGroupName string, networkInterfaceName string, cancel <-chan struct{}) (*http.Request, error) {
	pathParameters := map[string]interface{}{
		"networkInterfaceName": autorest.Encode("path", networkInterfaceName),
		"resourceGroupName":    autorest.Encode("path", resourceGroupName),
		"subscriptionId":       autorest.Encode("path", client.SubscriptionID),
	}

	const APIVersion = "2017-03-01"
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}

	preparer := autorest.CreatePreparer(
		autorest.AsPost(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/effectiveNetworkSecurityGroups", pathParameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare(&http.Request{Cancel: cancel})
}

// ListEffectiveNetworkSecurityGroupsSender sends the ListEffectiveNetworkSecurityGroups request. The method will close the
// http.Response Body if it receives an error.
func (client InterfacesClient) ListEffectiveNetworkSecurityGroupsSender(req *http.Request) (*http.Response, error) {
	return autorest.SendWithSender(client,
		req,
		azure.DoRetryWithRegistration(client.Client),
		azure.DoPollForAsynchronous(client.PollingDelay))
}

// ListEffectiveNetworkSecurityGroupsResponder handles the response to the ListEffectiveNetworkSecurityGroups request. The method always
// closes the http.Response Body.
func (client InterfacesClient) ListEffectiveNetworkSecurityGroupsResponder(resp *http.Response) (result EffectiveNetworkSecurityGroupListResult, err error) {
	err = autorest.Respond(
		resp,
		client.ByInspecting(),
		azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return
}

// ListVirtualMachineScaleSetNetworkInterfaces gets all network interfaces in a virtual machine scale set.
//
// resourceGroupName is the name of the resource group. virtualMachineScaleSetName is the name of the virtual machine
// scale set.
func (client InterfacesClient) ListVirtualMachineScaleSetNetworkInterfaces(resourceGroupName string, virtualMachineScaleSetName string) (result InterfaceListResult, err error) {
	req, err := client.ListVirtualMachineScaleSetNetworkInterfacesPreparer(resourceGroupName, virtualMachineScaleSetName)
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.InterfacesClient", "ListVirtualMachineScaleSetNetworkInterfaces", nil, "Failure preparing request")
		return
	}

	resp, err := client.ListVirtualMachineScaleSetNetworkInterfacesSender(req)
	if err != nil {
		result.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "network.InterfacesClient", "ListVirtualMachineScaleSetNetworkInterfaces", resp, "Failure sending request")
		return
	}

	result, err = client.ListVirtualMachineScaleSetNetworkInterfacesResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.InterfacesClient", "ListVirtualMachineScaleSetNetworkInterfaces", resp, "Failure responding to request")
	}

	return
}

// ListVirtualMachineScaleSetNetworkInterfacesPreparer prepares the ListVirtualMachineScaleSetNetworkInterfaces request.
func (client InterfacesClient) ListVirtualMachineScaleSetNetworkInterfacesPreparer(resourceGroupName string, virtualMachineScaleSetName string) (*http.Request, error) {
	pathParameters := map[string]interface{}{
		"resourceGroupName":          autorest.Encode("path", resourceGroupName),
		"subscriptionId":             autorest.Encode("path", client.SubscriptionID),
		"virtualMachineScaleSetName": autorest.Encode("path", virtualMachineScaleSetName),
	}

	// Scale-set NIC listing goes through the compute resource provider and
	// therefore uses the compute API version.
	const APIVersion = "2017-03-30"
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}

	preparer := autorest.CreatePreparer(
		autorest.AsGet(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/networkInterfaces", pathParameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare(&http.Request{})
}

// ListVirtualMachineScaleSetNetworkInterfacesSender sends the ListVirtualMachineScaleSetNetworkInterfaces request. The method will close the
// http.Response Body if it receives an error.
func (client InterfacesClient) ListVirtualMachineScaleSetNetworkInterfacesSender(req *http.Request) (*http.Response, error) {
	return autorest.SendWithSender(client,
		req,
		azure.DoRetryWithRegistration(client.Client))
}

// ListVirtualMachineScaleSetNetworkInterfacesResponder handles the response to the ListVirtualMachineScaleSetNetworkInterfaces request. The method always
// closes the http.Response Body.
func (client InterfacesClient) ListVirtualMachineScaleSetNetworkInterfacesResponder(resp *http.Response) (result InterfaceListResult, err error) {
	err = autorest.Respond(
		resp,
		client.ByInspecting(),
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return
}

// ListVirtualMachineScaleSetNetworkInterfacesNextResults retrieves the next set of results, if any.
func (client InterfacesClient) ListVirtualMachineScaleSetNetworkInterfacesNextResults(lastResults InterfaceListResult) (result InterfaceListResult, err error) {
	req, err := lastResults.InterfaceListResultPreparer()
	if err != nil {
		return result, autorest.NewErrorWithError(err, "network.InterfacesClient", "ListVirtualMachineScaleSetNetworkInterfaces", nil, "Failure preparing next results request")
	}
	// A nil request with a nil error means there is no next page.
	if req == nil {
		return
	}

	resp, err := client.ListVirtualMachineScaleSetNetworkInterfacesSender(req)
	if err != nil {
		result.Response = autorest.Response{Response: resp}
		return result, autorest.NewErrorWithError(err, "network.InterfacesClient", "ListVirtualMachineScaleSetNetworkInterfaces", resp, "Failure sending next results request")
	}

	result, err = client.ListVirtualMachineScaleSetNetworkInterfacesResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.InterfacesClient", "ListVirtualMachineScaleSetNetworkInterfaces", resp, "Failure responding to next results request")
	}

	return
}

// ListVirtualMachineScaleSetNetworkInterfacesComplete gets all elements from the list without paging.
func (client InterfacesClient) ListVirtualMachineScaleSetNetworkInterfacesComplete(resourceGroupName string, virtualMachineScaleSetName string, cancel <-chan struct{}) (<-chan Interface, <-chan error) {
	resultChan := make(chan Interface)
	errChan := make(chan error, 1)
	go func() {
		defer func() {
			close(resultChan)
			close(errChan)
		}()
		list, err := client.ListVirtualMachineScaleSetNetworkInterfaces(resourceGroupName, virtualMachineScaleSetName)
		if err != nil {
			errChan <- err
			return
		}
		if list.Value != nil {
			for _, item := range *list.Value {
				select {
				case <-cancel:
					return
				case resultChan <- item:
					// Intentionally left blank
				}
			}
		}
		for list.NextLink != nil {
			list, err = client.ListVirtualMachineScaleSetNetworkInterfacesNextResults(list)
			if err != nil {
				errChan <- err
				return
			}
			if list.Value != nil {
				for _, item := range *list.Value {
					select {
					case <-cancel:
						return
					case resultChan <- item:
						// Intentionally left blank
					}
				}
			}
		}
	}()
	return resultChan, errChan
}

// ListVirtualMachineScaleSetVMNetworkInterfaces gets information about all network interfaces in a virtual machine in
// a virtual machine scale set.
//
// resourceGroupName is the name of the resource group. virtualMachineScaleSetName is the name of the virtual machine
// scale set. virtualmachineIndex is the virtual machine index.
func (client InterfacesClient) ListVirtualMachineScaleSetVMNetworkInterfaces(resourceGroupName string, virtualMachineScaleSetName string, virtualmachineIndex string) (result InterfaceListResult, err error) {
	req, err := client.ListVirtualMachineScaleSetVMNetworkInterfacesPreparer(resourceGroupName, virtualMachineScaleSetName, virtualmachineIndex)
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.InterfacesClient", "ListVirtualMachineScaleSetVMNetworkInterfaces", nil, "Failure preparing request")
		return
	}

	resp, err := client.ListVirtualMachineScaleSetVMNetworkInterfacesSender(req)
	if err != nil {
		result.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "network.InterfacesClient", "ListVirtualMachineScaleSetVMNetworkInterfaces", resp, "Failure sending request")
		return
	}

	result, err = client.ListVirtualMachineScaleSetVMNetworkInterfacesResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.InterfacesClient", "ListVirtualMachineScaleSetVMNetworkInterfaces", resp, "Failure responding to request")
	}

	return
}

// ListVirtualMachineScaleSetVMNetworkInterfacesPreparer prepares the ListVirtualMachineScaleSetVMNetworkInterfaces request.
func (client InterfacesClient) ListVirtualMachineScaleSetVMNetworkInterfacesPreparer(resourceGroupName string, virtualMachineScaleSetName string, virtualmachineIndex string) (*http.Request, error) {
	pathParameters := map[string]interface{}{
		"resourceGroupName":          autorest.Encode("path", resourceGroupName),
		"subscriptionId":             autorest.Encode("path", client.SubscriptionID),
		"virtualmachineIndex":        autorest.Encode("path", virtualmachineIndex),
		"virtualMachineScaleSetName": autorest.Encode("path", virtualMachineScaleSetName),
	}

	// Routed through the compute resource provider; uses the compute API version.
	const APIVersion = "2017-03-30"
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}

	preparer := autorest.CreatePreparer(
		autorest.AsGet(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces", pathParameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare(&http.Request{})
}

// ListVirtualMachineScaleSetVMNetworkInterfacesSender sends the ListVirtualMachineScaleSetVMNetworkInterfaces request. The method will close the
// http.Response Body if it receives an error.
func (client InterfacesClient) ListVirtualMachineScaleSetVMNetworkInterfacesSender(req *http.Request) (*http.Response, error) {
	return autorest.SendWithSender(client,
		req,
		azure.DoRetryWithRegistration(client.Client))
}

// ListVirtualMachineScaleSetVMNetworkInterfacesResponder handles the response to the ListVirtualMachineScaleSetVMNetworkInterfaces request. The method always
// closes the http.Response Body.
func (client InterfacesClient) ListVirtualMachineScaleSetVMNetworkInterfacesResponder(resp *http.Response) (result InterfaceListResult, err error) {
	err = autorest.Respond(
		resp,
		client.ByInspecting(),
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return
}

// ListVirtualMachineScaleSetVMNetworkInterfacesNextResults retrieves the next set of results, if any.
func (client InterfacesClient) ListVirtualMachineScaleSetVMNetworkInterfacesNextResults(lastResults InterfaceListResult) (result InterfaceListResult, err error) {
	req, err := lastResults.InterfaceListResultPreparer()
	if err != nil {
		return result, autorest.NewErrorWithError(err, "network.InterfacesClient", "ListVirtualMachineScaleSetVMNetworkInterfaces", nil, "Failure preparing next results request")
	}
	// A nil request with a nil error means there is no next page.
	if req == nil {
		return
	}

	resp, err := client.ListVirtualMachineScaleSetVMNetworkInterfacesSender(req)
	if err != nil {
		result.Response = autorest.Response{Response: resp}
		return result, autorest.NewErrorWithError(err, "network.InterfacesClient", "ListVirtualMachineScaleSetVMNetworkInterfaces", resp, "Failure sending next results request")
	}

	result, err = client.ListVirtualMachineScaleSetVMNetworkInterfacesResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.InterfacesClient", "ListVirtualMachineScaleSetVMNetworkInterfaces", resp, "Failure responding to next results request")
	}

	return
}

// ListVirtualMachineScaleSetVMNetworkInterfacesComplete gets all elements from the list without paging.
func (client InterfacesClient) ListVirtualMachineScaleSetVMNetworkInterfacesComplete(resourceGroupName string, virtualMachineScaleSetName string, virtualmachineIndex string, cancel <-chan struct{}) (<-chan Interface, <-chan error) {
	resultChan := make(chan Interface)
	errChan := make(chan error, 1)
	go func() {
		defer func() {
			close(resultChan)
			close(errChan)
		}()
		list, err := client.ListVirtualMachineScaleSetVMNetworkInterfaces(resourceGroupName, virtualMachineScaleSetName, virtualmachineIndex)
		if err != nil {
			errChan <- err
			return
		}
		if list.Value != nil {
			for _, item := range *list.Value {
				select {
				case <-cancel:
					return
				case resultChan <- item:
					// Intentionally left blank
				}
			}
		}
		for list.NextLink != nil {
			list, err = client.ListVirtualMachineScaleSetVMNetworkInterfacesNextResults(list)
			if err != nil {
				errChan <- err
				return
			}
			if list.Value != nil {
				for _, item := range *list.Value {
					select {
					case <-cancel:
						return
					case resultChan <- item:
						// Intentionally left blank
					}
				}
			}
		}
	}()
	return resultChan, errChan
}
wojtekzw/imageproxy
vendor/github.com/Azure/azure-sdk-for-go/services/network/mgmt/2017-03-01/network/interfaces.go
GO
apache-2.0
42,507
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.operator; import com.facebook.presto.sql.planner.plan.PlanNodeId; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.ImmutableList; import io.airlift.units.DataSize; import io.airlift.units.Duration; import javax.annotation.Nullable; import javax.annotation.concurrent.Immutable; import java.util.Optional; import static com.google.common.base.Preconditions.checkArgument; import static io.airlift.units.DataSize.succinctBytes; import static java.util.Objects.requireNonNull; import static java.util.concurrent.TimeUnit.NANOSECONDS; @Immutable public class OperatorStats { private final int operatorId; private final PlanNodeId planNodeId; private final String operatorType; private final long totalDrivers; private final long addInputCalls; private final Duration addInputWall; private final Duration addInputCpu; private final Duration addInputUser; private final DataSize inputDataSize; private final long inputPositions; private final double sumSquaredInputPositions; private final long getOutputCalls; private final Duration getOutputWall; private final Duration getOutputCpu; private final Duration getOutputUser; private final DataSize outputDataSize; private final long outputPositions; private final Duration blockedWall; private final long finishCalls; private final Duration finishWall; private final Duration finishCpu; 
private final Duration finishUser; private final DataSize memoryReservation; private final DataSize systemMemoryReservation; private final Optional<BlockedReason> blockedReason; private final OperatorInfo info; @JsonCreator public OperatorStats( @JsonProperty("operatorId") int operatorId, @JsonProperty("planNodeId") PlanNodeId planNodeId, @JsonProperty("operatorType") String operatorType, @JsonProperty("totalDrivers") long totalDrivers, @JsonProperty("addInputCalls") long addInputCalls, @JsonProperty("addInputWall") Duration addInputWall, @JsonProperty("addInputCpu") Duration addInputCpu, @JsonProperty("addInputUser") Duration addInputUser, @JsonProperty("inputDataSize") DataSize inputDataSize, @JsonProperty("inputPositions") long inputPositions, @JsonProperty("sumSquaredInputPositions") double sumSquaredInputPositions, @JsonProperty("getOutputCalls") long getOutputCalls, @JsonProperty("getOutputWall") Duration getOutputWall, @JsonProperty("getOutputCpu") Duration getOutputCpu, @JsonProperty("getOutputUser") Duration getOutputUser, @JsonProperty("outputDataSize") DataSize outputDataSize, @JsonProperty("outputPositions") long outputPositions, @JsonProperty("blockedWall") Duration blockedWall, @JsonProperty("finishCalls") long finishCalls, @JsonProperty("finishWall") Duration finishWall, @JsonProperty("finishCpu") Duration finishCpu, @JsonProperty("finishUser") Duration finishUser, @JsonProperty("memoryReservation") DataSize memoryReservation, @JsonProperty("systemMemoryReservation") DataSize systemMemoryReservation, @JsonProperty("blockedReason") Optional<BlockedReason> blockedReason, @JsonProperty("info") OperatorInfo info) { checkArgument(operatorId >= 0, "operatorId is negative"); this.operatorId = operatorId; this.planNodeId = requireNonNull(planNodeId, "planNodeId is null"); this.operatorType = requireNonNull(operatorType, "operatorType is null"); this.totalDrivers = totalDrivers; this.addInputCalls = addInputCalls; this.addInputWall = 
requireNonNull(addInputWall, "addInputWall is null"); this.addInputCpu = requireNonNull(addInputCpu, "addInputCpu is null"); this.addInputUser = requireNonNull(addInputUser, "addInputUser is null"); this.inputDataSize = requireNonNull(inputDataSize, "inputDataSize is null"); checkArgument(inputPositions >= 0, "inputPositions is negative"); this.inputPositions = inputPositions; this.sumSquaredInputPositions = sumSquaredInputPositions; this.getOutputCalls = getOutputCalls; this.getOutputWall = requireNonNull(getOutputWall, "getOutputWall is null"); this.getOutputCpu = requireNonNull(getOutputCpu, "getOutputCpu is null"); this.getOutputUser = requireNonNull(getOutputUser, "getOutputUser is null"); this.outputDataSize = requireNonNull(outputDataSize, "outputDataSize is null"); checkArgument(outputPositions >= 0, "outputPositions is negative"); this.outputPositions = outputPositions; this.blockedWall = requireNonNull(blockedWall, "blockedWall is null"); this.finishCalls = finishCalls; this.finishWall = requireNonNull(finishWall, "finishWall is null"); this.finishCpu = requireNonNull(finishCpu, "finishCpu is null"); this.finishUser = requireNonNull(finishUser, "finishUser is null"); this.memoryReservation = requireNonNull(memoryReservation, "memoryReservation is null"); this.systemMemoryReservation = requireNonNull(systemMemoryReservation, "systemMemoryReservation is null"); this.blockedReason = blockedReason; this.info = info; } @JsonProperty public int getOperatorId() { return operatorId; } @JsonProperty public PlanNodeId getPlanNodeId() { return planNodeId; } @JsonProperty public String getOperatorType() { return operatorType; } @JsonProperty public long getTotalDrivers() { return totalDrivers; } @JsonProperty public long getAddInputCalls() { return addInputCalls; } @JsonProperty public Duration getAddInputWall() { return addInputWall; } @JsonProperty public Duration getAddInputCpu() { return addInputCpu; } @JsonProperty public Duration getAddInputUser() { return 
addInputUser; } @JsonProperty public DataSize getInputDataSize() { return inputDataSize; } @JsonProperty public long getInputPositions() { return inputPositions; } @JsonProperty public double getSumSquaredInputPositions() { return sumSquaredInputPositions; } @JsonProperty public long getGetOutputCalls() { return getOutputCalls; } @JsonProperty public Duration getGetOutputWall() { return getOutputWall; } @JsonProperty public Duration getGetOutputCpu() { return getOutputCpu; } @JsonProperty public Duration getGetOutputUser() { return getOutputUser; } @JsonProperty public DataSize getOutputDataSize() { return outputDataSize; } @JsonProperty public long getOutputPositions() { return outputPositions; } @JsonProperty public Duration getBlockedWall() { return blockedWall; } @JsonProperty public long getFinishCalls() { return finishCalls; } @JsonProperty public Duration getFinishWall() { return finishWall; } @JsonProperty public Duration getFinishCpu() { return finishCpu; } @JsonProperty public Duration getFinishUser() { return finishUser; } @JsonProperty public DataSize getMemoryReservation() { return memoryReservation; } @JsonProperty public DataSize getSystemMemoryReservation() { return systemMemoryReservation; } @JsonProperty public Optional<BlockedReason> getBlockedReason() { return blockedReason; } @Nullable @JsonProperty public OperatorInfo getInfo() { return info; } public OperatorStats add(OperatorStats... 
operators) { return add(ImmutableList.copyOf(operators)); } public OperatorStats add(Iterable<OperatorStats> operators) { long totalDrivers = this.totalDrivers; long addInputCalls = this.addInputCalls; long addInputWall = this.addInputWall.roundTo(NANOSECONDS); long addInputCpu = this.addInputCpu.roundTo(NANOSECONDS); long addInputUser = this.addInputUser.roundTo(NANOSECONDS); long inputDataSize = this.inputDataSize.toBytes(); long inputPositions = this.inputPositions; double sumSquaredInputPositions = this.sumSquaredInputPositions; long getOutputCalls = this.getOutputCalls; long getOutputWall = this.getOutputWall.roundTo(NANOSECONDS); long getOutputCpu = this.getOutputCpu.roundTo(NANOSECONDS); long getOutputUser = this.getOutputUser.roundTo(NANOSECONDS); long outputDataSize = this.outputDataSize.toBytes(); long outputPositions = this.outputPositions; long blockedWall = this.blockedWall.roundTo(NANOSECONDS); long finishCalls = this.finishCalls; long finishWall = this.finishWall.roundTo(NANOSECONDS); long finishCpu = this.finishCpu.roundTo(NANOSECONDS); long finishUser = this.finishUser.roundTo(NANOSECONDS); long memoryReservation = this.memoryReservation.toBytes(); long systemMemoryReservation = this.systemMemoryReservation.toBytes(); Optional<BlockedReason> blockedReason = this.blockedReason; Mergeable<OperatorInfo> base = getMergeableInfoOrNull(info); for (OperatorStats operator : operators) { checkArgument(operator.getOperatorId() == operatorId, "Expected operatorId to be %s but was %s", operatorId, operator.getOperatorId()); totalDrivers += operator.totalDrivers; addInputCalls += operator.getAddInputCalls(); addInputWall += operator.getAddInputWall().roundTo(NANOSECONDS); addInputCpu += operator.getAddInputCpu().roundTo(NANOSECONDS); addInputUser += operator.getAddInputUser().roundTo(NANOSECONDS); inputDataSize += operator.getInputDataSize().toBytes(); inputPositions += operator.getInputPositions(); sumSquaredInputPositions += 
operator.getSumSquaredInputPositions(); getOutputCalls += operator.getGetOutputCalls(); getOutputWall += operator.getGetOutputWall().roundTo(NANOSECONDS); getOutputCpu += operator.getGetOutputCpu().roundTo(NANOSECONDS); getOutputUser += operator.getGetOutputUser().roundTo(NANOSECONDS); outputDataSize += operator.getOutputDataSize().toBytes(); outputPositions += operator.getOutputPositions(); finishCalls += operator.getFinishCalls(); finishWall += operator.getFinishWall().roundTo(NANOSECONDS); finishCpu += operator.getFinishCpu().roundTo(NANOSECONDS); finishUser += operator.getFinishUser().roundTo(NANOSECONDS); blockedWall += operator.getBlockedWall().roundTo(NANOSECONDS); memoryReservation += operator.getMemoryReservation().toBytes(); systemMemoryReservation += operator.getSystemMemoryReservation().toBytes(); if (operator.getBlockedReason().isPresent()) { blockedReason = operator.getBlockedReason(); } OperatorInfo info = operator.getInfo(); if (base != null && info != null && base.getClass() == info.getClass()) { base = mergeInfo(base, info); } } return new OperatorStats( operatorId, planNodeId, operatorType, totalDrivers, addInputCalls, new Duration(addInputWall, NANOSECONDS).convertToMostSuccinctTimeUnit(), new Duration(addInputCpu, NANOSECONDS).convertToMostSuccinctTimeUnit(), new Duration(addInputUser, NANOSECONDS).convertToMostSuccinctTimeUnit(), succinctBytes(inputDataSize), inputPositions, sumSquaredInputPositions, getOutputCalls, new Duration(getOutputWall, NANOSECONDS).convertToMostSuccinctTimeUnit(), new Duration(getOutputCpu, NANOSECONDS).convertToMostSuccinctTimeUnit(), new Duration(getOutputUser, NANOSECONDS).convertToMostSuccinctTimeUnit(), succinctBytes(outputDataSize), outputPositions, new Duration(blockedWall, NANOSECONDS).convertToMostSuccinctTimeUnit(), finishCalls, new Duration(finishWall, NANOSECONDS).convertToMostSuccinctTimeUnit(), new Duration(finishCpu, NANOSECONDS).convertToMostSuccinctTimeUnit(), new Duration(finishUser, 
NANOSECONDS).convertToMostSuccinctTimeUnit(), succinctBytes(memoryReservation), succinctBytes(systemMemoryReservation), blockedReason, (OperatorInfo) base); } @SuppressWarnings("unchecked") private static Mergeable<OperatorInfo> getMergeableInfoOrNull(OperatorInfo info) { Mergeable<OperatorInfo> base = null; if (info instanceof Mergeable) { base = (Mergeable<OperatorInfo>) info; } return base; } @SuppressWarnings("unchecked") private static <T> Mergeable<T> mergeInfo(Mergeable<T> base, T other) { return (Mergeable<T>) base.mergeWith(other); } public OperatorStats summarize() { return new OperatorStats( operatorId, planNodeId, operatorType, totalDrivers, addInputCalls, addInputWall, addInputCpu, addInputUser, inputDataSize, inputPositions, sumSquaredInputPositions, getOutputCalls, getOutputWall, getOutputCpu, getOutputUser, outputDataSize, outputPositions, blockedWall, finishCalls, finishWall, finishCpu, finishUser, memoryReservation, systemMemoryReservation, blockedReason, (info != null && info.isFinal()) ? info : null); } }
marsorp/blog
presto166/presto-main/src/main/java/com/facebook/presto/operator/OperatorStats.java
Java
apache-2.0
15,302
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.client; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.GenericAction; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteAction; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotAction; import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptAction; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheAction; import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.flush.FlushAction; import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; import org.elasticsearch.action.delete.DeleteAction; import org.elasticsearch.action.get.GetAction; import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.search.SearchAction; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.env.Environment; import 
org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import java.util.HashMap; import java.util.Map; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; public abstract class AbstractClientHeadersTestCase extends ESTestCase { protected static final Settings HEADER_SETTINGS = Settings.builder() .put(ThreadContext.PREFIX + ".key1", "val1") .put(ThreadContext.PREFIX + ".key2", "val 2") .build(); private static final GenericAction[] ACTIONS = new GenericAction[] { // client actions GetAction.INSTANCE, SearchAction.INSTANCE, DeleteAction.INSTANCE, DeleteStoredScriptAction.INSTANCE, IndexAction.INSTANCE, // cluster admin actions ClusterStatsAction.INSTANCE, CreateSnapshotAction.INSTANCE, ClusterRerouteAction.INSTANCE, // indices admin actions CreateIndexAction.INSTANCE, IndicesStatsAction.INSTANCE, ClearIndicesCacheAction.INSTANCE, FlushAction.INSTANCE }; protected ThreadPool threadPool; private Client client; @Override public void setUp() throws Exception { super.setUp(); Settings settings = Settings.builder() .put(HEADER_SETTINGS) .put("path.home", createTempDir().toString()) .put("node.name", "test-" + getTestName()) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); threadPool = new ThreadPool(settings); client = buildClient(settings, ACTIONS); } @Override public void tearDown() throws Exception { super.tearDown(); client.close(); terminate(threadPool); } protected abstract Client buildClient(Settings headersSettings, GenericAction[] testedActions); public void testActions() { // TODO this is a really shitty way to test it, we need to figure out a way to test all the client methods // without specifying each one (reflection doesn't as each action needs its own special settings, without // them, request validation will fail before the test is executed. (one option is to enable disabling the // validation in the settings??? 
- ugly and conceptually wrong) // choosing arbitrary top level actions to test client.prepareGet("idx", "type", "id").execute(new AssertingActionListener<>(GetAction.NAME, client.threadPool())); client.prepareSearch().execute(new AssertingActionListener<>(SearchAction.NAME, client.threadPool())); client.prepareDelete("idx", "type", "id").execute(new AssertingActionListener<>(DeleteAction.NAME, client.threadPool())); client.admin().cluster().prepareDeleteStoredScript("lang", "id").execute(new AssertingActionListener<>(DeleteStoredScriptAction.NAME, client.threadPool())); client.prepareIndex("idx", "type", "id").setSource("source", XContentType.JSON).execute(new AssertingActionListener<>(IndexAction.NAME, client.threadPool())); // choosing arbitrary cluster admin actions to test client.admin().cluster().prepareClusterStats().execute(new AssertingActionListener<>(ClusterStatsAction.NAME, client.threadPool())); client.admin().cluster().prepareCreateSnapshot("repo", "bck").execute(new AssertingActionListener<>(CreateSnapshotAction.NAME, client.threadPool())); client.admin().cluster().prepareReroute().execute(new AssertingActionListener<>(ClusterRerouteAction.NAME, client.threadPool())); // choosing arbitrary indices admin actions to test client.admin().indices().prepareCreate("idx").execute(new AssertingActionListener<>(CreateIndexAction.NAME, client.threadPool())); client.admin().indices().prepareStats().execute(new AssertingActionListener<>(IndicesStatsAction.NAME, client.threadPool())); client.admin().indices().prepareClearCache("idx1", "idx2").execute(new AssertingActionListener<>(ClearIndicesCacheAction.NAME, client.threadPool())); client.admin().indices().prepareFlush().execute(new AssertingActionListener<>(FlushAction.NAME, client.threadPool())); } public void testOverrideHeader() throws Exception { String key1Val = randomAlphaOfLength(5); Map<String, String> expected = new HashMap<>(); expected.put("key1", key1Val); expected.put("key2", "val 2"); 
client.threadPool().getThreadContext().putHeader("key1", key1Val); client.prepareGet("idx", "type", "id") .execute(new AssertingActionListener<>(GetAction.NAME, expected, client.threadPool())); client.admin().cluster().prepareClusterStats() .execute(new AssertingActionListener<>(ClusterStatsAction.NAME, expected, client.threadPool())); client.admin().indices().prepareCreate("idx") .execute(new AssertingActionListener<>(CreateIndexAction.NAME, expected, client.threadPool())); } protected static void assertHeaders(Map<String, String> headers, Map<String, String> expected) { assertNotNull(headers); assertEquals(expected.size(), headers.size()); for (Map.Entry<String, String> expectedEntry : expected.entrySet()) { assertEquals(headers.get(expectedEntry.getKey()), expectedEntry.getValue()); } } protected static void assertHeaders(ThreadPool pool) { assertHeaders(pool.getThreadContext().getHeaders(), (Map)HEADER_SETTINGS.getAsSettings(ThreadContext.PREFIX).getAsStructuredMap()); } public static class InternalException extends Exception { private final String action; public InternalException(String action) { this.action = action; } } protected static class AssertingActionListener<T> implements ActionListener<T> { private final String action; private final Map<String, String> expectedHeaders; private final ThreadPool pool; public AssertingActionListener(String action, ThreadPool pool) { this(action, (Map)HEADER_SETTINGS.getAsSettings(ThreadContext.PREFIX).getAsStructuredMap(), pool); } public AssertingActionListener(String action, Map<String, String> expectedHeaders, ThreadPool pool) { this.action = action; this.expectedHeaders = expectedHeaders; this.pool = pool; } @Override public void onResponse(T t) { fail("an internal exception was expected for action [" + action + "]"); } @Override public void onFailure(Exception t) { Throwable e = unwrap(t, InternalException.class); assertThat("expected action [" + action + "] to throw an internal exception", e, notNullValue()); 
assertThat(action, equalTo(((InternalException) e).action)); Map<String, String> headers = pool.getThreadContext().getHeaders(); assertHeaders(headers, expectedHeaders); } public Throwable unwrap(Throwable t, Class<? extends Throwable> exceptionType) { int counter = 0; Throwable result = t; while (!exceptionType.isInstance(result)) { if (result.getCause() == null) { return null; } if (result.getCause() == result) { return null; } if (counter++ > 10) { // dear god, if we got more than 10 levels down, WTF? just bail fail("Exception cause unwrapping ran for 10 levels: " + ExceptionsHelper.stackTrace(t)); return null; } result = result.getCause(); } return result; } } }
nezirus/elasticsearch
core/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java
Java
apache-2.0
9,899
// Copyright 2014 The Oppia Authors. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS-IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * @fileoverview Minor general functional components for end-to-end testing * with protractor. */ var editor = require('./editor.js'); // Time (in ms) to wait when the system needs time for some computations. var WAIT_TIME = 4000; // Optionally accepts a waitTime integer in milliseconds. var waitForSystem = function() { var waitTime; if (arguments.length === 1) { waitTime = arguments[0]; } else { waitTime = WAIT_TIME; } browser.sleep(waitTime); }; var scrollToTop = function() { browser.executeScript('window.scrollTo(0,0);'); }; // We will report all console logs of level greater than this. 
var CONSOLE_LOG_THRESHOLD = 900; var CONSOLE_ERRORS_TO_IGNORE = []; var checkForConsoleErrors = function(errorsToIgnore) { var irrelevantErrors = errorsToIgnore.concat(CONSOLE_ERRORS_TO_IGNORE); browser.manage().logs().get('browser').then(function(browserLogs) { var fatalErrors = []; for (var i = 0; i < browserLogs.length; i++) { if (browserLogs[i].level.value > CONSOLE_LOG_THRESHOLD) { var errorFatal = true; for (var j = 0; j < irrelevantErrors.length; j++) { if (browserLogs[i].message.match(irrelevantErrors[j])) { errorFatal = false; } } if (errorFatal) { fatalErrors.push(browserLogs[i]); } } } expect(fatalErrors).toEqual([]); }); }; var SERVER_URL_PREFIX = 'http://localhost:9001'; var LIBRARY_URL_SUFFIX = '/library'; var EDITOR_URL_SLICE = '/create/'; var PLAYER_URL_SLICE = '/explore/'; var LOGIN_URL_SUFFIX = '/_ah/login'; var ADMIN_URL_SUFFIX = '/admin'; var MODERATOR_URL_SUFFIX = '/moderator'; var DONATION_THANK_URL_SUFFIX = '/thanks'; // Note that this only works in dev, due to the use of cache slugs in prod. var SCRIPTS_URL_SLICE = '/assets/scripts/'; var EXPLORATION_ID_LENGTH = 12; var FIRST_STATE_DEFAULT_NAME = 'Introduction'; var _getExplorationId = function(currentUrlPrefix) { return { then: function(callbackFunction) { browser.getCurrentUrl().then(function(url) { expect(url.slice(0, currentUrlPrefix.length)).toBe(currentUrlPrefix); var explorationId = url.slice( currentUrlPrefix.length, currentUrlPrefix.length + EXPLORATION_ID_LENGTH); return callbackFunction(explorationId); }); } }; }; // If we are currently in the editor, this will return a promise with the // exploration ID. var getExplorationIdFromEditor = function() { return _getExplorationId(SERVER_URL_PREFIX + EDITOR_URL_SLICE); }; // Likewise for the player var getExplorationIdFromPlayer = function() { return _getExplorationId(SERVER_URL_PREFIX + PLAYER_URL_SLICE); }; // The explorationId here should be a string, not a promise. 
var openEditor = function(explorationId) { browser.get(EDITOR_URL_SLICE + explorationId); browser.waitForAngular(); editor.exitTutorialIfNecessary(); }; var openPlayer = function(explorationId) { browser.get(PLAYER_URL_SLICE + explorationId); browser.waitForAngular(); }; // Takes the user from an exploration editor to its player. // NOTE: we do not use the preview button because that will open a new window. var moveToPlayer = function() { getExplorationIdFromEditor().then(openPlayer); }; // Takes the user from the exploration player to its editor. var moveToEditor = function() { getExplorationIdFromPlayer().then(openEditor); }; var expect404Error = function() { expect(element(by.css('.protractor-test-error-container')).getText()). toMatch('Error 404'); }; // Checks no untranslated values are shown in the page. var ensurePageHasNoTranslationIds = function() { // The use of the InnerHTML is hacky, but is faster than checking each // individual component that contains text. element(by.css('.oppia-base-container')).getInnerHtml().then( function(promiseValue) { // First remove all the attributes translate and variables that are // not displayed var REGEX_TRANSLATE_ATTR = new RegExp('translate="I18N_', 'g'); var REGEX_NG_VARIABLE = new RegExp('<\\[\'I18N_', 'g'); expect(promiseValue.replace(REGEX_TRANSLATE_ATTR, '') .replace(REGEX_NG_VARIABLE, '')).not.toContain('I18N'); }); }; var acceptAlert = function() { browser.wait(function() { return browser.switchTo().alert().accept().then( function() { return true; }, function() { return false; } ); }); }; exports.acceptAlert = acceptAlert; exports.waitForSystem = waitForSystem; exports.scrollToTop = scrollToTop; exports.checkForConsoleErrors = checkForConsoleErrors; exports.SERVER_URL_PREFIX = SERVER_URL_PREFIX; exports.LIBRARY_URL_SUFFIX = LIBRARY_URL_SUFFIX; exports.EDITOR_URL_SLICE = EDITOR_URL_SLICE; exports.LOGIN_URL_SUFFIX = LOGIN_URL_SUFFIX; exports.MODERATOR_URL_SUFFIX = MODERATOR_URL_SUFFIX; exports.ADMIN_URL_SUFFIX = 
ADMIN_URL_SUFFIX; exports.DONATION_THANK_URL_SUFFIX = DONATION_THANK_URL_SUFFIX; exports.SCRIPTS_URL_SLICE = SCRIPTS_URL_SLICE; exports.FIRST_STATE_DEFAULT_NAME = FIRST_STATE_DEFAULT_NAME; exports.getExplorationIdFromEditor = getExplorationIdFromEditor; exports.getExplorationIdFromPlayer = getExplorationIdFromPlayer; exports.openEditor = openEditor; exports.openPlayer = openPlayer; exports.moveToPlayer = moveToPlayer; exports.moveToEditor = moveToEditor; exports.expect404Error = expect404Error; exports.ensurePageHasNoTranslationIds = ensurePageHasNoTranslationIds;
amgowano/oppia
core/tests/protractor_utils/general.js
JavaScript
apache-2.0
6,076
package org.jetbrains.plugins.scala.lang.formatter; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.command.CommandProcessor; import com.intellij.openapi.editor.Document; import com.intellij.openapi.editor.EditorFactory; import com.intellij.openapi.editor.impl.DocumentImpl; import com.intellij.openapi.util.TextRange; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.text.StringUtil; import com.intellij.psi.PsiDocumentManager; import com.intellij.psi.PsiFile; import com.intellij.psi.codeStyle.CodeStyleManager; import com.intellij.psi.codeStyle.CodeStyleSettings; import com.intellij.psi.codeStyle.CodeStyleSettingsManager; import com.intellij.psi.codeStyle.CommonCodeStyleSettings; import com.intellij.testFramework.LightIdeaTestCase; import com.intellij.util.IncorrectOperationException; import org.jetbrains.plugins.scala.ScalaLanguage; import org.jetbrains.plugins.scala.lang.formatting.settings.ScalaCodeStyleSettings; import org.jetbrains.plugins.scala.util.TestUtils; import java.io.File; import java.util.EnumMap; import java.util.Map; /** * Base class for java formatter tests that holds utility methods. 
* * @author Denis Zhdanov * @since Apr 27, 2010 6:26:29 PM */ //todo: almost duplicate from Java public abstract class AbstractScalaFormatterTestBase extends LightIdeaTestCase { protected enum Action {REFORMAT, INDENT} private interface TestFormatAction { void run(PsiFile psiFile, int startOffset, int endOffset); } private static final Map<Action, TestFormatAction> ACTIONS = new EnumMap<Action, TestFormatAction>(Action.class); static { ACTIONS.put(Action.REFORMAT, new TestFormatAction() { public void run(PsiFile psiFile, int startOffset, int endOffset) { CodeStyleManager.getInstance(getProject()).reformatText(psiFile, startOffset, endOffset); } }); ACTIONS.put(Action.INDENT, new TestFormatAction() { public void run(PsiFile psiFile, int startOffset, int endOffset) { CodeStyleManager.getInstance(getProject()).adjustLineIndent(psiFile, startOffset); } }); } private static final String BASE_PATH = TestUtils.getTestDataPath() + "/psi/formatter"; public TextRange myTextRange; public TextRange myLineRange; public CommonCodeStyleSettings getCommonSettings() { return getSettings().getCommonSettings(ScalaLanguage.INSTANCE); } public ScalaCodeStyleSettings getScalaSettings() { return getSettings().getCustomSettings(ScalaCodeStyleSettings.class); } public CodeStyleSettings getSettings() { return CodeStyleSettingsManager.getSettings(getProject()); } public CommonCodeStyleSettings.IndentOptions getIndentOptions() { return getCommonSettings().getIndentOptions(); } public void doTest() throws Exception { doTest(getTestName(false) + ".scala", getTestName(false) + "_after.scala"); } public void doTest(String fileNameBefore, String fileNameAfter) throws Exception { doTextTest(Action.REFORMAT, loadFile(fileNameBefore), loadFile(fileNameAfter)); } public void doTextTest(final String text, String textAfter) throws IncorrectOperationException { doTextTest(Action.REFORMAT, StringUtil.convertLineSeparators(text), StringUtil.convertLineSeparators(textAfter)); } public void doTextTest(final 
Action action, final String text, String textAfter) throws IncorrectOperationException { final PsiFile file = createFile("A.scala", text); if (myLineRange != null) { final DocumentImpl document = new DocumentImpl(text); myTextRange = new TextRange(document.getLineStartOffset(myLineRange.getStartOffset()), document.getLineEndOffset(myLineRange.getEndOffset())); } /* CommandProcessor.getInstance().executeCommand(getProject(), new Runnable() { public void run() { ApplicationManager.getApplication().runWriteAction(new Runnable() { public void run() { performFormatting(file); } }); } }, null, null); assertEquals(prepareText(textAfter), prepareText(file.getText())); */ final PsiDocumentManager manager = PsiDocumentManager.getInstance(getProject()); final Document document = manager.getDocument(file); CommandProcessor.getInstance().executeCommand(getProject(), new Runnable() { public void run() { ApplicationManager.getApplication().runWriteAction(new Runnable() { public void run() { document.replaceString(0, document.getTextLength(), text); manager.commitDocument(document); try { TextRange rangeToUse = myTextRange; if (rangeToUse == null) { rangeToUse = file.getTextRange(); } ACTIONS.get(action).run(file, rangeToUse.getStartOffset(), rangeToUse.getEndOffset()); } catch (IncorrectOperationException e) { assertTrue(e.getLocalizedMessage(), false); } } }); } }, "", ""); if (document == null) { fail("Don't expect the document to be null"); return; } assertEquals(prepareText(textAfter), prepareText(document.getText())); manager.commitDocument(document); assertEquals(prepareText(textAfter), prepareText(file.getText())); } //todo: was unused, should be deleted (??) 
/* public void doMethodTest(final String before, final String after) throws Exception { doTextTest( Action.REFORMAT, "class Foo{\n" + " void foo() {\n" + before + '\n' + " }\n" + "}", "class Foo {\n" + " void foo() {\n" + shiftIndentInside(after, 8, false) + '\n' + " }\n" + "}" ); } public void doClassTest(final String before, final String after) throws Exception { doTextTest( Action.REFORMAT, "class Foo{\n" + before + '\n' + "}", "class Foo {\n" + shiftIndentInside(after, 4, false) + '\n' + "}" ); }*/ private static String prepareText(String actual) { if (actual.startsWith("\n")) { actual = actual.substring(1); } if (actual.startsWith("\n")) { actual = actual.substring(1); } // Strip trailing spaces final Document doc = EditorFactory.getInstance().createDocument(actual); CommandProcessor.getInstance().executeCommand(getProject(), new Runnable() { public void run() { ApplicationManager.getApplication().runWriteAction(new Runnable() { public void run() { ((DocumentImpl)doc).stripTrailingSpaces(getProject()); } }); } }, "formatting", null); return doc.getText().trim(); } private static String loadFile(String name) throws Exception { String fullName = BASE_PATH + File.separatorChar + name; String text = new String(FileUtil.loadFileText(new File(fullName))); text = StringUtil.convertLineSeparators(text); return text; } @Override protected void setUp() throws Exception { super.setUp(); TestUtils.disableTimerThread(); } }
ilinum/intellij-scala
test/org/jetbrains/plugins/scala/lang/formatter/AbstractScalaFormatterTestBase.java
Java
apache-2.0
7,072
<?php /** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ /** * see * http://www.opensocial.org/Technical-Resources/opensocial-spec-v081/opensocial-reference#opensocial.Activity */ class Shindig_Activity { public $appId; public $body; public $bodyId; public $externalId; public $id; public $mediaItems; public $postedTime; public $priority; public $streamFaviconUrl; public $streamSourceUrl; public $streamTitle; public $streamUrl; public $templateParams; public $title; public $titleId; public $url; public $userId; public function __construct($id, $userId) { $this->id = $id; $this->userId = $userId; } public function getAppId() { return $this->appId; } public function setAppId($appId) { $this->appId = $appId; } public function getBody() { return $this->body; } public function setBody($body) { $this->body = $body; } public function getBodyId() { return $this->bodyId; } public function setBodyId($bodyId) { $this->bodyId = $bodyId; } public function getExternalId() { return $this->externalId; } public function setExternalId($externalId) { $this->externalId = $externalId; } public function getId() { return $this->id; } public function setId($id) { $this->id = $id; } public function getMediaItems() { return $this->mediaItems; } public function 
setMediaItems($mediaItems) { $this->mediaItems = $mediaItems; } public function getPostedTime() { return $this->postedTime; } public function setPostedTime($postedTime) { $this->postedTime = $postedTime; } public function getPriority() { return $this->priority; } public function setPriority($priority) { $this->priority = $priority; } public function getStreamFaviconUrl() { return $this->streamFaviconUrl; } public function setStreamFaviconUrl($streamFaviconUrl) { $this->streamFaviconUrl = $streamFaviconUrl; } public function getStreamSourceUrl() { return $this->streamSourceUrl; } public function setStreamSourceUrl($streamSourceUrl) { $this->streamSourceUrl = $streamSourceUrl; } public function getStreamTitle() { return $this->streamTitle; } public function setStreamTitle($streamTitle) { $this->streamTitle = $streamTitle; } public function getStreamUrl() { return $this->streamUrl; } public function setStreamUrl($streamUrl) { $this->streamUrl = $streamUrl; } public function getTemplateParams() { return $this->templateParams; } public function setTemplateParams($templateParams) { $this->templateParams = $templateParams; } public function getTitle() { return $this->title; } public function setTitle($title) { $this->title = strip_tags($title, '<b><i><a><span><img>'); } public function getTitleId() { return $this->titleId; } public function setTitleId($titleId) { $this->titleId = $titleId; } public function getUrl() { return $this->url; } public function setUrl($url) { $this->url = $url; } public function getUserId() { return $this->userId; } public function setUserId($userId) { $this->userId = $userId; } }
cripure/openpne3
plugins/opOpenSocialPlugin/lib/vendor/Shindig/src/social/model/Activity.php
PHP
apache-2.0
4,000
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.module.impl.scopes; import com.intellij.openapi.module.Module; import com.intellij.openapi.project.Project; import com.intellij.openapi.projectRoots.Sdk; import com.intellij.openapi.roots.*; import com.intellij.openapi.roots.libraries.Library; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.Condition; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.util.containers.ContainerUtil; import gnu.trove.THashSet; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.annotations.TestOnly; import java.util.*; /** * @author max */ public class LibraryRuntimeClasspathScope extends GlobalSearchScope { private final ProjectFileIndex myIndex; private final LinkedHashSet<VirtualFile> myEntries = new LinkedHashSet<VirtualFile>(); private int myCachedHashCode = 0; public LibraryRuntimeClasspathScope(final Project project, final List<Module> modules) { super(project); myIndex = ProjectRootManager.getInstance(project).getFileIndex(); final Set<Sdk> processedSdk = new THashSet<Sdk>(); final Set<Library> processedLibraries = new THashSet<Library>(); final Set<Module> processedModules = new THashSet<Module>(); final Condition<OrderEntry> condition = new Condition<OrderEntry>() { @Override public boolean value(OrderEntry orderEntry) { if 
(orderEntry instanceof ModuleOrderEntry) { final Module module = ((ModuleOrderEntry)orderEntry).getModule(); return module != null && !processedModules.contains(module); } return true; } }; for (Module module : modules) { buildEntries(module, processedModules, processedLibraries, processedSdk, condition); } } public LibraryRuntimeClasspathScope(Project project, LibraryOrderEntry entry) { super(project); myIndex = ProjectRootManager.getInstance(project).getFileIndex(); Collections.addAll(myEntries, entry.getRootFiles(OrderRootType.CLASSES)); } public int hashCode() { if (myCachedHashCode == 0) { myCachedHashCode = myEntries.hashCode(); } return myCachedHashCode; } public boolean equals(Object object) { if (object == this) return true; if (object == null || object.getClass() != LibraryRuntimeClasspathScope.class) return false; final LibraryRuntimeClasspathScope that = (LibraryRuntimeClasspathScope)object; return that.myEntries.equals(myEntries); } private void buildEntries(@NotNull final Module module, @NotNull final Set<Module> processedModules, @NotNull final Set<Library> processedLibraries, @NotNull final Set<Sdk> processedSdk, Condition<OrderEntry> condition) { if (!processedModules.add(module)) return; ModuleRootManager.getInstance(module).orderEntries().recursively().satisfying(condition).process(new RootPolicy<LinkedHashSet<VirtualFile>>() { public LinkedHashSet<VirtualFile> visitLibraryOrderEntry(final LibraryOrderEntry libraryOrderEntry, final LinkedHashSet<VirtualFile> value) { final Library library = libraryOrderEntry.getLibrary(); if (library != null && processedLibraries.add(library)) { ContainerUtil.addAll(value, libraryOrderEntry.getRootFiles(OrderRootType.CLASSES)); } return value; } public LinkedHashSet<VirtualFile> visitModuleSourceOrderEntry(final ModuleSourceOrderEntry moduleSourceOrderEntry, final LinkedHashSet<VirtualFile> value) { processedModules.add(moduleSourceOrderEntry.getOwnerModule()); ContainerUtil.addAll(value, 
moduleSourceOrderEntry.getRootModel().getSourceRoots()); return value; } @Override public LinkedHashSet<VirtualFile> visitModuleOrderEntry(ModuleOrderEntry moduleOrderEntry, LinkedHashSet<VirtualFile> value) { final Module depModule = moduleOrderEntry.getModule(); if (depModule != null) { ContainerUtil.addAll(value, ModuleRootManager.getInstance(depModule).getSourceRoots()); } return value; } public LinkedHashSet<VirtualFile> visitJdkOrderEntry(final JdkOrderEntry jdkOrderEntry, final LinkedHashSet<VirtualFile> value) { final Sdk jdk = jdkOrderEntry.getJdk(); if (jdk != null && processedSdk.add(jdk)) { ContainerUtil.addAll(value, jdkOrderEntry.getRootFiles(OrderRootType.CLASSES)); } return value; } }, myEntries); } public boolean contains(VirtualFile file) { return myEntries.contains(getFileRoot(file)); } @Nullable private VirtualFile getFileRoot(VirtualFile file) { if (myIndex.isLibraryClassFile(file)) { return myIndex.getClassRootForFile(file); } if (myIndex.isInContent(file)) { return myIndex.getSourceRootForFile(file); } if (myIndex.isInLibraryClasses(file)) { return myIndex.getClassRootForFile(file); } return null; } public int compare(VirtualFile file1, VirtualFile file2) { final VirtualFile r1 = getFileRoot(file1); final VirtualFile r2 = getFileRoot(file2); for (VirtualFile root : myEntries) { if (Comparing.equal(r1, root)) return 1; if (Comparing.equal(r2, root)) return -1; } return 0; } @TestOnly public List<VirtualFile> getRoots() { return new ArrayList<VirtualFile>(myEntries); } public boolean isSearchInModuleContent(@NotNull Module aModule) { return false; } public boolean isSearchInLibraries() { return true; } }
liveqmock/platform-tools-idea
platform/indexing-impl/src/com/intellij/openapi/module/impl/scopes/LibraryRuntimeClasspathScope.java
Java
apache-2.0
6,421
/** * Copyright (C) 2013-2014 EaseMob Technologies. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /******************************************************************************* * Copyright 2011, 2012 Chris Banes. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*******************************************************************************/ package com.easemob.chatuidemo.widget.photoview; import android.annotation.TargetApi; import android.content.Context; import android.os.Build.VERSION; import android.os.Build.VERSION_CODES; import android.widget.OverScroller; import android.widget.Scroller; public abstract class ScrollerProxy { public static ScrollerProxy getScroller(Context context) { if (VERSION.SDK_INT < VERSION_CODES.GINGERBREAD) { return new PreGingerScroller(context); } else { return new GingerScroller(context); } } public abstract boolean computeScrollOffset(); public abstract void fling(int startX, int startY, int velocityX, int velocityY, int minX, int maxX, int minY, int maxY, int overX, int overY); public abstract void forceFinished(boolean finished); public abstract int getCurrX(); public abstract int getCurrY(); @TargetApi(9) private static class GingerScroller extends ScrollerProxy { private OverScroller mScroller; public GingerScroller(Context context) { mScroller = new OverScroller(context); } @Override public boolean computeScrollOffset() { return mScroller.computeScrollOffset(); } @Override public void fling(int startX, int startY, int velocityX, int velocityY, int minX, int maxX, int minY, int maxY, int overX, int overY) { mScroller.fling(startX, startY, velocityX, velocityY, minX, maxX, minY, maxY, overX, overY); } @Override public void forceFinished(boolean finished) { mScroller.forceFinished(finished); } @Override public int getCurrX() { return mScroller.getCurrX(); } @Override public int getCurrY() { return mScroller.getCurrY(); } } private static class PreGingerScroller extends ScrollerProxy { private Scroller mScroller; public PreGingerScroller(Context context) { mScroller = new Scroller(context); } @Override public boolean computeScrollOffset() { return mScroller.computeScrollOffset(); } @Override public void fling(int startX, int startY, int velocityX, int velocityY, int minX, int maxX, int 
minY, int maxY, int overX, int overY) { mScroller.fling(startX, startY, velocityX, velocityY, minX, maxX, minY, maxY); } @Override public void forceFinished(boolean finished) { mScroller.forceFinished(finished); } @Override public int getCurrX() { return mScroller.getCurrX(); } @Override public int getCurrY() { return mScroller.getCurrY(); } } }
cf0566/CarMarket
src/com/easemob/chatuidemo/widget/photoview/ScrollerProxy.java
Java
apache-2.0
3,945
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.sql.calcite.expression.builtin; import org.apache.calcite.rex.RexCall; import org.apache.calcite.rex.RexLiteral; import org.apache.calcite.rex.RexNode; import org.apache.calcite.sql.SqlOperator; import org.apache.calcite.sql.fun.SqlStdOperatorTable; import org.apache.druid.query.filter.DimFilter; import org.apache.druid.query.filter.LikeDimFilter; import org.apache.druid.segment.VirtualColumn; import org.apache.druid.segment.column.RowSignature; import org.apache.druid.sql.calcite.expression.DirectOperatorConversion; import org.apache.druid.sql.calcite.expression.DruidExpression; import org.apache.druid.sql.calcite.expression.Expressions; import org.apache.druid.sql.calcite.planner.PlannerContext; import org.apache.druid.sql.calcite.rel.VirtualColumnRegistry; import javax.annotation.Nullable; import java.util.List; public class LikeOperatorConversion extends DirectOperatorConversion { private static final SqlOperator SQL_FUNCTION = SqlStdOperatorTable.LIKE; public LikeOperatorConversion() { super(SQL_FUNCTION, "like"); } @Override public SqlOperator calciteOperator() { return SQL_FUNCTION; } @Nullable @Override public DimFilter toDruidFilter( PlannerContext 
plannerContext, RowSignature rowSignature, @Nullable VirtualColumnRegistry virtualColumnRegistry, RexNode rexNode ) { final List<RexNode> operands = ((RexCall) rexNode).getOperands(); final DruidExpression druidExpression = Expressions.toDruidExpression( plannerContext, rowSignature, operands.get(0) ); if (druidExpression == null) { return null; } if (druidExpression.isSimpleExtraction()) { return new LikeDimFilter( druidExpression.getSimpleExtraction().getColumn(), RexLiteral.stringValue(operands.get(1)), operands.size() > 2 ? RexLiteral.stringValue(operands.get(2)) : null, druidExpression.getSimpleExtraction().getExtractionFn() ); } else if (virtualColumnRegistry != null) { VirtualColumn v = virtualColumnRegistry.getOrCreateVirtualColumnForExpression( plannerContext, druidExpression, operands.get(0).getType().getSqlTypeName() ); return new LikeDimFilter( v.getOutputName(), RexLiteral.stringValue(operands.get(1)), operands.size() > 2 ? RexLiteral.stringValue(operands.get(2)) : null, null ); } else { return null; } } }
implydata/druid
sql/src/main/java/org/apache/druid/sql/calcite/expression/builtin/LikeOperatorConversion.java
Java
apache-2.0
3,331
/// Copyright (c) 2009 Microsoft Corporation /// /// Redistribution and use in source and binary forms, with or without modification, are permitted provided /// that the following conditions are met: /// * Redistributions of source code must retain the above copyright notice, this list of conditions and /// the following disclaimer. /// * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and /// the following disclaimer in the documentation and/or other materials provided with the distribution. /// * Neither the name of Microsoft nor the names of its contributors may be used to /// endorse or promote products derived from this software without specific prior written permission. /// /// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR /// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS /// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE /// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT /// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS /// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, /// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF /// ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
ES5Harness.registerTest({ id: "15.2.3.6-3-37", path: "TestCases/chapter15/15.2/15.2.3/15.2.3.6/15.2.3.6-3-37.js", description: "Object.defineProperty - 'Attributes' is a Number object that uses Object's [[Get]] method to access the 'enumerable' property (8.10.5 step 3.a)", test: function testcase() { var obj = {}; var accessed = false; var numObj = new Number(-2); numObj.enumerable = true; Object.defineProperty(obj, "property", numObj); for (var prop in obj) { if (prop === "property") { accessed = true; } } return accessed; }, precondition: function prereq() { return fnExists(Object.defineProperty); } });
hnafar/IronJS
Src/Tests/ietestcenter/chapter15/15.2/15.2.3/15.2.3.6/15.2.3.6-3-37.js
JavaScript
apache-2.0
2,322
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.phoenix.pherf.rules; import org.apache.phoenix.thirdparty.com.google.common.base.Preconditions; import org.apache.phoenix.pherf.configuration.Column; import org.apache.phoenix.pherf.configuration.DataSequence; import org.apache.phoenix.pherf.configuration.DataTypeMapping; import java.util.concurrent.atomic.AtomicLong; public class SequentialIntegerDataGenerator implements RuleBasedDataGenerator { private final Column columnRule; private final AtomicLong counter; private final long minValue; private final long maxValue; public SequentialIntegerDataGenerator(Column columnRule) { Preconditions.checkArgument(columnRule.getDataSequence() == DataSequence.SEQUENTIAL); Preconditions.checkArgument(isIntegerType(columnRule.getType())); this.columnRule = columnRule; minValue = columnRule.getMinValue(); maxValue = columnRule.getMaxValue(); counter = new AtomicLong(0); } /** * Note that this method rolls over for attempts to get larger than maxValue * @return new DataValue */ @Override public DataValue getDataValue() { return new DataValue(columnRule.getType(), String.valueOf((counter.getAndIncrement() % (maxValue - minValue + 1)) + minValue)); } // Probably could go into a util class in the 
future boolean isIntegerType(DataTypeMapping mapping) { switch (mapping) { case BIGINT: case INTEGER: case TINYINT: case UNSIGNED_LONG: return true; default: return false; } } }
ankitsinghal/phoenix
phoenix-pherf/src/main/java/org/apache/phoenix/pherf/rules/SequentialIntegerDataGenerator.java
Java
apache-2.0
2,431
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.profiler; import static com.google.common.truth.Truth.assertThat; import static java.nio.charset.StandardCharsets.ISO_8859_1; import static org.junit.Assert.fail; import com.google.devtools.build.lib.clock.BlazeClock; import com.google.devtools.build.lib.clock.Clock; import com.google.devtools.build.lib.profiler.Profiler.ProfiledTaskKinds; import com.google.devtools.build.lib.profiler.analysis.ProfileInfo; import com.google.devtools.build.lib.testutil.FoundationTestCase; import com.google.devtools.build.lib.testutil.ManualClock; import com.google.devtools.build.lib.testutil.Suite; import com.google.devtools.build.lib.testutil.TestSpec; import com.google.devtools.build.lib.vfs.FileSystemUtils; import com.google.devtools.build.lib.vfs.Path; import java.io.InputStream; import java.io.OutputStream; import java.util.Arrays; import java.util.concurrent.atomic.AtomicInteger; import java.util.zip.Deflater; import java.util.zip.DeflaterOutputStream; import java.util.zip.Inflater; import java.util.zip.InflaterInputStream; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** * Unit tests for the profiler. */ @TestSpec(size = Suite.MEDIUM_TESTS) // testConcurrentProfiling takes ~700ms, testProfiler 100ms. 
@RunWith(JUnit4.class) public class ProfilerTest extends FoundationTestCase { private Path cacheDir; private Profiler profiler = Profiler.instance(); private ManualClock clock; @Before public final void createCacheDirectory() throws Exception { cacheDir = scratch.dir("/tmp"); } @Before public final void setManualClock() { clock = new ManualClock(); BlazeClock.setClock(clock); } @Test public void testProfilerActivation() throws Exception { Path cacheFile = cacheDir.getRelative("profile1.dat"); assertThat(profiler.isActive()).isFalse(); profiler.start(ProfiledTaskKinds.ALL, cacheFile.getOutputStream(), "basic test", false, BlazeClock.instance(), BlazeClock.instance().nanoTime()); assertThat(profiler.isActive()).isTrue(); profiler.stop(); assertThat(profiler.isActive()).isFalse(); } @Test public void testTaskDetails() throws Exception { Path cacheFile = cacheDir.getRelative("profile1.dat"); profiler.start(ProfiledTaskKinds.ALL, cacheFile.getOutputStream(), "basic test", false, BlazeClock.instance(), BlazeClock.instance().nanoTime()); profiler.startTask(ProfilerTask.ACTION, "action task"); profiler.logEvent(ProfilerTask.TEST, "event"); profiler.completeTask(ProfilerTask.ACTION); profiler.stop(); ProfileInfo info = ProfileInfo.loadProfile(cacheFile); info.calculateStats(); ProfileInfo.Task task = info.allTasksById.get(0); assertThat(task.id).isEqualTo(1); assertThat(task.type).isEqualTo(ProfilerTask.ACTION); assertThat(task.getDescription()).isEqualTo("action task"); task = info.allTasksById.get(1); assertThat(task.id).isEqualTo(2); assertThat(task.type).isEqualTo(ProfilerTask.TEST); assertThat(task.getDescription()).isEqualTo("event"); } @Test public void testProfiler() throws Exception { Path cacheFile = cacheDir.getRelative("profile1.dat"); profiler.start(ProfiledTaskKinds.ALL, cacheFile.getOutputStream(), "basic test", false, BlazeClock.instance(), BlazeClock.instance().nanoTime()); profiler.logSimpleTask(BlazeClock.instance().nanoTime(), ProfilerTask.PHASE, 
"profiler start"); profiler.startTask(ProfilerTask.ACTION, "complex task"); profiler.logEvent(ProfilerTask.PHASE, "event1"); profiler.startTask(ProfilerTask.ACTION_CHECK, "complex subtask"); // next task takes less than 10 ms and should be only aggregated profiler.logSimpleTask(BlazeClock.instance().nanoTime(), ProfilerTask.VFS_STAT, "stat1"); long startTime = BlazeClock.instance().nanoTime(); clock.advanceMillis(20); // this one will take at least 20 ms and should be present profiler.logSimpleTask(startTime, ProfilerTask.VFS_STAT, "stat2"); profiler.completeTask(ProfilerTask.ACTION_CHECK); profiler.completeTask(ProfilerTask.ACTION); profiler.stop(); // all other calls to profiler should be ignored profiler.logEvent(ProfilerTask.PHASE, "should be ignored"); // normally this would cause an exception but it is ignored since profiler // is disabled profiler.completeTask(ProfilerTask.ACTION_EXECUTE); ProfileInfo info = ProfileInfo.loadProfile(cacheFile); info.calculateStats(); assertThat(info.allTasksById).hasSize(6); // only 5 tasks + finalization should be recorded ProfileInfo.Task task = info.allTasksById.get(0); assertThat(task.stats.isEmpty()).isTrue(); task = info.allTasksById.get(1); int count = 0; for (ProfileInfo.AggregateAttr attr : task.getStatAttrArray()) { if (attr != null) { count++; } } assertThat(count).isEqualTo(2); // only children are GENERIC and ACTION_CHECK assertThat(ProfilerTask.TASK_COUNT).isEqualTo(task.aggregatedStats.toArray().length); assertThat(task.aggregatedStats.getAttr(ProfilerTask.VFS_STAT).count).isEqualTo(2); task = info.allTasksById.get(2); assertThat(task.durationNanos).isEqualTo(0); task = info.allTasksById.get(3); assertThat(task.stats.getAttr(ProfilerTask.VFS_STAT).count).isEqualTo(2); assertThat(task.subtasks).hasLength(1); assertThat(task.subtasks[0].getDescription()).isEqualTo("stat2"); // assert that startTime grows with id long time = -1; for (ProfileInfo.Task t : info.allTasksById) { 
assertThat(t.startTime).isAtLeast(time); time = t.startTime; } } @Test public void testProfilerRecordingAllEvents() throws Exception { Path cacheFile = cacheDir.getRelative("profile1.dat"); profiler.start(ProfiledTaskKinds.ALL, cacheFile.getOutputStream(), "basic test", true, BlazeClock.instance(), BlazeClock.instance().nanoTime()); profiler.startTask(ProfilerTask.ACTION, "action task"); // Next task takes less than 10 ms but should be recorded anyway. clock.advanceMillis(1); profiler.logSimpleTask(BlazeClock.instance().nanoTime(), ProfilerTask.VFS_STAT, "stat1"); profiler.completeTask(ProfilerTask.ACTION); profiler.stop(); ProfileInfo info = ProfileInfo.loadProfile(cacheFile); info.calculateStats(); assertThat(info.allTasksById).hasSize(3); // 2 tasks + finalization should be recorded ProfileInfo.Task task = info.allTasksById.get(1); assertThat(task.type).isEqualTo(ProfilerTask.VFS_STAT); // Check that task would have been dropped if profiler was not configured to record everything. assertThat(task.durationNanos).isLessThan(ProfilerTask.VFS_STAT.minDuration); } @Test public void testProfilerRecordingOnlySlowestEvents() throws Exception { Path profileData = cacheDir.getRelative("foo"); profiler.start(ProfiledTaskKinds.SLOWEST, profileData.getOutputStream(), "test", true, BlazeClock.instance(), BlazeClock.instance().nanoTime()); profiler.logSimpleTask(10000, 20000, ProfilerTask.VFS_STAT, "stat"); profiler.logSimpleTask(20000, 30000, ProfilerTask.REMOTE_EXECUTION, "remote execution"); assertThat(profiler.isProfiling(ProfilerTask.VFS_STAT)).isTrue(); assertThat(profiler.isProfiling(ProfilerTask.REMOTE_EXECUTION)).isFalse(); profiler.stop(); ProfileInfo info = ProfileInfo.loadProfile(profileData); info.calculateStats(); assertThat(info.allTasksById).hasSize(1); // only VFS_STAT task should be recorded ProfileInfo.Task task = info.allTasksById.get(0); assertThat(task.type).isEqualTo(ProfilerTask.VFS_STAT); } @Test public void testProfilerRecordsNothing() throws 
Exception { Path profileData = cacheDir.getRelative("foo"); profiler.start(ProfiledTaskKinds.NONE, profileData.getOutputStream(), "test", true, BlazeClock.instance(), BlazeClock.instance().nanoTime()); profiler.logSimpleTask(10000, 20000, ProfilerTask.VFS_STAT, "stat"); assertThat(ProfilerTask.VFS_STAT.collectsSlowestInstances()).isTrue(); assertThat(profiler.isProfiling(ProfilerTask.VFS_STAT)).isFalse(); profiler.stop(); ProfileInfo info = ProfileInfo.loadProfile(profileData); info.calculateStats(); assertThat(info.allTasksById).isEmpty(); } @Test public void testInconsistentCompleteTask() throws Exception { Path cacheFile = cacheDir.getRelative("profile2.dat"); profiler.start(ProfiledTaskKinds.ALL, cacheFile.getOutputStream(), "task stack inconsistency test", false, BlazeClock.instance(), BlazeClock.instance().nanoTime()); profiler.startTask(ProfilerTask.PHASE, "some task"); try { profiler.completeTask(ProfilerTask.ACTION); fail(); } catch (IllegalStateException e) { // this is expected } profiler.stop(); } @Test public void testConcurrentProfiling() throws Exception { Path cacheFile = cacheDir.getRelative("profile3.dat"); profiler.start(ProfiledTaskKinds.ALL, cacheFile.getOutputStream(), "concurrent test", false, BlazeClock.instance(), BlazeClock.instance().nanoTime()); long id = Thread.currentThread().getId(); Thread thread1 = new Thread() { @Override public void run() { for (int i = 0; i < 10000; i++) { Profiler.instance().logEvent(ProfilerTask.TEST, "thread1"); } } }; long id1 = thread1.getId(); Thread thread2 = new Thread() { @Override public void run() { for (int i = 0; i < 10000; i++) { Profiler.instance().logEvent(ProfilerTask.TEST, "thread2"); } } }; long id2 = thread2.getId(); profiler.startTask(ProfilerTask.PHASE, "main task"); profiler.logEvent(ProfilerTask.TEST, "starting threads"); thread1.start(); thread2.start(); thread2.join(); thread1.join(); profiler.logEvent(ProfilerTask.TEST, "joined"); profiler.completeTask(ProfilerTask.PHASE); 
profiler.stop(); ProfileInfo info = ProfileInfo.loadProfile(cacheFile); info.calculateStats(); info.analyzeRelationships(); assertThat(info.allTasksById).hasSize(4 + 10000 + 10000); // total number of tasks assertThat(info.tasksByThread).hasSize(3); // total number of threads // while main thread had 3 tasks, 2 of them were nested, so tasksByThread // would contain only one "main task" task assertThat(info.tasksByThread.get(id)).hasLength(2); ProfileInfo.Task mainTask = info.tasksByThread.get(id)[0]; assertThat(mainTask.getDescription()).isEqualTo("main task"); assertThat(mainTask.subtasks).hasLength(2); // other threads had 10000 independent recorded tasks each assertThat(info.tasksByThread.get(id1)).hasLength(10000); assertThat(info.tasksByThread.get(id2)).hasLength(10000); int startId = mainTask.subtasks[0].id; // id of "starting threads" int endId = mainTask.subtasks[1].id; // id of "joining" assertThat(startId).isLessThan(info.tasksByThread.get(id1)[0].id); assertThat(startId).isLessThan(info.tasksByThread.get(id2)[0].id); assertThat(endId).isGreaterThan(info.tasksByThread.get(id1)[9999].id); assertThat(endId).isGreaterThan(info.tasksByThread.get(id2)[9999].id); } @Test public void testPhaseTasks() throws Exception { Path cacheFile = cacheDir.getRelative("profile4.dat"); profiler.start(ProfiledTaskKinds.ALL, cacheFile.getOutputStream(), "phase test", false, BlazeClock.instance(), BlazeClock.instance().nanoTime()); Thread thread1 = new Thread() { @Override public void run() { for (int i = 0; i < 100; i++) { Profiler.instance().logEvent(ProfilerTask.TEST, "thread1"); } } }; profiler.markPhase(ProfilePhase.INIT); // Empty phase. 
profiler.markPhase(ProfilePhase.LOAD); thread1.start(); thread1.join(); clock.advanceMillis(1); profiler.markPhase(ProfilePhase.ANALYZE); Thread thread2 = new Thread() { @Override public void run() { profiler.startTask(ProfilerTask.TEST, "complex task"); for (int i = 0; i < 100; i++) { Profiler.instance().logEvent(ProfilerTask.TEST, "thread2a"); } profiler.completeTask(ProfilerTask.TEST); profiler.markPhase(ProfilePhase.EXECUTE); for (int i = 0; i < 100; i++) { Profiler.instance().logEvent(ProfilerTask.TEST, "thread2b"); } } }; thread2.start(); thread2.join(); profiler.logEvent(ProfilerTask.TEST, "last task"); clock.advanceMillis(1); profiler.stop(); ProfileInfo info = ProfileInfo.loadProfile(cacheFile); info.calculateStats(); info.analyzeRelationships(); // number of tasks: INIT(1) + LOAD(1) + Thread1.TEST(100) + ANALYZE(1) // + Thread2a.TEST(100) + TEST(1) + EXECUTE(1) + Thread2b.TEST(100) + TEST(1) + INFO(1) assertThat(info.allTasksById).hasSize(1 + 1 + 100 + 1 + 100 + 1 + 1 + 100 + 1 + 1); assertThat(info.tasksByThread).hasSize(3); // total number of threads // Phase0 contains only itself ProfileInfo.Task p0 = info.getPhaseTask(ProfilePhase.INIT); assertThat(info.getTasksForPhase(p0)).hasSize(1); // Phase1 contains itself and 100 TEST "thread1" tasks ProfileInfo.Task p1 = info.getPhaseTask(ProfilePhase.LOAD); assertThat(info.getTasksForPhase(p1)).hasSize(101); // Phase2 contains itself and 1 "complex task" ProfileInfo.Task p2 = info.getPhaseTask(ProfilePhase.ANALYZE); assertThat(info.getTasksForPhase(p2)).hasSize(2); // Phase3 contains itself, 100 TEST "thread2b" tasks and "last task" ProfileInfo.Task p3 = info.getPhaseTask(ProfilePhase.EXECUTE); assertThat(info.getTasksForPhase(p3)).hasSize(103); } @Test public void testCorruptedFile() throws Exception { Path cacheFile = cacheDir.getRelative("profile5.dat"); profiler.start(ProfiledTaskKinds.ALL, cacheFile.getOutputStream(), "phase test", false, BlazeClock.instance(), BlazeClock.instance().nanoTime()); for (int 
i = 0; i < 100; i++) { profiler.startTask(ProfilerTask.TEST, "outer task " + i); clock.advanceMillis(1); profiler.logEvent(ProfilerTask.TEST, "inner task " + i); profiler.completeTask(ProfilerTask.TEST); } profiler.stop(); ProfileInfo info = ProfileInfo.loadProfile(cacheFile); info.calculateStats(); assertThat(info.isCorruptedOrIncomplete()).isFalse(); Path corruptedFile = cacheDir.getRelative("profile5bad.dat"); FileSystemUtils.writeContent( corruptedFile, Arrays.copyOf(FileSystemUtils.readContent(cacheFile), 2000)); info = ProfileInfo.loadProfile(corruptedFile); info.calculateStats(); assertThat(info.isCorruptedOrIncomplete()).isTrue(); // Since root tasks will appear after nested tasks in the profile file and // we have exactly one nested task for each root task, the following will always // be true for our corrupted file: // 0 <= number_of_all_tasks - 2*number_of_root_tasks <= 1 assertThat(info.allTasksById.size() / 2).isEqualTo(info.rootTasksById.size()); } @Test public void testUnsupportedProfilerRecord() throws Exception { Path dataFile = cacheDir.getRelative("profile5.dat"); profiler.start(ProfiledTaskKinds.ALL, dataFile.getOutputStream(), "phase test", false, BlazeClock.instance(), BlazeClock.instance().nanoTime()); profiler.startTask(ProfilerTask.TEST, "outer task"); profiler.logEvent(ProfilerTask.EXCEPTION, "inner task"); profiler.completeTask(ProfilerTask.TEST); profiler.startTask(ProfilerTask.SCANNER, "outer task 2"); profiler.logSimpleTask(Profiler.nanoTimeMaybe(), ProfilerTask.TEST, "inner task 2"); profiler.completeTask(ProfilerTask.SCANNER); profiler.stop(); // Validate our test profile. 
ProfileInfo info = ProfileInfo.loadProfile(dataFile); info.calculateStats(); assertThat(info.isCorruptedOrIncomplete()).isFalse(); assertThat(info.getStatsForType(ProfilerTask.TEST, info.rootTasksById).count).isEqualTo(2); assertThat(info.getStatsForType(ProfilerTask.UNKNOWN, info.rootTasksById).count).isEqualTo(0); // Now replace "TEST" type with something unsupported - e.g. "XXXX". InputStream in = new InflaterInputStream(dataFile.getInputStream(), new Inflater(false), 65536); byte[] buffer = new byte[60000]; int len = in.read(buffer); in.close(); assertThat(len).isLessThan(buffer.length); // Validate that file was completely decoded. String content = new String(buffer, ISO_8859_1); int infoIndex = content.indexOf("TEST"); assertThat(infoIndex).isGreaterThan(0); content = content.substring(0, infoIndex) + "XXXX" + content.substring(infoIndex + 4); OutputStream out = new DeflaterOutputStream(dataFile.getOutputStream(), new Deflater(Deflater.BEST_SPEED, false), 65536); out.write(content.getBytes(ISO_8859_1)); out.close(); // Validate that XXXX records were classified as UNKNOWN. 
info = ProfileInfo.loadProfile(dataFile); info.calculateStats(); assertThat(info.isCorruptedOrIncomplete()).isFalse(); assertThat(info.getStatsForType(ProfilerTask.TEST, info.rootTasksById).count).isEqualTo(0); assertThat(info.getStatsForType(ProfilerTask.SCANNER, info.rootTasksById).count).isEqualTo(1); assertThat(info.getStatsForType(ProfilerTask.EXCEPTION, info.rootTasksById).count).isEqualTo(1); assertThat(info.getStatsForType(ProfilerTask.UNKNOWN, info.rootTasksById).count).isEqualTo(2); } @Test public void testResilenceToNonDecreasingNanoTimes() throws Exception { final long initialNanoTime = BlazeClock.instance().nanoTime(); final AtomicInteger numNanoTimeCalls = new AtomicInteger(0); Clock badClock = new Clock() { @Override public long currentTimeMillis() { return BlazeClock.instance().currentTimeMillis(); } @Override public long nanoTime() { return initialNanoTime - numNanoTimeCalls.addAndGet(1); } }; Path cacheFile = cacheDir.getRelative("profile1.dat"); profiler.start(ProfiledTaskKinds.ALL, cacheFile.getOutputStream(), "testResilenceToNonDecreasingNanoTimes", false, badClock, initialNanoTime); profiler.logSimpleTask(badClock.nanoTime(), ProfilerTask.TEST, "some task"); profiler.stop(); } }
damienmg/bazel
src/test/java/com/google/devtools/build/lib/profiler/ProfilerTest.java
Java
apache-2.0
19,092
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ package org.elasticsearch.xpack.runtimefields.query; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.ByteRunAutomaton; import org.elasticsearch.script.Script; import org.elasticsearch.xpack.runtimefields.mapper.StringFieldScript; import java.util.List; import java.util.Objects; public class StringScriptFieldRangeQuery extends AbstractStringScriptFieldQuery { private final String lowerValue; private final String upperValue; private final boolean includeLower; private final boolean includeUpper; public StringScriptFieldRangeQuery( Script script, StringFieldScript.LeafFactory leafFactory, String fieldName, String lowerValue, String upperValue, boolean includeLower, boolean includeUpper ) { super(script, leafFactory, fieldName); this.lowerValue = Objects.requireNonNull(lowerValue); this.upperValue = Objects.requireNonNull(upperValue); this.includeLower = includeLower; this.includeUpper = includeUpper; assert lowerValue.compareTo(upperValue) <= 0; } @Override protected boolean matches(List<String> values) { for (String value : values) { int lct = lowerValue.compareTo(value); boolean lowerOk = includeLower ? lct <= 0 : lct < 0; if (lowerOk) { int uct = upperValue.compareTo(value); boolean upperOk = includeUpper ? 
uct >= 0 : uct > 0; if (upperOk) { return true; } } } return false; } @Override public void visit(QueryVisitor visitor) { if (visitor.acceptField(fieldName())) { visitor.consumeTermsMatching( this, fieldName(), () -> new ByteRunAutomaton( Automata.makeBinaryInterval(new BytesRef(lowerValue), includeLower, new BytesRef(upperValue), includeUpper) ) ); } } @Override public final String toString(String field) { StringBuilder b = new StringBuilder(); if (false == fieldName().contentEquals(field)) { b.append(fieldName()).append(':'); } b.append(includeLower ? '[' : '{'); b.append(lowerValue).append(" TO ").append(upperValue); b.append(includeUpper ? ']' : '}'); return b.toString(); } @Override public int hashCode() { return Objects.hash(super.hashCode(), lowerValue, upperValue, includeLower, includeUpper); } @Override public boolean equals(Object obj) { if (false == super.equals(obj)) { return false; } StringScriptFieldRangeQuery other = (StringScriptFieldRangeQuery) obj; return lowerValue.equals(other.lowerValue) && upperValue.equals(other.upperValue) && includeLower == other.includeLower && includeUpper == other.includeUpper; } String lowerValue() { return lowerValue; } String upperValue() { return upperValue; } boolean includeLower() { return includeLower; } boolean includeUpper() { return includeUpper; } }
nknize/elasticsearch
x-pack/plugin/runtime-fields/src/main/java/org/elasticsearch/xpack/runtimefields/query/StringScriptFieldRangeQuery.java
Java
apache-2.0
3,629
/* * Copyright 2014 Dominick Baier, Brock Allen * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using IdentityManager.Api.Models; using System; using System.Linq; namespace IdentityManager { static class IdentityManagerResultExtensions { public static ErrorModel ToError(this IdentityManagerResult result) { if (result == null) throw new ArgumentNullException("result"); return new ErrorModel { Errors = result.Errors.ToArray() }; } } }
Ernesto99/IdentityManager
source/Core/Extensions/IdentityManagerResultExtensions.cs
C#
apache-2.0
1,056
/* * Copyright © 2013-2018 camunda services GmbH and various authors (info@camunda.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.camunda.bpm.container.impl.jboss.deployment.marker; import java.util.List; import org.camunda.bpm.application.AbstractProcessApplication; import org.camunda.bpm.application.impl.metadata.spi.ProcessesXml; import org.camunda.bpm.container.impl.jboss.util.ProcessesXmlWrapper; import org.jboss.as.ee.component.ComponentDescription; import org.jboss.as.server.deployment.AttachmentKey; import org.jboss.as.server.deployment.AttachmentList; import org.jboss.as.server.deployment.DeploymentUnit; import org.jboss.jandex.AnnotationInstance; /** * * @author Daniel Meyer * */ public class ProcessApplicationAttachments { private static final AttachmentKey<Boolean> MARKER = AttachmentKey.create(Boolean.class); private static final AttachmentKey<Boolean> PART_OF_MARKER = AttachmentKey.create(Boolean.class); private static final AttachmentKey<AttachmentList<ProcessesXmlWrapper>> PROCESSES_XML_LIST = AttachmentKey.createList(ProcessesXmlWrapper.class); private static final AttachmentKey<ComponentDescription> PA_COMPONENT = AttachmentKey.create(ComponentDescription.class); private static final AttachmentKey<AnnotationInstance> POST_DEPLOY_METHOD = AttachmentKey.create(AnnotationInstance.class); private static final AttachmentKey<AnnotationInstance> PRE_UNDEPLOY_METHOD = AttachmentKey.create(AnnotationInstance.class); /** * Attach the parsed 
ProcessesXml file to a deployment unit. * */ public static void addProcessesXml(DeploymentUnit unit, ProcessesXmlWrapper processesXmlWrapper) { unit.addToAttachmentList(PROCESSES_XML_LIST, processesXmlWrapper); } /** * Returns the attached {@link ProcessesXml} marker or null; * */ public static List<ProcessesXmlWrapper> getProcessesXmls(DeploymentUnit deploymentUnit) { return deploymentUnit.getAttachmentList(PROCESSES_XML_LIST); } /** * marks a a {@link DeploymentUnit} as a process application */ public static void mark(DeploymentUnit unit) { unit.putAttachment(MARKER, Boolean.TRUE); } /** * marks a a {@link DeploymentUnit} as part of a process application */ public static void markPartOfProcessApplication(DeploymentUnit unit) { if(unit.getParent() != null && unit.getParent() != unit) { unit.getParent().putAttachment(PART_OF_MARKER, Boolean.TRUE); } } /** * return true if the deployment unit is either itself a process * application or part of a process application. */ public static boolean isPartOfProcessApplication(DeploymentUnit unit) { if(isProcessApplication(unit)) { return true; } if(unit.getParent() != null && unit.getParent() != unit) { return unit.getParent().hasAttachment(PART_OF_MARKER); } return false; } /** * Returns true if the {@link DeploymentUnit} itself is a process application (carries a processes.xml) * */ public static boolean isProcessApplication(DeploymentUnit deploymentUnit) { return deploymentUnit.hasAttachment(MARKER); } /** * Returns the {@link ComponentDescription} for the {@link AbstractProcessApplication} component */ public static ComponentDescription getProcessApplicationComponent(DeploymentUnit deploymentUnit) { return deploymentUnit.getAttachment(PA_COMPONENT); } /** * Attach the {@link ComponentDescription} for the {@link AbstractProcessApplication} component */ public static void attachProcessApplicationComponent(DeploymentUnit deploymentUnit, ComponentDescription componentDescription){ deploymentUnit.putAttachment(PA_COMPONENT, 
componentDescription); } /** * Attach the {@link AnnotationInstance}s for the PostDeploy methods */ public static void attachPostDeployDescription(DeploymentUnit deploymentUnit, AnnotationInstance annotation){ deploymentUnit.putAttachment(POST_DEPLOY_METHOD, annotation); } /** * Attach the {@link AnnotationInstance}s for the PreUndeploy methods */ public static void attachPreUndeployDescription(DeploymentUnit deploymentUnit, AnnotationInstance annotation){ deploymentUnit.putAttachment(PRE_UNDEPLOY_METHOD, annotation); } /** * @return the description of the PostDeploy method */ public static AnnotationInstance getPostDeployDescription(DeploymentUnit deploymentUnit) { return deploymentUnit.getAttachment(POST_DEPLOY_METHOD); } /** * @return the description of the PreUndeploy method */ public static AnnotationInstance getPreUndeployDescription(DeploymentUnit deploymentUnit) { return deploymentUnit.getAttachment(PRE_UNDEPLOY_METHOD); } private ProcessApplicationAttachments() { } }
xasx/camunda-bpm-platform
distro/wildfly8/subsystem/src/main/java/org/camunda/bpm/container/impl/jboss/deployment/marker/ProcessApplicationAttachments.java
Java
apache-2.0
5,340
/* Copyright 2010, Object Management Group, Inc. * Copyright 2010, PrismTech, Inc. * Copyright 2010, Real-Time Innovations, Inc. * All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.omg.dds.type.typeobject; import java.util.List; import org.omg.dds.type.Extensibility; import org.omg.dds.type.ID; import org.omg.dds.type.Nested; @Extensibility(Extensibility.Kind.MUTABLE_EXTENSIBILITY) @Nested public interface UnionType extends Type { // ----------------------------------------------------------------------- // Properties // ----------------------------------------------------------------------- @ID(MemberId.MEMBER_UNIONTYPE_MEMBER_ID) public List<UnionMember> getMember(); // ----------------------------------------------------------------------- // Types // ----------------------------------------------------------------------- public static final class MemberId { // --- Constants: ---------------------------------------------------- public static final int MEMBER_UNIONTYPE_MEMBER_ID = 100; // --- Constructor: -------------------------------------------------- private MemberId() { // empty } } }
steveturner/datadistrib4j
srcJava/org/omg/dds/type/typeobject/UnionType.java
Java
apache-2.0
1,765
package alien4cloud.tosca.parser.postprocess; import static alien4cloud.utils.AlienUtils.safe; import javax.annotation.Resource; import org.alien4cloud.tosca.model.types.NodeType; import org.springframework.stereotype.Component; /** * Post process a node type. */ @Component public class NodeTypePostProcessor implements IPostProcessor<NodeType> { @Resource private CapabilityDefinitionPostProcessor capabilityDefinitionPostProcessor; @Resource private RequirementDefinitionPostProcessor requirementDefinitionPostProcessor; @Override public void process(NodeType instance) { safe(instance.getCapabilities()).forEach(capabilityDefinitionPostProcessor); safe(instance.getRequirements()).forEach(requirementDefinitionPostProcessor); } }
alien4cloud/alien4cloud
alien4cloud-tosca/src/main/java/alien4cloud/tosca/parser/postprocess/NodeTypePostProcessor.java
Java
apache-2.0
784
/* Copyright IBM Corp. All Rights Reserved. SPDX-License-Identifier: Apache-2.0 */ package protoutil import ( "bytes" "crypto/sha256" "encoding/asn1" "math/big" "github.com/golang/protobuf/proto" cb "github.com/hyperledger/fabric-protos-go/common" "github.com/pkg/errors" ) // NewBlock constructs a block with no data and no metadata. func NewBlock(seqNum uint64, previousHash []byte) *cb.Block { block := &cb.Block{} block.Header = &cb.BlockHeader{} block.Header.Number = seqNum block.Header.PreviousHash = previousHash block.Header.DataHash = []byte{} block.Data = &cb.BlockData{} var metadataContents [][]byte for i := 0; i < len(cb.BlockMetadataIndex_name); i++ { metadataContents = append(metadataContents, []byte{}) } block.Metadata = &cb.BlockMetadata{Metadata: metadataContents} return block } type asn1Header struct { Number *big.Int PreviousHash []byte DataHash []byte } func BlockHeaderBytes(b *cb.BlockHeader) []byte { asn1Header := asn1Header{ PreviousHash: b.PreviousHash, DataHash: b.DataHash, Number: new(big.Int).SetUint64(b.Number), } result, err := asn1.Marshal(asn1Header) if err != nil { // Errors should only arise for types which cannot be encoded, since the // BlockHeader type is known a-priori to contain only encodable types, an // error here is fatal and should not be propagated panic(err) } return result } func BlockHeaderHash(b *cb.BlockHeader) []byte { sum := sha256.Sum256(BlockHeaderBytes(b)) return sum[:] } func BlockDataHash(b *cb.BlockData) []byte { sum := sha256.Sum256(bytes.Join(b.Data, nil)) return sum[:] } // GetChannelIDFromBlockBytes returns channel ID given byte array which represents // the block func GetChannelIDFromBlockBytes(bytes []byte) (string, error) { block, err := UnmarshalBlock(bytes) if err != nil { return "", err } return GetChannelIDFromBlock(block) } // GetChannelIDFromBlock returns channel ID in the block func GetChannelIDFromBlock(block *cb.Block) (string, error) { if block == nil || block.Data == nil || block.Data.Data == nil 
|| len(block.Data.Data) == 0 { return "", errors.New("failed to retrieve channel id - block is empty") } var err error envelope, err := GetEnvelopeFromBlock(block.Data.Data[0]) if err != nil { return "", err } payload, err := UnmarshalPayload(envelope.Payload) if err != nil { return "", err } if payload.Header == nil { return "", errors.New("failed to retrieve channel id - payload header is empty") } chdr, err := UnmarshalChannelHeader(payload.Header.ChannelHeader) if err != nil { return "", err } return chdr.ChannelId, nil } // GetMetadataFromBlock retrieves metadata at the specified index. func GetMetadataFromBlock(block *cb.Block, index cb.BlockMetadataIndex) (*cb.Metadata, error) { if block.Metadata == nil { return nil, errors.New("no metadata in block") } if len(block.Metadata.Metadata) <= int(index) { return nil, errors.Errorf("no metadata at index [%s]", index) } md := &cb.Metadata{} err := proto.Unmarshal(block.Metadata.Metadata[index], md) if err != nil { return nil, errors.Wrapf(err, "error unmarshalling metadata at index [%s]", index) } return md, nil } // GetMetadataFromBlockOrPanic retrieves metadata at the specified index, or // panics on error func GetMetadataFromBlockOrPanic(block *cb.Block, index cb.BlockMetadataIndex) *cb.Metadata { md, err := GetMetadataFromBlock(block, index) if err != nil { panic(err) } return md } // GetConsenterMetadataFromBlock attempts to retrieve consenter metadata from the value // stored in block metadata at index SIGNATURES (first field). If no consenter metadata // is found there, it falls back to index ORDERER (third field). 
func GetConsenterMetadataFromBlock(block *cb.Block) (*cb.Metadata, error) { m, err := GetMetadataFromBlock(block, cb.BlockMetadataIndex_SIGNATURES) if err != nil { return nil, errors.WithMessage(err, "failed to retrieve metadata") } // TODO FAB-15864 Remove this fallback when we can stop supporting upgrade from pre-1.4.1 orderer if len(m.Value) == 0 { return GetMetadataFromBlock(block, cb.BlockMetadataIndex_ORDERER) } obm := &cb.OrdererBlockMetadata{} err = proto.Unmarshal(m.Value, obm) if err != nil { return nil, errors.Wrap(err, "failed to unmarshal orderer block metadata") } res := &cb.Metadata{} err = proto.Unmarshal(obm.ConsenterMetadata, res) if err != nil { return nil, errors.Wrap(err, "failed to unmarshal consenter metadata") } return res, nil } // GetLastConfigIndexFromBlock retrieves the index of the last config block as // encoded in the block metadata func GetLastConfigIndexFromBlock(block *cb.Block) (uint64, error) { m, err := GetMetadataFromBlock(block, cb.BlockMetadataIndex_SIGNATURES) if err != nil { return 0, errors.WithMessage(err, "failed to retrieve metadata") } // TODO FAB-15864 Remove this fallback when we can stop supporting upgrade from pre-1.4.1 orderer if len(m.Value) == 0 { m, err := GetMetadataFromBlock(block, cb.BlockMetadataIndex_LAST_CONFIG) if err != nil { return 0, errors.WithMessage(err, "failed to retrieve metadata") } lc := &cb.LastConfig{} err = proto.Unmarshal(m.Value, lc) if err != nil { return 0, errors.Wrap(err, "error unmarshalling LastConfig") } return lc.Index, nil } obm := &cb.OrdererBlockMetadata{} err = proto.Unmarshal(m.Value, obm) if err != nil { return 0, errors.Wrap(err, "failed to unmarshal orderer block metadata") } return obm.LastConfig.Index, nil } // GetLastConfigIndexFromBlockOrPanic retrieves the index of the last config // block as encoded in the block metadata, or panics on error func GetLastConfigIndexFromBlockOrPanic(block *cb.Block) uint64 { index, err := GetLastConfigIndexFromBlock(block) if err != nil 
{ panic(err) } return index } // CopyBlockMetadata copies metadata from one block into another func CopyBlockMetadata(src *cb.Block, dst *cb.Block) { dst.Metadata = src.Metadata // Once copied initialize with rest of the // required metadata positions. InitBlockMetadata(dst) } // InitBlockMetadata initializes metadata structure func InitBlockMetadata(block *cb.Block) { if block.Metadata == nil { block.Metadata = &cb.BlockMetadata{Metadata: [][]byte{{}, {}, {}, {}, {}}} } else if len(block.Metadata.Metadata) < int(cb.BlockMetadataIndex_COMMIT_HASH+1) { for i := int(len(block.Metadata.Metadata)); i <= int(cb.BlockMetadataIndex_COMMIT_HASH); i++ { block.Metadata.Metadata = append(block.Metadata.Metadata, []byte{}) } } }
hyperledger/fabric
protoutil/blockutils.go
GO
apache-2.0
6,557
/* * Copyright 2021 NAVER Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.navercorp.pinpoint.profiler.plugin.config; import com.navercorp.pinpoint.bootstrap.config.DefaultProfilerConfig; import com.navercorp.pinpoint.bootstrap.config.Value; import com.navercorp.pinpoint.common.util.StringUtils; import java.util.Collections; import java.util.List; public class DefaultPluginLoadingConfig implements PluginLoadingConfig { // ArtifactIdUtils.ARTIFACT_SEPARATOR private static final String ARTIFACT_SEPARATOR = ";"; private List<String> pluginLoadOrder = Collections.emptyList(); private List<String> disabledPlugins = Collections.emptyList(); private List<String> importPluginIds = Collections.emptyList(); public DefaultPluginLoadingConfig() { } @Override public List<String> getPluginLoadOrder() { return pluginLoadOrder; } @Value("${profiler.plugin.load.order}") public void setPluginLoadOrder(String pluginLoadOrder) { this.pluginLoadOrder = StringUtils.tokenizeToStringList(pluginLoadOrder, ","); } @Override public List<String> getDisabledPlugins() { return disabledPlugins; } @Value("${profiler.plugin.disable}") public void setDisabledPlugins(String disabledPlugins) { this.disabledPlugins = StringUtils.tokenizeToStringList(disabledPlugins, ","); } @Override public List<String> getImportPluginIds() { return importPluginIds; } @Value("${" + DefaultProfilerConfig.IMPORT_PLUGIN + "}") public void setImportPluginIds(String importPluginIds) { this.importPluginIds = 
StringUtils.tokenizeToStringList(importPluginIds, ARTIFACT_SEPARATOR); } @Override public String toString() { return "DefaultPluginLoadingConfig{" + "pluginLoadOrder=" + pluginLoadOrder + ", disabledPlugins=" + disabledPlugins + ", importPluginIds=" + importPluginIds + '}'; } }
emeroad/pinpoint
profiler/src/main/java/com/navercorp/pinpoint/profiler/plugin/config/DefaultPluginLoadingConfig.java
Java
apache-2.0
2,527
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * limitations under the License. */ /** * @author Pavel N. Vyssotski */ // OptionParser.cpp #include <cstring> #include "AgentBase.h" #include "MemoryManager.h" #include "AgentException.h" #include "Log.h" #include "OptionParser.h" using namespace jdwp; using namespace std; OptionParser::OptionParser() throw() { m_optionCount = 0; m_optionString = 0; m_options = 0; m_help = false; m_suspend = true; m_server = false; m_timeout = 0; m_transport = 0; m_address = 0; m_log = 0; m_kindFilter = 0; m_srcFilter = 0; m_onuncaught = false; m_onthrow = 0; m_launch = 0; } bool OptionParser::AsciiToBool(const char *str) throw(IllegalArgumentException) { if (strcmp("y", str) == 0) { return true; } else if (strcmp("n", str) == 0) { return false; } else { throw IllegalArgumentException(); } } void OptionParser::Parse(const char* str) throw(AgentException) { size_t i; int k; if (str == 0) return; const size_t len = strlen(str); if (len == 0) return; for (i = 0; i < len; i++) { if (str[i] == ',') { m_optionCount++; } else if (str[i] == '"' || str[i] == '\'') { char quote = str[i]; if (i > 0 && str[i-1] != '=') { throw IllegalArgumentException(); } i++; while (i < len && str[i] != quote) { i++; } if (i+1 < len && str[i+1] != ',') { throw 
IllegalArgumentException(); } } } m_optionCount++; m_optionString = reinterpret_cast<char*>(AgentBase::GetMemoryManager(). Allocate(len + 1 JDWP_FILE_LINE)); strcpy(m_optionString, str); m_options = reinterpret_cast<Option*>(AgentBase::GetMemoryManager(). Allocate(m_optionCount * sizeof(Option) JDWP_FILE_LINE)); m_options[0].name = m_optionString; m_options[0].value = ""; k = 0; bool waitEndOfOption = false; for (i = 0; i < len && k < m_optionCount; i++) { if ((m_optionString[i] == '=') && (!waitEndOfOption)) { waitEndOfOption = true; m_optionString[i] = '\0'; m_options[k].value = &m_optionString[i+1]; } else if (m_optionString[i] == ',') { waitEndOfOption = false; m_optionString[i] = '\0'; k++; m_options[k].name = &m_optionString[i+1]; m_options[k].value = ""; } else if (m_optionString[i] == '"' || m_optionString[i] == '\'') { char quote = m_optionString[i]; m_optionString[i] = '\0'; m_options[k].value = &m_optionString[i+1]; i++; while (i < len && m_optionString[i] != quote) { i++; } if (i < len) { m_optionString[i] = '\0'; } } } for (k = 0; k < m_optionCount; k++) { if (strcmp("transport", m_options[k].name) == 0) { m_transport = m_options[k].value; } else if (strcmp("address", m_options[k].name) == 0) { m_address = m_options[k].value; } else if (strcmp("timeout", m_options[k].name) == 0) { m_timeout = atol(m_options[k].value); } else if (strcmp("suspend", m_options[k].name) == 0) { m_suspend = AsciiToBool(m_options[k].value); } else if (strcmp("server", m_options[k].name) == 0) { m_server = AsciiToBool(m_options[k].value); } else if (strcmp("launch", m_options[k].name) == 0) { m_launch = m_options[k].value; } else if (strcmp("onuncaught", m_options[k].name) == 0) { m_onuncaught = AsciiToBool(m_options[k].value); } else if (strcmp("onthrow", m_options[k].name) == 0) { m_onthrow = m_options[k].value; } else if (strcmp("help", m_options[k].name) == 0) { m_help = true; #ifndef NDEBUG } else if (strcmp("log", m_options[k].name) == 0) { m_log = m_options[k].value; } 
else if (strcmp("trace", m_options[k].name) == 0) { m_kindFilter = m_options[k].value; } else if (strcmp("src", m_options[k].name) == 0) { m_srcFilter = m_options[k].value; #endif // NDEBUG } } if ((m_onthrow != 0) || (m_onuncaught != 0)) { if (m_launch == 0) { JDWP_ERROR("Specify launch=<command line> when using onthrow or onuncaught option"); throw IllegalArgumentException(); } } } OptionParser::~OptionParser() throw() { if (m_optionString != 0) AgentBase::GetMemoryManager().Free(m_optionString JDWP_FILE_LINE); if (m_options != 0) AgentBase::GetMemoryManager().Free(m_options JDWP_FILE_LINE); } const char *OptionParser::FindOptionValue(const char *name) const throw() { for (int i = 0; i < m_optionCount; i++) { if (strcmp(name, m_options[i].name) == 0) { return m_options[i].value; } } return 0; }
freeVM/freeVM
enhanced/java/jdktools/modules/jpda/src/main/native/jdwp/common/agent/core/OptionParser.cpp
C++
apache-2.0
5,843
package org.jaudiotagger.audio.mp4; import org.jaudiotagger.audio.generic.GenericAudioHeader; import org.jaudiotagger.audio.mp4.atom.Mp4EsdsBox; /** * Store some additional attributes not available for all audio types */ public class Mp4AudioHeader extends GenericAudioHeader { /** * The key for the kind field<br> * * @see #content */ public final static String FIELD_KIND = "KIND"; /** * The key for the profile<br> * * @see #content */ public final static String FIELD_PROFILE = "PROFILE"; /** * The key for the ftyp brand<br> * * @see #content */ public final static String FIELD_BRAND = "BRAND"; public void setKind(Mp4EsdsBox.Kind kind) { content.put(FIELD_KIND, kind); } /** * @return kind */ public Mp4EsdsBox.Kind getKind() { return (Mp4EsdsBox.Kind) content.get(FIELD_KIND); } /** * The key for the profile * * @param profile */ public void setProfile(Mp4EsdsBox.AudioProfile profile) { content.put(FIELD_PROFILE, profile); } /** * @return audio profile */ public Mp4EsdsBox.AudioProfile getProfile() { return (Mp4EsdsBox.AudioProfile) content.get(FIELD_PROFILE); } /** * @param brand */ public void setBrand(String brand) { content.put(FIELD_BRAND, brand); } /** * @return brand */ public String getBrand() { return (String) content.get(FIELD_BRAND); } }
dubenju/javay
src/java/org/jaudiotagger/audio/mp4/Mp4AudioHeader.java
Java
apache-2.0
1,542
/** * @license * Copyright 2020 The FOAM Authors. All Rights Reserved. * http://www.apache.org/licenses/LICENSE-2.0 */ foam.CLASS({ package: 'foam.nanos.crunch.lite', name: 'MinMaxCapabilityRefinement', refines: 'foam.nanos.crunch.MinMaxCapability', implements: [ 'foam.nanos.crunch.lite.CapableCompatibleCapability' ], javaImports: [ 'foam.nanos.crunch.CapabilityJunctionPayload', 'foam.nanos.crunch.CrunchService', 'static foam.nanos.crunch.CapabilityJunctionStatus.*' ], methods: [ { name: 'getCapableChainedStatus', documentation: ` numberGrantedOrPending are the available CapablePayloads which are GRANTED or can eventually be turned into GRANTED from PENDING state. If MinMaxCapability.min is greater than the number of available payloads which are GRANTED or can eventually be turned into GRANTED, then it is impossible for the total amount of GRANTED payloads to be greater than the MIN, thereby fulfilling the minimum requirement. For example, let there be a min max capablity which has 10 prerequisites and a min of 2. If the user selected only 3 of those prereqs in the wizard, then the CapablePayload.status for those 3 will each be in PENDING state with approvals generated for each one. Note, there will only be these 3 CapablePayloads out of the 10 Prereqs avaliable on the Capable object since the user only selected 3. If 1 of those 3 CapablePayloads get rejected. Then there will be 2 numberGrantedOrPending which could still potentially satisfy the min requirement of 2 if those 2 CapablePayloads get set to GRANTED. If 2 of those 3 CapablePayloads get rejected. Then there will be 1 numberGrantedOrPending which would be impossible to satisfy the MinMaxCapability.min requirement of 2 even if that 1 CapablePayload is GRANTED. 
`, javaCode: ` CrunchService crunchService = (CrunchService) x.get("crunchService"); List<String> prereqCapIds = crunchService.getPrereqs(getId()); int numberGranted = 0; int numberPending = 0; int numberRejected = 0; for ( String capId : prereqCapIds ) { CapabilityJunctionPayload prereqPayload = (CapabilityJunctionPayload) capablePayloadDAO.find(capId); if ( prereqPayload == null ) { continue; } switch ( prereqPayload.getStatus() ) { case GRANTED: numberGranted++; continue; case PENDING: case APPROVED: numberPending++; continue; case REJECTED: numberRejected++; continue; } } int numberTotal = numberGranted + numberPending + numberRejected; int numberGrantedOrPending = numberGranted + numberPending; if ( numberTotal == 0 ){ return CapabilityJunctionStatus.ACTION_REQUIRED; } if ( getMin() > numberGrantedOrPending ){ return CapabilityJunctionStatus.REJECTED; } if ( numberGranted >= getMin() ) { return CapabilityJunctionStatus.GRANTED; } if ( numberTotal >= getMin() ) { return CapabilityJunctionStatus.PENDING; } return CapabilityJunctionStatus.ACTION_REQUIRED; ` } ] });
jacksonic/vjlofvhjfgm
src/foam/nanos/crunch/lite/MinMaxCapabilityRefinement.js
JavaScript
apache-2.0
3,413
/* * Copyright 2008-2013 LinkedIn, Inc * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package voldemort.cluster; import java.io.Serializable; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeSet; import voldemort.VoldemortException; import voldemort.annotations.concurrency.Threadsafe; import voldemort.annotations.jmx.JmxGetter; import voldemort.annotations.jmx.JmxManaged; import voldemort.utils.Utils; import com.google.common.collect.Sets; /** * A representation of the voldemort cluster * * */ @Threadsafe @JmxManaged(description = "Metadata about the physical servers on which the Voldemort cluster runs") public class Cluster implements Serializable { private static final long serialVersionUID = 1; private final String name; private final int numberOfPartitionIds; private final Map<Integer, Node> nodesById; private final Map<Integer, Zone> zonesById; private final Map<Zone, List<Integer>> nodesPerZone; private final Map<Zone, List<Integer>> partitionsPerZone; // Since partitionId space must be dense, arrays could be used instead of // maps. To do so, the partition ID range would have to be determined. This // could be done by summing up the lengths of each node's .getPartitionIds() // returned list. 
This could be faster to construct and lookup by some // constant and memory footprint could be better. private final Map<Integer, Zone> partitionIdToZone; private final Node[] partitionIdToNodeArray; private final Map<Integer, Node> partitionIdToNode; private final Map<Integer, Integer> partitionIdToNodeId; public Cluster(String name, List<Node> nodes) { this(name, nodes, new ArrayList<Zone>()); } public Cluster(String name, List<Node> nodes, List<Zone> zones) { this.name = Utils.notNull(name); this.partitionsPerZone = new LinkedHashMap<Zone, List<Integer>>(); this.nodesPerZone = new LinkedHashMap<Zone, List<Integer>>(); this.partitionIdToZone = new HashMap<Integer, Zone>(); Map<Integer, Node> partitionIdToNodeMap = new HashMap<Integer, Node>(); this.partitionIdToNode = new HashMap<Integer, Node>(); this.partitionIdToNodeId = new HashMap<Integer, Integer>(); if(zones.size() != 0) { zonesById = new LinkedHashMap<Integer, Zone>(zones.size()); for(Zone zone: zones) { if(zonesById.containsKey(zone.getId())) throw new IllegalArgumentException("Zone id " + zone.getId() + " appears twice in the zone list."); zonesById.put(zone.getId(), zone); nodesPerZone.put(zone, new ArrayList<Integer>()); partitionsPerZone.put(zone, new ArrayList<Integer>()); } } else { // Add default zone zonesById = new LinkedHashMap<Integer, Zone>(1); Zone defaultZone = new Zone(); zonesById.put(defaultZone.getId(), defaultZone); nodesPerZone.put(defaultZone, new ArrayList<Integer>()); partitionsPerZone.put(defaultZone, new ArrayList<Integer>()); } this.nodesById = new LinkedHashMap<Integer, Node>(nodes.size()); for(Node node: nodes) { if(nodesById.containsKey(node.getId())) throw new IllegalArgumentException("Node id " + node.getId() + " appears twice in the node list."); nodesById.put(node.getId(), node); Zone nodesZone = zonesById.get(node.getZoneId()); if(nodesZone == null) { throw new IllegalArgumentException("No zone associated with this node exists."); } 
nodesPerZone.get(nodesZone).add(node.getId()); partitionsPerZone.get(nodesZone).addAll(node.getPartitionIds()); for(Integer partitionId: node.getPartitionIds()) { if(this.partitionIdToNodeId.containsKey(partitionId)) { throw new IllegalArgumentException("Partition id " + partitionId + " found on two nodes : " + node.getId() + " and " + this.partitionIdToNodeId.get(partitionId)); } this.partitionIdToZone.put(partitionId, nodesZone); partitionIdToNodeMap.put(partitionId, node); this.partitionIdToNode.put(partitionId, node); this.partitionIdToNodeId.put(partitionId, node.getId()); } } this.numberOfPartitionIds = getNumberOfTags(nodes); this.partitionIdToNodeArray = new Node[this.numberOfPartitionIds]; for(int partitionId = 0; partitionId < this.numberOfPartitionIds; partitionId++) { this.partitionIdToNodeArray[partitionId] = partitionIdToNodeMap.get(partitionId); } } private int getNumberOfTags(List<Node> nodes) { List<Integer> tags = new ArrayList<Integer>(); for(Node node: nodes) { tags.addAll(node.getPartitionIds()); } Collections.sort(tags); for(int i = 0; i < numberOfPartitionIds; i++) { if(tags.get(i).intValue() != i) throw new IllegalArgumentException("Invalid tag assignment."); } return tags.size(); } @JmxGetter(name = "name", description = "The name of the cluster") public String getName() { return name; } public Collection<Node> getNodes() { return nodesById.values(); } /** * @return Sorted set of node Ids */ public Set<Integer> getNodeIds() { Set<Integer> nodeIds = nodesById.keySet(); return new TreeSet<Integer>(nodeIds); } /** * * @return Sorted set of Zone Ids */ public Set<Integer> getZoneIds() { Set<Integer> zoneIds = zonesById.keySet(); return new TreeSet<Integer>(zoneIds); } public Collection<Zone> getZones() { return zonesById.values(); } public Zone getZoneById(int id) { Zone zone = zonesById.get(id); if(zone == null) { throw new VoldemortException("No such zone in cluster: " + id + " Available zones : " + displayZones()); } return zone; } private 
String displayZones() { String zoneIDS = "{"; for(Zone z: this.getZones()) { if(zoneIDS.length() != 1) zoneIDS += ","; zoneIDS += z.getId(); } zoneIDS += "}"; return zoneIDS; } public int getNumberOfZones() { return zonesById.size(); } public int getNumberOfPartitionsInZone(Integer zoneId) { return partitionsPerZone.get(getZoneById(zoneId)).size(); } public int getNumberOfNodesInZone(Integer zoneId) { return nodesPerZone.get(getZoneById(zoneId)).size(); } /** * @return Sorted set of node Ids for given zone */ public Set<Integer> getNodeIdsInZone(Integer zoneId) { return new TreeSet<Integer>(nodesPerZone.get(getZoneById(zoneId))); } /** * @return Sorted set of partition Ids for given zone */ public Set<Integer> getPartitionIdsInZone(Integer zoneId) { return new TreeSet<Integer>(partitionsPerZone.get(getZoneById(zoneId))); } public Zone getZoneForPartitionId(int partitionId) { return partitionIdToZone.get(partitionId); } public Node getNodeForPartitionId(int partitionId) { return this.partitionIdToNodeArray[partitionId]; } public Node[] getPartitionIdToNodeArray() { return this.partitionIdToNodeArray; } /** * * @return Map of partition id to node id. 
*/ public Map<Integer, Integer> getPartitionIdToNodeIdMap() { return new HashMap<Integer, Integer>(partitionIdToNodeId); } public Node getNodeById(int id) { Node node = nodesById.get(id); if(node == null) throw new VoldemortException("No such node in cluster: " + id); return node; } /** * Given a cluster and a node id checks if the node exists * * @param nodeId The node id to search for * @return True if cluster contains the node id, else false */ public boolean hasNodeWithId(int nodeId) { Node node = nodesById.get(nodeId); if(node == null) { return false; } return true; } @JmxGetter(name = "numberOfNodes", description = "The number of nodes in the cluster.") public int getNumberOfNodes() { return nodesById.size(); } public int getNumberOfPartitions() { return numberOfPartitionIds; } @Override public String toString() { StringBuilder builder = new StringBuilder(); builder.append("Cluster('"); builder.append(getName()); builder.append("', ["); for(Node n: getNodes()) { builder.append(n.toString()); builder.append('\n'); } builder.append("])"); return builder.toString(); } /** * Return a detailed string representation of the current cluster * * @param isDetailed * @return descripton of cluster */ public String toString(boolean isDetailed) { if(!isDetailed) { return toString(); } StringBuilder builder = new StringBuilder("Cluster [" + getName() + "] Nodes [" + getNumberOfNodes() + "] Zones [" + getNumberOfZones() + "] Partitions [" + getNumberOfPartitions() + "]"); builder.append(" Zone Info [" + getZones() + "]"); builder.append(" Node Info [" + getNodes() + "]"); return builder.toString(); } /** * Clones the cluster by constructing a new one with same name, partition * layout, and nodes. * * @param cluster * @return clone of Cluster cluster. */ public static Cluster cloneCluster(Cluster cluster) { // Could add a better .clone() implementation that clones the derived // data structures. 
The constructor invoked by this clone implementation // can be slow for large numbers of partitions. Probably faster to copy // all the maps and stuff. return new Cluster(cluster.getName(), new ArrayList<Node>(cluster.getNodes()), new ArrayList<Zone>(cluster.getZones())); /*- * Historic "clone" code being kept in case this, for some reason, was the "right" way to be doing this. ClusterMapper mapper = new ClusterMapper(); return mapper.readCluster(new StringReader(mapper.writeCluster(cluster))); */ } @Override public boolean equals(Object second) { if(this == second) return true; if(second == null || second.getClass() != getClass()) return false; Cluster secondCluster = (Cluster) second; if(this.getZones().size() != secondCluster.getZones().size()) { return false; } if(this.getNodes().size() != secondCluster.getNodes().size()) { return false; } for(Zone zoneA: this.getZones()) { Zone zoneB; try { zoneB = secondCluster.getZoneById(zoneA.getId()); } catch(VoldemortException e) { return false; } if(zoneB == null || zoneB.getProximityList().size() != zoneA.getProximityList().size()) { return false; } for(int index = 0; index < zoneA.getProximityList().size(); index++) { if(zoneA.getProximityList().get(index) != zoneB.getProximityList().get(index)) { return false; } } } for(Node nodeA: this.getNodes()) { Node nodeB; try { nodeB = secondCluster.getNodeById(nodeA.getId()); } catch(VoldemortException e) { return false; } if(nodeA.getNumberOfPartitions() != nodeB.getNumberOfPartitions()) { return false; } if(nodeA.getZoneId() != nodeB.getZoneId()) { return false; } if(!Sets.newHashSet(nodeA.getPartitionIds()) .equals(Sets.newHashSet(nodeB.getPartitionIds()))) return false; } return true; } @Override public int hashCode() { int hc = getNodes().size(); for(Node node: getNodes()) { hc ^= node.getHost().hashCode(); } return hc; } }
HB-SI/voldemort
src/java/voldemort/cluster/Cluster.java
Java
apache-2.0
13,745
# Copyright 2015 Rackspace US, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Fastfood Chef Cookbook manager.""" from __future__ import print_function import os from fastfood import utils class CookBook(object): """Chef Cookbook object. Understands metadata.rb, Berksfile and how to parse them. """ def __init__(self, path): """Initialize CookBook wrapper at 'path'.""" self.path = utils.normalize_path(path) self._metadata = None if not os.path.isdir(path): raise ValueError("Cookbook dir %s does not exist." % self.path) self._berksfile = None @property def name(self): """Cookbook name property.""" try: return self.metadata.to_dict()['name'] except KeyError: raise LookupError("%s is missing 'name' attribute'." 
% self.metadata) @property def metadata(self): """Return dict representation of this cookbook's metadata.rb .""" self.metadata_path = os.path.join(self.path, 'metadata.rb') if not os.path.isfile(self.metadata_path): raise ValueError("Cookbook needs metadata.rb, %s" % self.metadata_path) if not self._metadata: self._metadata = MetadataRb(open(self.metadata_path, 'r+')) return self._metadata @property def berksfile(self): """Return this cookbook's Berksfile instance.""" self.berks_path = os.path.join(self.path, 'Berksfile') if not self._berksfile: if not os.path.isfile(self.berks_path): raise ValueError("No Berksfile found at %s" % self.berks_path) self._berksfile = Berksfile(open(self.berks_path, 'r+')) return self._berksfile class MetadataRb(utils.FileWrapper): """Wrapper for a metadata.rb file.""" @classmethod def from_dict(cls, dictionary): """Create a MetadataRb instance from a dict.""" cookbooks = set() # put these in order groups = [cookbooks] for key, val in dictionary.items(): if key == 'depends': cookbooks.update({cls.depends_statement(cbn, meta) for cbn, meta in val.items()}) body = '' for group in groups: if group: body += '\n' body += '\n'.join(group) return cls.from_string(body) @staticmethod def depends_statement(cookbook_name, metadata=None): """Return a valid Ruby 'depends' statement for the metadata.rb file.""" line = "depends '%s'" % cookbook_name if metadata: if not isinstance(metadata, dict): raise TypeError("Stencil dependency options for %s " "should be a dict of options, not %s." 
% (cookbook_name, metadata)) if metadata: line = "%s '%s'" % (line, "', '".join(metadata)) return line def to_dict(self): """Return a dictionary representation of this metadata.rb file.""" return self.parse() def parse(self): """Parse the metadata.rb into a dict.""" data = utils.ruby_lines(self.readlines()) data = [tuple(j.strip() for j in line.split(None, 1)) for line in data] depends = {} for line in data: if not len(line) == 2: continue key, value = line if key == 'depends': value = value.split(',') lib = utils.ruby_strip(value[0]) detail = [utils.ruby_strip(j) for j in value[1:]] depends[lib] = detail datamap = {key: utils.ruby_strip(val) for key, val in data} if depends: datamap['depends'] = depends self.seek(0) return datamap def merge(self, other): """Add requirements from 'other' metadata.rb into this one.""" if not isinstance(other, MetadataRb): raise TypeError("MetadataRb to merge should be a 'MetadataRb' " "instance, not %s.", type(other)) current = self.to_dict() new = other.to_dict() # compare and gather cookbook dependencies meta_writelines = ['%s\n' % self.depends_statement(cbn, meta) for cbn, meta in new.get('depends', {}).items() if cbn not in current.get('depends', {})] self.write_statements(meta_writelines) return self.to_dict() class Berksfile(utils.FileWrapper): """Wrapper for a Berksfile.""" berks_options = [ 'branch', 'git', 'path', 'ref', 'revision', 'tag', ] def to_dict(self): """Return a dictionary representation of this Berksfile.""" return self.parse() def parse(self): """Parse this Berksfile into a dict.""" self.flush() self.seek(0) data = utils.ruby_lines(self.readlines()) data = [tuple(j.strip() for j in line.split(None, 1)) for line in data] datamap = {} for line in data: if len(line) == 1: datamap[line[0]] = True elif len(line) == 2: key, value = line if key == 'cookbook': datamap.setdefault('cookbook', {}) value = [utils.ruby_strip(v) for v in value.split(',')] lib, detail = value[0], value[1:] datamap['cookbook'].setdefault(lib, 
{}) # if there is additional dependency data but its # not the ruby hash, its the version constraint if detail and not any("".join(detail).startswith(o) for o in self.berks_options): constraint, detail = detail[0], detail[1:] datamap['cookbook'][lib]['constraint'] = constraint if detail: for deet in detail: opt, val = [ utils.ruby_strip(i) for i in deet.split(':', 1) ] if not any(opt == o for o in self.berks_options): raise ValueError( "Cookbook detail '%s' does not specify " "one of '%s'" % (opt, self.berks_options)) else: datamap['cookbook'][lib][opt.strip(':')] = ( utils.ruby_strip(val)) elif key == 'source': datamap.setdefault(key, []) datamap[key].append(utils.ruby_strip(value)) elif key: datamap[key] = utils.ruby_strip(value) self.seek(0) return datamap @classmethod def from_dict(cls, dictionary): """Create a Berksfile instance from a dict.""" cookbooks = set() sources = set() other = set() # put these in order groups = [sources, cookbooks, other] for key, val in dictionary.items(): if key == 'cookbook': cookbooks.update({cls.cookbook_statement(cbn, meta) for cbn, meta in val.items()}) elif key == 'source': sources.update({"source '%s'" % src for src in val}) elif key == 'metadata': other.add('metadata') body = '' for group in groups: if group: body += '\n' body += '\n'.join(group) return cls.from_string(body) @staticmethod def cookbook_statement(cookbook_name, metadata=None): """Return a valid Ruby 'cookbook' statement for the Berksfile.""" line = "cookbook '%s'" % cookbook_name if metadata: if not isinstance(metadata, dict): raise TypeError("Berksfile dependency hash for %s " "should be a dict of options, not %s." % (cookbook_name, metadata)) # not like the others... 
if 'constraint' in metadata: line += ", '%s'" % metadata.pop('constraint') for opt, spec in metadata.items(): line += ", %s: '%s'" % (opt, spec) return line def merge(self, other): """Add requirements from 'other' Berksfile into this one.""" if not isinstance(other, Berksfile): raise TypeError("Berksfile to merge should be a 'Berksfile' " "instance, not %s.", type(other)) current = self.to_dict() new = other.to_dict() # compare and gather cookbook dependencies berks_writelines = ['%s\n' % self.cookbook_statement(cbn, meta) for cbn, meta in new.get('cookbook', {}).items() if cbn not in current.get('cookbook', {})] # compare and gather 'source' requirements berks_writelines.extend(["source '%s'\n" % src for src in new.get('source', []) if src not in current.get('source', [])]) self.write_statements(berks_writelines) return self.to_dict()
samstav/fastfood
fastfood/book.py
Python
apache-2.0
10,125
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.aggregations.bucket.composite; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; import org.elasticsearch.search.aggregations.LeafBucketCollector; import java.io.IOException; import static org.elasticsearch.search.aggregations.support.ValuesSource.Numeric; import static org.elasticsearch.search.aggregations.support.ValuesSource.Bytes; import static org.elasticsearch.search.aggregations.support.ValuesSource.Bytes.WithOrdinals; final class CompositeValuesComparator { private final int size; private final CompositeValuesSource<?, ?>[] arrays; private boolean topValueSet = false; /** * * @param sources The list of {@link CompositeValuesSourceConfig} to build the composite buckets. * @param size The number of composite buckets to keep. 
*/ CompositeValuesComparator(IndexReader reader, CompositeValuesSourceConfig[] sources, int size) { this.size = size; this.arrays = new CompositeValuesSource<?, ?>[sources.length]; for (int i = 0; i < sources.length; i++) { final int reverseMul = sources[i].reverseMul(); if (sources[i].valuesSource() instanceof WithOrdinals && reader instanceof DirectoryReader) { WithOrdinals vs = (WithOrdinals) sources[i].valuesSource(); arrays[i] = CompositeValuesSource.wrapGlobalOrdinals(vs, size, reverseMul); } else if (sources[i].valuesSource() instanceof Bytes) { Bytes vs = (Bytes) sources[i].valuesSource(); arrays[i] = CompositeValuesSource.wrapBinary(vs, size, reverseMul); } else if (sources[i].valuesSource() instanceof Numeric) { final Numeric vs = (Numeric) sources[i].valuesSource(); if (vs.isFloatingPoint()) { arrays[i] = CompositeValuesSource.wrapDouble(vs, size, reverseMul); } else { arrays[i] = CompositeValuesSource.wrapLong(vs, sources[i].format(), size, reverseMul); } } } } /** * Moves the values in <code>slot1</code> to <code>slot2</code>. */ void move(int slot1, int slot2) { assert slot1 < size && slot2 < size; for (int i = 0; i < arrays.length; i++) { arrays[i].move(slot1, slot2); } } /** * Compares the values in <code>slot1</code> with <code>slot2</code>. */ int compare(int slot1, int slot2) { assert slot1 < size && slot2 < size; for (int i = 0; i < arrays.length; i++) { int cmp = arrays[i].compare(slot1, slot2); if (cmp != 0) { return cmp; } } return 0; } /** * Returns true if a top value has been set for this comparator. */ boolean hasTop() { return topValueSet; } /** * Sets the top values for this comparator. */ void setTop(Comparable<?>[] values) { assert values.length == arrays.length; topValueSet = true; for (int i = 0; i < arrays.length; i++) { arrays[i].setTop(values[i]); } } /** * Compares the top values with the values in <code>slot</code>. 
*/ int compareTop(int slot) { assert slot < size; for (int i = 0; i < arrays.length; i++) { int cmp = arrays[i].compareTop(slot); if (cmp != 0) { return cmp; } } return 0; } /** * Builds the {@link CompositeKey} for <code>slot</code>. */ CompositeKey toCompositeKey(int slot) throws IOException { assert slot < size; Comparable<?>[] values = new Comparable<?>[arrays.length]; for (int i = 0; i < values.length; i++) { values[i] = arrays[i].toComparable(slot); } return new CompositeKey(values); } /** * Gets the {@link LeafBucketCollector} that will record the composite buckets of the visited documents. */ CompositeValuesSource.Collector getLeafCollector(LeafReaderContext context, CompositeValuesSource.Collector in) throws IOException { int last = arrays.length - 1; CompositeValuesSource.Collector next = arrays[last].getLeafCollector(context, in); for (int i = last - 1; i >= 0; i--) { next = arrays[i].getLeafCollector(context, next); } return next; } }
qwerty4030/elasticsearch
server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesComparator.java
Java
apache-2.0
5,333
<?php App::uses('AppModel', 'Model'); App::uses('SimplePasswordHasher', 'Controller/Component/Auth'); class Product extends AppModel { }
katedoloverio/PCInventory
app/Model/Product.php
PHP
apache-2.0
138
class UsersController < ApplicationController include BatchProcessable # special find method before load_resource before_filter :build_user_with_proper_mission, :only => [:new, :create] # authorization via CanCan load_and_authorize_resource def index # sort and eager load @users = @users.by_name # if there is a search with the '.' character in it, we can't eager load due to a bug in Rails # this should be fixed in Rails 4 unless params[:search].present? && params[:search].match(/\./) @users = @users.with_assoc end # do search if applicable if params[:search].present? begin @users = User.do_search(@users, params[:search]) rescue Search::ParseError flash.now[:error] = $!.to_s @search_error = true end end end def new # set the default pref_lang based on the mission settings prepare_and_render_form end def show prepare_and_render_form end def edit prepare_and_render_form end def create if @user.save @user.reset_password_if_requested set_success(@user) # render printable instructions if requested handle_printable_instructions # if create failed, render the form again else flash.now[:error] = I18n.t('activerecord.errors.models.user.general') prepare_and_render_form end end def update # make sure changing assignment role is permitted if attempting authorize!(:change_assignments, @user) if params[:user]['assignments_attributes'] @user.assign_attributes(params[:user]) pref_lang_changed = @user.pref_lang_changed? 
if @user.save if @user == current_user I18n.locale = @user.pref_lang.to_sym if pref_lang_changed flash[:success] = t("user.profile_updated") redirect_to(:action => :edit) else set_success(@user) # if the user's password was reset, do it, and show instructions if requested @user.reset_password_if_requested handle_printable_instructions end # if save failed, render the form again else flash.now[:error] = I18n.t('activerecord.errors.models.user.general') prepare_and_render_form end end def destroy destroy_and_handle_errors(@user) redirect_to(index_url_with_page_num) end # shows printable login instructions for the user def login_instructions end # exports the selected users to VCF format def export respond_to do |format| format.vcf do @users = params[:selected] ? load_selected_objects(User) : [] render(:text => @users.collect{|u| u.to_vcf}.join("\n")) end end end def regenerate_key @user = User.find(params[:id]) @user.regenerate_api_key redirect_to(:action => :edit) end private # if we need to print instructions, redirects to the instructions action. otherwise redirects to index. def handle_printable_instructions if @user.reset_password_method == "print" # save the password in the flash since we won't be able to get it once it's crypted flash[:password] = @user.password redirect_to(:action => :login_instructions, :id => @user.id) else redirect_to(index_url_with_page_num) end end # prepares objects and renders the form template def prepare_and_render_form if admin_mode? # get assignable missons and roles for this user @assignments = @user.assignments.as_json(:include => :mission, :methods => :new_record?) 
@assignment_permissions = @user.assignments.map{|a| can?(:update, a)} @assignable_missions = Mission.accessible_by(current_ability, :assign_to).sorted_by_name.as_json(:only => [:id, :name]) @assignable_roles = Ability.assignable_roles(current_user) else @current_assignment = @user.assignments_by_mission[current_mission] || @user.assignments.build(:mission => current_mission) end render(:form) end # builds a user with an appropriate mission assignment if the current_user doesn't have permission to edit a blank user def build_user_with_proper_mission @user = User.new(params[:user]) if cannot?(:create, @user) && @user.assignments.empty? @user.assignments.build(:mission => current_mission) end end end
nmckahd/AHDBurundi
app/controllers/users_controller.rb
Ruby
apache-2.0
4,362
<?php final class PhabricatorProjectIcon extends Phobject { public static function getIconMap() { return array( 'fa-briefcase' => pht('Briefcase'), 'fa-tags' => pht('Tag'), 'fa-folder' => pht('Folder'), 'fa-users' => pht('Team'), 'fa-bug' => pht('Bug'), 'fa-trash-o' => pht('Garbage'), 'fa-calendar' => pht('Deadline'), 'fa-flag-checkered' => pht('Goal'), 'fa-envelope' => pht('Communication'), 'fa-truck' => pht('Release'), 'fa-lock' => pht('Policy'), 'fa-umbrella' => pht('An Umbrella'), 'fa-cloud' => pht('The Cloud'), 'fa-building' => pht('Company'), 'fa-credit-card' => pht('Accounting'), 'fa-flask' => pht('Experimental'), ); } public static function getColorMap() { $shades = PHUITagView::getShadeMap(); $shades = array_select_keys( $shades, array(PhabricatorProject::DEFAULT_COLOR)) + $shades; unset($shades[PHUITagView::COLOR_DISABLED]); return $shades; } public static function getLabel($key) { $map = self::getIconMap(); return $map[$key]; } public static function getAPIName($key) { return substr($key, 3); } public static function renderIconForChooser($icon) { $project_icons = PhabricatorProjectIcon::getIconMap(); return phutil_tag( 'span', array(), array( id(new PHUIIconView())->setIconFont($icon), ' ', idx($project_icons, $icon, pht('Unknown Icon')), )); } }
hach-que/unearth-phabricator
src/applications/project/icon/PhabricatorProjectIcon.php
PHP
apache-2.0
1,547
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.tamaya.events; import org.apache.tamaya.ConfigException; import org.apache.tamaya.ConfigOperator; import org.apache.tamaya.ConfigQuery; import org.apache.tamaya.Configuration; import org.apache.tamaya.ConfigurationProvider; import org.apache.tamaya.TypeLiteral; import org.apache.tamaya.spi.PropertyConverter; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; import java.util.stream.Collectors; /** * Created by Anatole on 24.03.2015. 
*/ public class TestConfigView implements ConfigOperator{ private static final TestConfigView INSTANCE = new TestConfigView(); private TestConfigView(){} public static ConfigOperator of(){ return INSTANCE; } @Override public Configuration operate(final Configuration config) { return new Configuration() { @Override public Map<String, String> getProperties() { Map<String, String> result = new HashMap<>(); for (Map.Entry<String, String> en : config.getProperties().entrySet()) { if (en.getKey().startsWith("test")) { result.put(en.getKey(), en.getValue()); } } return result; // return config.getProperties().entrySet().stream().filter(e -> e.getKey().startsWith("test")).collect( // Collectors.toMap(en -> en.getKey(), en -> en.getValue())); } @Override public Configuration with(ConfigOperator operator) { return null; } @Override public <T> T query(ConfigQuery<T> query) { return null; } @Override public String get(String key) { return getProperties().get(key); } @Override public <T> T get(String key, Class<T> type) { return (T) get(key, TypeLiteral.of(type)); } /** * Accesses the current String value for the given key and tries to convert it * using the {@link org.apache.tamaya.spi.PropertyConverter} instances provided by the current * {@link org.apache.tamaya.spi.ConfigurationContext}. * * @param key the property's absolute, or relative path, e.g. @code * a/b/c/d.myProperty}. * @param type The target type required, not null. * @param <T> the value type * @return the converted value, never null. 
*/ @Override public <T> T get(String key, TypeLiteral<T> type) { String value = get(key); if (value != null) { List<PropertyConverter<T>> converters = ConfigurationProvider.getConfigurationContext() .getPropertyConverters(type); for (PropertyConverter<T> converter : converters) { try { T t = converter.convert(value); if (t != null) { return t; } } catch (Exception e) { Logger.getLogger(getClass().getName()) .log(Level.FINEST, "PropertyConverter: " + converter + " failed to convert value: " + value, e); } } throw new ConfigException("Unparseable config value for type: " + type.getRawType().getName() + ": " + key); } return null; } }; } }
syzer/incubator-tamaya
modules/events/src/test/java/org/apache/tamaya/events/TestConfigView.java
Java
apache-2.0
4,643
<?php /*+*********************************************************************************** * The contents of this file are subject to the vtiger CRM Public License Version 1.0 * ("License"); You may not use this file except in compliance with the License * The Original Code is: vtiger CRM Open Source * The Initial Developer of the Original Code is vtiger. * Portions created by vtiger are Copyright (C) vtiger. * All Rights Reserved. *************************************************************************************/ class Products_Module_Model extends Vtiger_Module_Model { /** * Function to get list view query for popup window * @param <String> $sourceModule Parent module * @param <String> $field parent fieldname * @param <Integer> $record parent id * @param <String> $listQuery * @return <String> Listview Query */ public function getQueryByModuleField($sourceModule, $field, $record, $listQuery) { $supportedModulesList = array($this->getName(), 'Vendors', 'Leads', 'Accounts', 'Contacts', 'Potentials'); if (($sourceModule == 'PriceBooks' && $field == 'priceBookRelatedList') || in_array($sourceModule, $supportedModulesList) || in_array($sourceModule, getInventoryModules())) { $condition = " vtiger_products.discontinued = 1 "; if ($sourceModule === $this->getName()) { $condition .= " AND vtiger_products.productid NOT IN (SELECT productid FROM vtiger_seproductsrel WHERE crmid = '$record' UNION SELECT crmid FROM vtiger_seproductsrel WHERE productid = '$record') AND vtiger_products.productid <> '$record' "; } elseif ($sourceModule === 'PriceBooks') { $condition .= " AND vtiger_products.productid NOT IN (SELECT productid FROM vtiger_pricebookproductrel WHERE pricebookid = '$record') "; } elseif ($sourceModule === 'Vendors') { $condition .= " AND vtiger_products.vendor_id != '$record' "; } elseif (in_array($sourceModule, $supportedModulesList)) { $condition .= " AND vtiger_products.productid NOT IN (SELECT productid FROM vtiger_seproductsrel WHERE crmid = '$record')"; } 
$pos = stripos($listQuery, 'where'); if ($pos) { $split = spliti('where', $listQuery); $overRideQuery = $split[0] . ' WHERE ' . $split[1] . ' AND ' . $condition; } else { $overRideQuery = $listQuery. ' WHERE ' . $condition; } return $overRideQuery; } } /** * Function to get Specific Relation Query for this Module * @param <type> $relatedModule * @return <type> */ public function getSpecificRelationQuery($relatedModule) { if ($relatedModule === 'Leads') { $specificQuery = 'AND vtiger_leaddetails.converted = 0'; return $specificQuery; } return parent::getSpecificRelationQuery($relatedModule); } /** * Function to get prices for specified products with specific currency * @param <Integer> $currenctId * @param <Array> $productIdsList * @return <Array> */ public function getPricesForProducts($currencyId, $productIdsList) { return getPricesForProducts($currencyId, $productIdsList, $this->getName()); } /** * Function to check whether the module is summary view supported * @return <Boolean> - true/false */ public function isSummaryViewSupported() { return false; } /** * Function searches the records in the module, if parentId & parentModule * is given then searches only those records related to them. 
* @param <String> $searchValue - Search value * @param <Integer> $parentId - parent recordId * @param <String> $parentModule - parent module name * @return <Array of Vtiger_Record_Model> */ public function searchRecord($searchValue, $parentId=false, $parentModule=false, $relatedModule=false) { if(!empty($searchValue) && empty($parentId) && empty($parentModule) && (in_array($relatedModule, getInventoryModules()))) { $matchingRecords = Products_Record_Model::getSearchResult($searchValue, $this->getName()); }else { return parent::searchRecord($searchValue); } return $matchingRecords; } /** * Function returns query for Product-PriceBooks relation * @param <Vtiger_Record_Model> $recordModel * @param <Vtiger_Record_Model> $relatedModuleModel * @return <String> */ function get_product_pricebooks($recordModel, $relatedModuleModel) { $query = 'SELECT vtiger_pricebook.pricebookid, vtiger_pricebook.bookname, vtiger_pricebook.active, vtiger_crmentity.crmid, vtiger_crmentity.smownerid, vtiger_pricebookproductrel.listprice, vtiger_products.unit_price FROM vtiger_pricebook INNER JOIN vtiger_pricebookproductrel ON vtiger_pricebook.pricebookid = vtiger_pricebookproductrel.pricebookid INNER JOIN vtiger_crmentity on vtiger_crmentity.crmid = vtiger_pricebook.pricebookid INNER JOIN vtiger_products on vtiger_products.productid = vtiger_pricebookproductrel.productid INNER JOIN vtiger_pricebookcf on vtiger_pricebookcf.pricebookid = vtiger_pricebook.pricebookid LEFT JOIN vtiger_users ON vtiger_users.id=vtiger_crmentity.smownerid LEFT JOIN vtiger_groups ON vtiger_groups.groupid = vtiger_crmentity.smownerid ' . Users_Privileges_Model::getNonAdminAccessControlQuery($relatedModuleModel->getName()) .' WHERE vtiger_products.productid = '.$recordModel->getId().' and vtiger_crmentity.deleted = 0'; return $query; } }
basiljose1/byjcrm
modules/Products/models/Module.php
PHP
apache-2.0
5,293
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.planner.functions.aggfunctions; import org.apache.flink.table.api.DataTypes; import org.apache.flink.table.expressions.Expression; import org.apache.flink.table.expressions.UnresolvedCallExpression; import org.apache.flink.table.expressions.UnresolvedReferenceExpression; import org.apache.flink.table.types.DataType; import org.apache.flink.table.types.logical.DecimalType; import org.apache.flink.table.types.logical.utils.LogicalTypeMerging; import java.math.BigDecimal; import static org.apache.flink.table.expressions.ApiExpressionUtils.unresolvedRef; import static org.apache.flink.table.planner.expressions.ExpressionBuilder.cast; import static org.apache.flink.table.planner.expressions.ExpressionBuilder.div; import static org.apache.flink.table.planner.expressions.ExpressionBuilder.equalTo; import static org.apache.flink.table.planner.expressions.ExpressionBuilder.ifThenElse; import static org.apache.flink.table.planner.expressions.ExpressionBuilder.isNull; import static org.apache.flink.table.planner.expressions.ExpressionBuilder.literal; import static org.apache.flink.table.planner.expressions.ExpressionBuilder.minus; import static 
org.apache.flink.table.planner.expressions.ExpressionBuilder.nullOf; import static org.apache.flink.table.planner.expressions.ExpressionBuilder.plus; import static org.apache.flink.table.planner.expressions.ExpressionBuilder.typeLiteral; /** built-in avg aggregate function. */ public abstract class AvgAggFunction extends DeclarativeAggregateFunction { private UnresolvedReferenceExpression sum = unresolvedRef("sum"); private UnresolvedReferenceExpression count = unresolvedRef("count"); public abstract DataType getSumType(); @Override public int operandCount() { return 1; } @Override public UnresolvedReferenceExpression[] aggBufferAttributes() { return new UnresolvedReferenceExpression[] {sum, count}; } @Override public DataType[] getAggBufferTypes() { return new DataType[] {getSumType(), DataTypes.BIGINT()}; } @Override public Expression[] initialValuesExpressions() { return new Expression[] { /* sum = */ literal(0L, getSumType().notNull()), /* count = */ literal(0L) }; } @Override public Expression[] accumulateExpressions() { return new Expression[] { /* sum = */ adjustSumType(ifThenElse(isNull(operand(0)), sum, plus(sum, operand(0)))), /* count = */ ifThenElse(isNull(operand(0)), count, plus(count, literal(1L))), }; } @Override public Expression[] retractExpressions() { return new Expression[] { /* sum = */ adjustSumType(ifThenElse(isNull(operand(0)), sum, minus(sum, operand(0)))), /* count = */ ifThenElse(isNull(operand(0)), count, minus(count, literal(1L))), }; } @Override public Expression[] mergeExpressions() { return new Expression[] { /* sum = */ adjustSumType(plus(sum, mergeOperand(sum))), /* count = */ plus(count, mergeOperand(count)) }; } private UnresolvedCallExpression adjustSumType(UnresolvedCallExpression sumExpr) { return cast(sumExpr, typeLiteral(getSumType())); } /** If all input are nulls, count will be 0 and we will get null after the division. 
*/ @Override public Expression getValueExpression() { Expression ifTrue = nullOf(getResultType()); Expression ifFalse = cast(div(sum, count), typeLiteral(getResultType())); return ifThenElse(equalTo(count, literal(0L)), ifTrue, ifFalse); } /** Built-in Byte Avg aggregate function. */ public static class ByteAvgAggFunction extends AvgAggFunction { @Override public DataType getResultType() { return DataTypes.TINYINT(); } @Override public DataType getSumType() { return DataTypes.BIGINT(); } } /** Built-in Short Avg aggregate function. */ public static class ShortAvgAggFunction extends AvgAggFunction { @Override public DataType getResultType() { return DataTypes.SMALLINT(); } @Override public DataType getSumType() { return DataTypes.BIGINT(); } } /** Built-in Integer Avg aggregate function. */ public static class IntAvgAggFunction extends AvgAggFunction { @Override public DataType getResultType() { return DataTypes.INT(); } @Override public DataType getSumType() { return DataTypes.BIGINT(); } } /** Built-in Long Avg aggregate function. */ public static class LongAvgAggFunction extends AvgAggFunction { @Override public DataType getResultType() { return DataTypes.BIGINT(); } @Override public DataType getSumType() { return DataTypes.BIGINT(); } } /** Built-in Float Avg aggregate function. */ public static class FloatAvgAggFunction extends AvgAggFunction { @Override public DataType getResultType() { return DataTypes.FLOAT(); } @Override public DataType getSumType() { return DataTypes.DOUBLE(); } @Override public Expression[] initialValuesExpressions() { return new Expression[] {literal(0D), literal(0L)}; } } /** Built-in Double Avg aggregate function. 
*/ public static class DoubleAvgAggFunction extends AvgAggFunction { @Override public DataType getResultType() { return DataTypes.DOUBLE(); } @Override public DataType getSumType() { return DataTypes.DOUBLE(); } @Override public Expression[] initialValuesExpressions() { return new Expression[] {literal(0D), literal(0L)}; } } /** Built-in Decimal Avg aggregate function. */ public static class DecimalAvgAggFunction extends AvgAggFunction { private final DecimalType type; public DecimalAvgAggFunction(DecimalType type) { this.type = type; } @Override public DataType getResultType() { DecimalType t = (DecimalType) LogicalTypeMerging.findAvgAggType(type); return DataTypes.DECIMAL(t.getPrecision(), t.getScale()); } @Override public DataType getSumType() { DecimalType t = (DecimalType) LogicalTypeMerging.findSumAggType(type); return DataTypes.DECIMAL(t.getPrecision(), t.getScale()); } @Override public Expression[] initialValuesExpressions() { return new Expression[] {literal(BigDecimal.ZERO, getSumType().notNull()), literal(0L)}; } } }
StephanEwen/incubator-flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/aggfunctions/AvgAggFunction.java
Java
apache-2.0
7,723
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Defines relational expressions and rules for converting between calling * conventions. */ package org.apache.calcite.rel.convert; // End package-info.java
mehant/incubator-calcite
core/src/main/java/org/apache/calcite/rel/convert/package-info.java
Java
apache-2.0
964
/*Copyright (C) 2012 Longerian (http://www.longerian.me) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.*/ package org.rubychina.android; import java.util.ArrayList; import java.util.List; import org.rubychina.android.type.Node; import org.rubychina.android.type.SiteGroup; import org.rubychina.android.type.Topic; import org.rubychina.android.type.User; public enum GlobalResource { INSTANCE; private List<Topic> curTopics = new ArrayList<Topic>(); private List<Node> nodes = new ArrayList<Node>(); private List<User> users = new ArrayList<User>(); private List<SiteGroup> sites = new ArrayList<SiteGroup>(); public synchronized List<Topic> getCurTopics() { return curTopics; } public synchronized void setCurTopics(List<Topic> curTopics) { this.curTopics = curTopics; } public synchronized List<Node> getNodes() { return nodes; } public synchronized void setNodes(List<Node> nodes) { this.nodes = nodes; } public synchronized List<User> getUsers() { return users; } public synchronized void setUsers(List<User> users) { this.users = users; } public synchronized List<SiteGroup> getSites() { return sites; } public synchronized void setSites(List<SiteGroup> sites) { this.sites = sites; } }
longerian/RC4A
src/org/rubychina/android/GlobalResource.java
Java
apache-2.0
1,797
package set import ( "encoding/json" "errors" "fmt" "io" "os" "path" "strings" "github.com/golang/glog" "github.com/spf13/cobra" kapi "k8s.io/kubernetes/pkg/api" apierrs "k8s.io/kubernetes/pkg/api/errors" "k8s.io/kubernetes/pkg/api/meta" kresource "k8s.io/kubernetes/pkg/api/resource" "k8s.io/kubernetes/pkg/api/unversioned" kclient "k8s.io/kubernetes/pkg/client/unversioned" kcmdutil "k8s.io/kubernetes/pkg/kubectl/cmd/util" "k8s.io/kubernetes/pkg/kubectl/resource" "k8s.io/kubernetes/pkg/labels" "k8s.io/kubernetes/pkg/runtime" "github.com/openshift/origin/pkg/cmd/templates" cmdutil "github.com/openshift/origin/pkg/cmd/util" "github.com/openshift/origin/pkg/cmd/util/clientcmd" ) const ( volumePrefix = "volume-" storageAnnClass = "volume.beta.kubernetes.io/storage-class" ) var ( volumeLong = templates.LongDesc(` Update volumes on a pod template This command can add, update or remove volumes from containers for any object that has a pod template (deployment configs, replication controllers, or pods). You can list volumes in pod or any object that has a pod template. You can specify a single object or multiple, and alter volumes on all containers or just those that match a given name. If you alter a volume setting on a deployment config, a deployment will be triggered. Changing a replication controller will not affect running pods, and you cannot change a pod's volumes once it has been created. Volume types include: * emptydir (empty directory) *default*: A directory allocated when the pod is created on a local host, is removed when the pod is deleted and is not copied across servers * hostdir (host directory): A directory with specific path on any host (requires elevated privileges) * persistentvolumeclaim or pvc (persistent volume claim): Link the volume directory in the container to a persistent volume claim you have allocated by name - a persistent volume claim is a request to allocate storage. Note that if your claim hasn't been bound, your pods will not start. 
* secret (mounted secret): Secret volumes mount a named secret to the provided directory. For descriptions on other volume types, see https://docs.openshift.com`) volumeExample = templates.Examples(` # List volumes defined on all deployment configs in the current project %[1]s volume dc --all # Add a new empty dir volume to deployment config (dc) 'registry' mounted under # /var/lib/registry %[1]s volume dc/registry --add --mount-path=/var/lib/registry # Use an existing persistent volume claim (pvc) to overwrite an existing volume 'v1' %[1]s volume dc/registry --add --name=v1 -t pvc --claim-name=pvc1 --overwrite # Remove volume 'v1' from deployment config 'registry' %[1]s volume dc/registry --remove --name=v1 # Create a new persistent volume claim that overwrites an existing volume 'v1' %[1]s volume dc/registry --add --name=v1 -t pvc --claim-size=1G --overwrite # Change the mount point for volume 'v1' to /data %[1]s volume dc/registry --add --name=v1 -m /data --overwrite # Modify the deployment config by removing volume mount "v1" from container "c1" # (and by removing the volume "v1" if no other containers have volume mounts that reference it) %[1]s volume dc/registry --remove --name=v1 --containers=c1 # Add new volume based on a more complex volume source (Git repo, AWS EBS, GCE PD, # Ceph, Gluster, NFS, ISCSI, ...) 
%[1]s volume dc/registry --add -m /repo --source=<json-string>`) ) type VolumeOptions struct { DefaultNamespace string ExplicitNamespace bool Out io.Writer Err io.Writer Mapper meta.RESTMapper Typer runtime.ObjectTyper RESTClientFactory func(mapping *meta.RESTMapping) (resource.RESTClient, error) UpdatePodSpecForObject func(obj runtime.Object, fn func(*kapi.PodSpec) error) (bool, error) Client kclient.PersistentVolumeClaimsNamespacer Encoder runtime.Encoder // Resource selection Selector string All bool Filenames []string // Operations Add bool Remove bool List bool // Common optional params Name string Containers string Confirm bool Output string PrintObject func([]*resource.Info) error OutputVersion unversioned.GroupVersion // Add op params AddOpts *AddVolumeOptions } type AddVolumeOptions struct { Type string MountPath string Overwrite bool Path string ConfigMapName string SecretName string Source string CreateClaim bool ClaimName string ClaimSize string ClaimMode string ClaimClass string TypeChanged bool } func NewCmdVolume(fullName string, f *clientcmd.Factory, out, errOut io.Writer) *cobra.Command { addOpts := &AddVolumeOptions{} opts := &VolumeOptions{AddOpts: addOpts} cmd := &cobra.Command{ Use: "volumes RESOURCE/NAME --add|--remove|--list", Short: "Update volumes on a pod template", Long: volumeLong, Example: fmt.Sprintf(volumeExample, fullName), Aliases: []string{"volume"}, Run: func(cmd *cobra.Command, args []string) { addOpts.TypeChanged = cmd.Flag("type").Changed err := opts.Validate(cmd, args) if err != nil { kcmdutil.CheckErr(kcmdutil.UsageError(cmd, err.Error())) } err = opts.Complete(f, cmd, out, errOut) kcmdutil.CheckErr(err) err = opts.RunVolume(args) if err == cmdutil.ErrExit { os.Exit(1) } kcmdutil.CheckErr(err) }, } cmd.Flags().StringVarP(&opts.Selector, "selector", "l", "", "Selector (label query) to filter on") cmd.Flags().BoolVar(&opts.All, "all", false, "select all resources in the namespace of the specified resource types") 
cmd.Flags().StringSliceVarP(&opts.Filenames, "filename", "f", opts.Filenames, "Filename, directory, or URL to file to use to edit the resource.") cmd.Flags().BoolVar(&opts.Add, "add", false, "Add volume and/or volume mounts for containers") cmd.Flags().BoolVar(&opts.Remove, "remove", false, "Remove volume and/or volume mounts for containers") cmd.Flags().BoolVar(&opts.List, "list", false, "List volumes and volume mounts for containers") cmd.Flags().StringVar(&opts.Name, "name", "", "Name of the volume. If empty, auto generated for add operation") cmd.Flags().StringVarP(&opts.Containers, "containers", "c", "*", "The names of containers in the selected pod templates to change - may use wildcards") cmd.Flags().BoolVar(&opts.Confirm, "confirm", false, "Confirm that you really want to remove multiple volumes") cmd.Flags().StringVarP(&addOpts.Type, "type", "t", "", "Type of the volume source for add operation. Supported options: emptyDir, hostPath, secret, configmap, persistentVolumeClaim") cmd.Flags().StringVarP(&addOpts.MountPath, "mount-path", "m", "", "Mount path inside the container. Optional param for --add or --remove") cmd.Flags().BoolVar(&addOpts.Overwrite, "overwrite", false, "If true, replace existing volume source and/or volume mount for the given resource") cmd.Flags().StringVar(&addOpts.Path, "path", "", "Host path. Must be provided for hostPath volume type") cmd.Flags().StringVar(&addOpts.ConfigMapName, "configmap-name", "", "Name of the persisted config map. Must be provided for configmap volume type") cmd.Flags().StringVar(&addOpts.SecretName, "secret-name", "", "Name of the persisted secret. Must be provided for secret volume type") cmd.Flags().StringVar(&addOpts.ClaimName, "claim-name", "", "Persistent volume claim name. 
Must be provided for persistentVolumeClaim volume type") cmd.Flags().StringVar(&addOpts.ClaimClass, "claim-class", "", "StorageClass to use for the persistent volume claim") cmd.Flags().StringVar(&addOpts.ClaimSize, "claim-size", "", "If specified along with a persistent volume type, create a new claim with the given size in bytes. Accepts SI notation: 10, 10G, 10Gi") cmd.Flags().StringVar(&addOpts.ClaimMode, "claim-mode", "ReadWriteOnce", "Set the access mode of the claim to be created. Valid values are ReadWriteOnce (rwo), ReadWriteMany (rwm), or ReadOnlyMany (rom)") cmd.Flags().StringVar(&addOpts.Source, "source", "", "Details of volume source as json string. This can be used if the required volume type is not supported by --type option. (e.g.: '{\"gitRepo\": {\"repository\": <git-url>, \"revision\": <commit-hash>}}')") kcmdutil.AddPrinterFlags(cmd) cmd.MarkFlagFilename("filename", "yaml", "yml", "json") // deprecate --list option cmd.Flags().MarkDeprecated("list", "Volumes and volume mounts can be listed by providing a resource with no additional options.") return cmd } func (v *VolumeOptions) Validate(cmd *cobra.Command, args []string) error { if len(v.Selector) > 0 { if _, err := labels.Parse(v.Selector); err != nil { return errors.New("--selector=<selector> must be a valid label selector") } if v.All { return errors.New("you may specify either --selector or --all but not both") } } if len(v.Filenames) == 0 && len(args) < 1 { return errors.New("provide one or more resources to add, list, or delete volumes on as TYPE/NAME") } numOps := 0 if v.Add { numOps++ } if v.Remove { numOps++ } if v.List { numOps++ } switch { case numOps == 0: v.List = true case numOps > 1: return errors.New("you may only specify one operation at a time") } output := kcmdutil.GetFlagString(cmd, "output") if v.List && len(output) > 0 { return errors.New("--list and --output may not be specified together") } err := v.AddOpts.Validate(v.Add) if err != nil { return err } // Removing all 
volumes for the resource type needs confirmation if v.Remove && len(v.Name) == 0 && !v.Confirm { return errors.New("must provide --confirm for removing more than one volume") } return nil } func (a *AddVolumeOptions) Validate(isAddOp bool) error { if isAddOp { if len(a.Type) == 0 && (len(a.ClaimName) > 0 || len(a.ClaimSize) > 0) { a.Type = "persistentvolumeclaim" a.TypeChanged = true } if len(a.Type) == 0 && (len(a.SecretName) > 0) { a.Type = "secret" a.TypeChanged = true } if len(a.Type) == 0 && (len(a.ConfigMapName) > 0) { a.Type = "configmap" a.TypeChanged = true } if len(a.Type) == 0 && (len(a.Path) > 0) { a.Type = "hostpath" a.TypeChanged = true } if len(a.Type) == 0 { a.Type = "emptydir" } if len(a.Type) == 0 && len(a.Source) == 0 { return errors.New("must provide --type or --source for --add operation") } else if a.TypeChanged && len(a.Source) > 0 { return errors.New("either specify --type or --source but not both for --add operation") } if len(a.Type) > 0 { switch strings.ToLower(a.Type) { case "emptydir": case "hostpath": if len(a.Path) == 0 { return errors.New("must provide --path for --type=hostPath") } case "secret": if len(a.SecretName) == 0 { return errors.New("must provide --secret-name for --type=secret") } case "configmap": if len(a.ConfigMapName) == 0 { return errors.New("must provide --configmap-name for --type=configmap") } case "persistentvolumeclaim", "pvc": if len(a.ClaimName) == 0 && len(a.ClaimSize) == 0 { return errors.New("must provide --claim-name or --claim-size (to create a new claim) for --type=pvc") } default: return errors.New("invalid volume type. 
Supported types: emptyDir, hostPath, secret, persistentVolumeClaim") } } else if len(a.Path) > 0 || len(a.SecretName) > 0 || len(a.ClaimName) > 0 { return errors.New("--path|--secret-name|--claim-name are only valid for --type option") } if len(a.Source) > 0 { var source map[string]interface{} err := json.Unmarshal([]byte(a.Source), &source) if err != nil { return err } if len(source) > 1 { return errors.New("must provide only one volume for --source") } var vs kapi.VolumeSource err = json.Unmarshal([]byte(a.Source), &vs) if err != nil { return err } } if len(a.ClaimClass) > 0 { selectedLowerType := strings.ToLower(a.Type) if selectedLowerType != "persistentvolumeclaim" && selectedLowerType != "pvc" { return errors.New("must provide --type as persistentVolumeClaim") } if len(a.ClaimSize) == 0 { return errors.New("must provide --claim-size to create new pvc with claim-class") } } } else if len(a.Source) > 0 || len(a.Path) > 0 || len(a.SecretName) > 0 || len(a.ConfigMapName) > 0 || len(a.ClaimName) > 0 || a.Overwrite { return errors.New("--type|--path|--configmap-name|--secret-name|--claim-name|--source|--overwrite are only valid for --add operation") } return nil } func (v *VolumeOptions) Complete(f *clientcmd.Factory, cmd *cobra.Command, out, errOut io.Writer) error { clientConfig, err := f.ClientConfig() if err != nil { return err } v.OutputVersion, err = kcmdutil.OutputVersion(cmd, clientConfig.GroupVersion) if err != nil { return err } _, kc, err := f.Clients() if err != nil { return err } v.Client = kc cmdNamespace, explicit, err := f.DefaultNamespace() if err != nil { return err } mapper, typer := f.Object(false) v.Output = kcmdutil.GetFlagString(cmd, "output") if len(v.Output) > 0 { v.PrintObject = func(infos []*resource.Info) error { return f.PrintResourceInfos(cmd, infos, v.Out) } } v.DefaultNamespace = cmdNamespace v.ExplicitNamespace = explicit v.Out = out v.Err = errOut v.Mapper = mapper v.Typer = typer v.RESTClientFactory = f.Factory.ClientForMapping 
v.UpdatePodSpecForObject = f.UpdatePodSpecForObject v.Encoder = f.JSONEncoder() // In case of volume source ignore the default volume type if len(v.AddOpts.Source) > 0 { v.AddOpts.Type = "" } if len(v.AddOpts.ClaimSize) > 0 { v.AddOpts.CreateClaim = true if len(v.AddOpts.ClaimName) == 0 { v.AddOpts.ClaimName = kapi.SimpleNameGenerator.GenerateName("pvc-") } q, err := kresource.ParseQuantity(v.AddOpts.ClaimSize) if err != nil { return fmt.Errorf("--claim-size is not valid: %v", err) } v.AddOpts.ClaimSize = q.String() } switch strings.ToLower(v.AddOpts.ClaimMode) { case strings.ToLower(string(kapi.ReadOnlyMany)), "rom": v.AddOpts.ClaimMode = string(kapi.ReadOnlyMany) case strings.ToLower(string(kapi.ReadWriteOnce)), "rwo": v.AddOpts.ClaimMode = string(kapi.ReadWriteOnce) case strings.ToLower(string(kapi.ReadWriteMany)), "rwm": v.AddOpts.ClaimMode = string(kapi.ReadWriteMany) case "": default: return errors.New("--claim-mode must be one of ReadWriteOnce (rwo), ReadWriteMany (rwm), or ReadOnlyMany (rom)") } return nil } func (v *VolumeOptions) RunVolume(args []string) error { mapper := resource.ClientMapperFunc(v.RESTClientFactory) b := resource.NewBuilder(v.Mapper, v.Typer, mapper, kapi.Codecs.UniversalDecoder()). ContinueOnError(). NamespaceParam(v.DefaultNamespace).DefaultNamespace(). FilenameParam(v.ExplicitNamespace, false, v.Filenames...). SelectorParam(v.Selector). ResourceTypeOrNameArgs(v.All, args...). 
Flatten() singular := false infos, err := b.Do().IntoSingular(&singular).Infos() if err != nil { return err } if v.List { listingErrors := v.printVolumes(infos) if len(listingErrors) > 0 { return cmdutil.ErrExit } return nil } updateInfos := []*resource.Info{} // if a claim should be created, generate the info we'll add to the flow if v.Add && v.AddOpts.CreateClaim { claim := v.AddOpts.createClaim() m, err := v.Mapper.RESTMapping(kapi.Kind("PersistentVolumeClaim")) if err != nil { return err } client, err := mapper.ClientForMapping(m) if err != nil { return err } info := &resource.Info{ Mapping: m, Client: client, Namespace: v.DefaultNamespace, Object: claim, } infos = append(infos, info) updateInfos = append(updateInfos, info) } patches, patchError := v.getVolumeUpdatePatches(infos, singular) if patchError != nil { return patchError } if v.PrintObject != nil { return v.PrintObject(infos) } failed := false for _, info := range updateInfos { var obj runtime.Object if len(info.ResourceVersion) == 0 { obj, err = resource.NewHelper(info.Client, info.Mapping).Create(info.Namespace, false, info.Object) } else { obj, err = resource.NewHelper(info.Client, info.Mapping).Replace(info.Namespace, info.Name, true, info.Object) } if err != nil { handlePodUpdateError(v.Err, err, "volume") failed = true continue } info.Refresh(obj, true) fmt.Fprintf(v.Out, "%s/%s\n", info.Mapping.Resource, info.Name) } for _, patch := range patches { info := patch.Info if patch.Err != nil { failed = true fmt.Fprintf(v.Err, "error: %s/%s %v\n", info.Mapping.Resource, info.Name, patch.Err) continue } if string(patch.Patch) == "{}" || len(patch.Patch) == 0 { fmt.Fprintf(v.Err, "info: %s %q was not changed\n", info.Mapping.Resource, info.Name) continue } glog.V(4).Infof("Calculated patch %s", patch.Patch) obj, err := resource.NewHelper(info.Client, info.Mapping).Patch(info.Namespace, info.Name, kapi.StrategicMergePatchType, patch.Patch) if err != nil { handlePodUpdateError(v.Err, err, "volume") failed 
= true continue } info.Refresh(obj, true) kcmdutil.PrintSuccess(v.Mapper, false, v.Out, info.Mapping.Resource, info.Name, false, "updated") } if failed { return cmdutil.ErrExit } return nil } func (v *VolumeOptions) getVolumeUpdatePatches(infos []*resource.Info, singular bool) ([]*Patch, error) { skipped := 0 patches := CalculatePatches(infos, v.Encoder, func(info *resource.Info) (bool, error) { transformed := false ok, err := v.UpdatePodSpecForObject(info.Object, func(spec *kapi.PodSpec) error { var e error switch { case v.Add: e = v.addVolumeToSpec(spec, info, singular) transformed = true case v.Remove: e = v.removeVolumeFromSpec(spec, info) transformed = true } return e }) if !ok { skipped++ } return transformed, err }) if singular && skipped == len(infos) { patchError := fmt.Errorf("the %s %s is not a pod or does not have a pod template", infos[0].Mapping.Resource, infos[0].Name) return patches, patchError } return patches, nil } func setVolumeSourceByType(kv *kapi.Volume, opts *AddVolumeOptions) error { switch strings.ToLower(opts.Type) { case "emptydir": kv.EmptyDir = &kapi.EmptyDirVolumeSource{} case "hostpath": kv.HostPath = &kapi.HostPathVolumeSource{ Path: opts.Path, } case "secret": kv.Secret = &kapi.SecretVolumeSource{ SecretName: opts.SecretName, } case "configmap": kv.ConfigMap = &kapi.ConfigMapVolumeSource{ LocalObjectReference: kapi.LocalObjectReference{ Name: opts.ConfigMapName, }, } case "persistentvolumeclaim", "pvc": kv.PersistentVolumeClaim = &kapi.PersistentVolumeClaimVolumeSource{ ClaimName: opts.ClaimName, } default: return fmt.Errorf("invalid volume type: %s", opts.Type) } return nil } func (v *VolumeOptions) printVolumes(infos []*resource.Info) []error { listingErrors := []error{} for _, info := range infos { _, err := v.UpdatePodSpecForObject(info.Object, func(spec *kapi.PodSpec) error { return v.listVolumeForSpec(spec, info) }) if err != nil { listingErrors = append(listingErrors, err) fmt.Fprintf(v.Err, "error: %s/%s %v\n", 
info.Mapping.Resource, info.Name, err) } } return listingErrors } func (v *AddVolumeOptions) createClaim() *kapi.PersistentVolumeClaim { pvc := &kapi.PersistentVolumeClaim{ ObjectMeta: kapi.ObjectMeta{ Name: v.ClaimName, }, Spec: kapi.PersistentVolumeClaimSpec{ AccessModes: []kapi.PersistentVolumeAccessMode{kapi.PersistentVolumeAccessMode(v.ClaimMode)}, Resources: kapi.ResourceRequirements{ Requests: kapi.ResourceList{ kapi.ResourceName(kapi.ResourceStorage): kresource.MustParse(v.ClaimSize), }, }, }, } if len(v.ClaimClass) > 0 { pvc.Annotations = map[string]string{ storageAnnClass: v.ClaimClass, } } return pvc } func (v *VolumeOptions) setVolumeSource(kv *kapi.Volume) error { var err error opts := v.AddOpts if len(opts.Type) > 0 { err = setVolumeSourceByType(kv, opts) } else if len(opts.Source) > 0 { err = json.Unmarshal([]byte(opts.Source), &kv.VolumeSource) } return err } func (v *VolumeOptions) setVolumeMount(spec *kapi.PodSpec, info *resource.Info) error { opts := v.AddOpts containers, _ := selectContainers(spec.Containers, v.Containers) if len(containers) == 0 && v.Containers != "*" { fmt.Fprintf(v.Err, "warning: %s/%s does not have any containers matching %q\n", info.Mapping.Resource, info.Name, v.Containers) return nil } for _, c := range containers { for _, m := range c.VolumeMounts { if path.Clean(m.MountPath) == path.Clean(opts.MountPath) && m.Name != v.Name { return fmt.Errorf("volume mount '%s' already exists for container '%s'", opts.MountPath, c.Name) } } for i, m := range c.VolumeMounts { if m.Name == v.Name { c.VolumeMounts = append(c.VolumeMounts[:i], c.VolumeMounts[i+1:]...) 
break } } volumeMount := &kapi.VolumeMount{ Name: v.Name, MountPath: path.Clean(opts.MountPath), } c.VolumeMounts = append(c.VolumeMounts, *volumeMount) } return nil } func (v *VolumeOptions) getVolumeName(spec *kapi.PodSpec, singleResource bool) (string, error) { opts := v.AddOpts if opts.Overwrite { // Multiple resources can have same mount-path for different volumes, // so restrict it for single resource to uniquely find the volume if !singleResource { return "", fmt.Errorf("you must specify --name for the volume name when dealing with multiple resources") } if len(opts.MountPath) > 0 { containers, _ := selectContainers(spec.Containers, v.Containers) var name string matchCount := 0 for _, c := range containers { for _, m := range c.VolumeMounts { if path.Clean(m.MountPath) == path.Clean(opts.MountPath) { name = m.Name matchCount += 1 break } } } switch matchCount { case 0: return "", fmt.Errorf("unable to find the volume for mount-path: %s", opts.MountPath) case 1: return name, nil default: return "", fmt.Errorf("found multiple volumes with same mount-path: %s", opts.MountPath) } } else { return "", fmt.Errorf("ambiguous --overwrite, specify --name or --mount-path") } } else { // Generate volume name name := kapi.SimpleNameGenerator.GenerateName(volumePrefix) if len(v.Output) == 0 { fmt.Fprintf(v.Err, "info: Generated volume name: %s\n", name) } return name, nil } } func (v *VolumeOptions) addVolumeToSpec(spec *kapi.PodSpec, info *resource.Info, singleResource bool) error { opts := v.AddOpts if len(v.Name) == 0 { var err error v.Name, err = v.getVolumeName(spec, singleResource) if err != nil { return err } } newVolume := &kapi.Volume{ Name: v.Name, } setSource := true for i, vol := range spec.Volumes { if v.Name == vol.Name { if !opts.Overwrite { return fmt.Errorf("volume '%s' already exists. 
Use --overwrite to replace", v.Name) } if !opts.TypeChanged && len(opts.Source) == 0 { newVolume.VolumeSource = vol.VolumeSource setSource = false } spec.Volumes = append(spec.Volumes[:i], spec.Volumes[i+1:]...) break } } if setSource { err := v.setVolumeSource(newVolume) if err != nil { return err } } spec.Volumes = append(spec.Volumes, *newVolume) if len(opts.MountPath) > 0 { err := v.setVolumeMount(spec, info) if err != nil { return err } } return nil } func (v *VolumeOptions) removeSpecificVolume(spec *kapi.PodSpec, containers, skippedContainers []*kapi.Container) error { for _, c := range containers { for i, m := range c.VolumeMounts { if v.Name == m.Name { c.VolumeMounts = append(c.VolumeMounts[:i], c.VolumeMounts[i+1:]...) break } } } // Remove volume if no container is using it found := false for _, c := range skippedContainers { for _, m := range c.VolumeMounts { if v.Name == m.Name { found = true break } } if found { break } } if !found { foundVolume := false for i, vol := range spec.Volumes { if v.Name == vol.Name { spec.Volumes = append(spec.Volumes[:i], spec.Volumes[i+1:]...) 
foundVolume = true break } } if !foundVolume { return fmt.Errorf("volume '%s' not found", v.Name) } } return nil } func (v *VolumeOptions) removeVolumeFromSpec(spec *kapi.PodSpec, info *resource.Info) error { containers, skippedContainers := selectContainers(spec.Containers, v.Containers) if len(containers) == 0 && v.Containers != "*" { fmt.Fprintf(v.Err, "warning: %s/%s does not have any containers matching %q\n", info.Mapping.Resource, info.Name, v.Containers) return nil } if len(v.Name) == 0 { for _, c := range containers { c.VolumeMounts = []kapi.VolumeMount{} } spec.Volumes = []kapi.Volume{} } else { err := v.removeSpecificVolume(spec, containers, skippedContainers) if err != nil { return err } } return nil } func sourceAccessMode(readOnly bool) string { if readOnly { return " read-only" } return "" } func describePersistentVolumeClaim(claim *kapi.PersistentVolumeClaim) string { if len(claim.Spec.VolumeName) == 0 { // TODO: check for other dimensions of request - IOPs, etc if val, ok := claim.Spec.Resources.Requests[kapi.ResourceStorage]; ok { return fmt.Sprintf("waiting for %sB allocation", val.String()) } return "waiting to allocate" } // TODO: check for other dimensions of capacity? 
if val, ok := claim.Status.Capacity[kapi.ResourceStorage]; ok { return fmt.Sprintf("allocated %sB", val.String()) } return "allocated unknown size" } func describeVolumeSource(source *kapi.VolumeSource) string { switch { case source.AWSElasticBlockStore != nil: return fmt.Sprintf("AWS EBS %s type=%s partition=%d%s", source.AWSElasticBlockStore.VolumeID, source.AWSElasticBlockStore.FSType, source.AWSElasticBlockStore.Partition, sourceAccessMode(source.AWSElasticBlockStore.ReadOnly)) case source.EmptyDir != nil: return "empty directory" case source.GCEPersistentDisk != nil: return fmt.Sprintf("GCE PD %s type=%s partition=%d%s", source.GCEPersistentDisk.PDName, source.GCEPersistentDisk.FSType, source.GCEPersistentDisk.Partition, sourceAccessMode(source.GCEPersistentDisk.ReadOnly)) case source.GitRepo != nil: if len(source.GitRepo.Revision) == 0 { return fmt.Sprintf("Git repository %s", source.GitRepo.Repository) } return fmt.Sprintf("Git repository %s @ %s", source.GitRepo.Repository, source.GitRepo.Revision) case source.Glusterfs != nil: return fmt.Sprintf("GlusterFS %s:%s%s", source.Glusterfs.EndpointsName, source.Glusterfs.Path, sourceAccessMode(source.Glusterfs.ReadOnly)) case source.HostPath != nil: return fmt.Sprintf("host path %s", source.HostPath.Path) case source.ISCSI != nil: return fmt.Sprintf("ISCSI %s target-portal=%s type=%s lun=%d%s", source.ISCSI.IQN, source.ISCSI.TargetPortal, source.ISCSI.FSType, source.ISCSI.Lun, sourceAccessMode(source.ISCSI.ReadOnly)) case source.NFS != nil: return fmt.Sprintf("NFS %s:%s%s", source.NFS.Server, source.NFS.Path, sourceAccessMode(source.NFS.ReadOnly)) case source.PersistentVolumeClaim != nil: return fmt.Sprintf("pvc/%s%s", source.PersistentVolumeClaim.ClaimName, sourceAccessMode(source.PersistentVolumeClaim.ReadOnly)) case source.RBD != nil: return fmt.Sprintf("Ceph RBD %v type=%s image=%s pool=%s%s", source.RBD.CephMonitors, source.RBD.FSType, source.RBD.RBDImage, source.RBD.RBDPool, 
sourceAccessMode(source.RBD.ReadOnly)) case source.Secret != nil: return fmt.Sprintf("secret/%s", source.Secret.SecretName) default: return "unknown" } } func (v *VolumeOptions) listVolumeForSpec(spec *kapi.PodSpec, info *resource.Info) error { containers, _ := selectContainers(spec.Containers, v.Containers) if len(containers) == 0 && v.Containers != "*" { fmt.Fprintf(v.Err, "warning: %s/%s does not have any containers matching %q\n", info.Mapping.Resource, info.Name, v.Containers) return nil } fmt.Fprintf(v.Out, "%s/%s\n", info.Mapping.Resource, info.Name) checkName := (len(v.Name) > 0) found := false for _, vol := range spec.Volumes { if checkName && v.Name != vol.Name { continue } found = true refInfo := "" if vol.VolumeSource.PersistentVolumeClaim != nil { claimName := vol.VolumeSource.PersistentVolumeClaim.ClaimName claim, err := v.Client.PersistentVolumeClaims(info.Namespace).Get(claimName) switch { case err == nil: refInfo = fmt.Sprintf("(%s)", describePersistentVolumeClaim(claim)) case apierrs.IsNotFound(err): refInfo = "(does not exist)" default: fmt.Fprintf(v.Err, "error: unable to retrieve persistent volume claim %s referenced in %s/%s: %v", claimName, info.Mapping.Resource, info.Name, err) } } if len(refInfo) > 0 { refInfo = " " + refInfo } fmt.Fprintf(v.Out, " %s%s as %s\n", describeVolumeSource(&vol.VolumeSource), refInfo, vol.Name) for _, c := range containers { for _, m := range c.VolumeMounts { if vol.Name != m.Name { continue } if len(spec.Containers) == 1 { fmt.Fprintf(v.Out, " mounted at %s\n", m.MountPath) } else { fmt.Fprintf(v.Out, " mounted at %s in container %s\n", m.MountPath, c.Name) } } } } if checkName && !found { return fmt.Errorf("volume %q not found", v.Name) } return nil }
jeffvance/origin
pkg/cmd/cli/cmd/set/volume.go
GO
apache-2.0
29,411
/* * Copyright 2011 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kie.workbench.common.widgets.client.widget; public class PercentageCalculator { public static int calculatePercent(int numerator, int denominator) { int percent = 0; if (denominator != 0) { percent = (int) ((((float) denominator - (float) numerator) / (float) denominator) * 100); } return percent; } }
psiroky/kie-wb-common
kie-wb-common-widgets/kie-wb-common-ui/src/main/java/org/kie/workbench/common/widgets/client/widget/PercentageCalculator.java
Java
apache-2.0
979
/* * Copyright 2015 DuraSpace, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.fcrepo.http.api; import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Component; /** * @author cabeer * @since 10/17/14 */ @Component public class FedoraHttpConfiguration { @Value("${fcrepo.http.ldp.putRequiresIfMatch:false}") private boolean putRequiresIfMatch; /** * Should PUT requests require an If-Match header? * @return put request if match */ public boolean putRequiresIfMatch() { return putRequiresIfMatch; } }
ruebot/fcrepo4
fcrepo-http-api/src/main/java/org/fcrepo/http/api/FedoraHttpConfiguration.java
Java
apache-2.0
1,125
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import webob from nova.api.openstack.compute import flavors as flavors_api from nova.api.openstack.compute.views import flavors as flavors_view from nova.api.openstack import extensions from nova.api.openstack import wsgi from nova.compute import flavors from nova import exception from nova.openstack.common.gettextutils import _ authorize = extensions.extension_authorizer('compute', 'flavormanage') class FlavorManageController(wsgi.Controller): """ The Flavor Lifecycle API controller for the OpenStack API. 
""" _view_builder_class = flavors_view.ViewBuilder def __init__(self): super(FlavorManageController, self).__init__() @wsgi.action("delete") def _delete(self, req, id): context = req.environ['nova.context'] authorize(context) try: flavor = flavors.get_flavor_by_flavor_id( id, ctxt=context, read_deleted="no") except exception.NotFound as e: raise webob.exc.HTTPNotFound(explanation=e.format_message()) flavors.destroy(flavor['name']) return webob.Response(status_int=202) @wsgi.action("create") @wsgi.serializers(xml=flavors_api.FlavorTemplate) def _create(self, req, body): context = req.environ['nova.context'] authorize(context) if not self.is_valid_body(body, 'flavor'): msg = _("Invalid request body") raise webob.exc.HTTPBadRequest(explanation=msg) vals = body['flavor'] name = vals.get('name') flavorid = vals.get('id') memory = vals.get('ram') vcpus = vals.get('vcpus') root_gb = vals.get('disk') ephemeral_gb = vals.get('OS-FLV-EXT-DATA:ephemeral', 0) swap = vals.get('swap', 0) rxtx_factor = vals.get('rxtx_factor', 1.0) is_public = vals.get('os-flavor-access:is_public', True) try: flavor = flavors.create(name, memory, vcpus, root_gb, ephemeral_gb=ephemeral_gb, flavorid=flavorid, swap=swap, rxtx_factor=rxtx_factor, is_public=is_public) if not flavor['is_public']: flavors.add_flavor_access(flavor['flavorid'], context.project_id, context) req.cache_db_flavor(flavor) except (exception.InstanceTypeExists, exception.InstanceTypeIdExists) as err: raise webob.exc.HTTPConflict(explanation=err.format_message()) except exception.InvalidInput as exc: raise webob.exc.HTTPBadRequest(explanation=exc.format_message()) return self._view_builder.show(req, flavor) class Flavormanage(extensions.ExtensionDescriptor): """ Flavor create/delete API support """ name = "FlavorManage" alias = "os-flavor-manage" namespace = ("http://docs.openstack.org/compute/ext/" "flavor_manage/api/v1.1") updated = "2012-01-19T00:00:00+00:00" def get_controller_extensions(self): controller = 
FlavorManageController() extension = extensions.ControllerExtension(self, 'flavors', controller) return [extension]
ntt-sic/nova
nova/api/openstack/compute/contrib/flavormanage.py
Python
apache-2.0
3,894
<?php # Generated by the protocol buffer compiler. DO NOT EDIT! # source: google/pubsub/v1/pubsub.proto namespace Google\Cloud\PubSub\V1; use Google\Protobuf\Internal\GPBType; use Google\Protobuf\Internal\RepeatedField; use Google\Protobuf\Internal\GPBUtil; /** * Request for the ModifyAckDeadline method. * * Generated from protobuf message <code>google.pubsub.v1.ModifyAckDeadlineRequest</code> */ class ModifyAckDeadlineRequest extends \Google\Protobuf\Internal\Message { /** * Required. The name of the subscription. * Format is `projects/{project}/subscriptions/{sub}`. * * Generated from protobuf field <code>string subscription = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = {</code> */ private $subscription = ''; /** * Required. List of acknowledgment IDs. * * Generated from protobuf field <code>repeated string ack_ids = 4 [(.google.api.field_behavior) = REQUIRED];</code> */ private $ack_ids; /** * Required. The new ack deadline with respect to the time this request was * sent to the Pub/Sub system. For example, if the value is 10, the new ack * deadline will expire 10 seconds after the `ModifyAckDeadline` call was * made. Specifying zero might immediately make the message available for * delivery to another subscriber client. This typically results in an * increase in the rate of message redeliveries (that is, duplicates). * The minimum deadline you can specify is 0 seconds. * The maximum deadline you can specify is 600 seconds (10 minutes). * * Generated from protobuf field <code>int32 ack_deadline_seconds = 3 [(.google.api.field_behavior) = REQUIRED];</code> */ private $ack_deadline_seconds = 0; /** * Constructor. * * @param array $data { * Optional. Data for populating the Message object. * * @type string $subscription * Required. The name of the subscription. * Format is `projects/{project}/subscriptions/{sub}`. * @type string[]|\Google\Protobuf\Internal\RepeatedField $ack_ids * Required. List of acknowledgment IDs. 
* @type int $ack_deadline_seconds * Required. The new ack deadline with respect to the time this request was * sent to the Pub/Sub system. For example, if the value is 10, the new ack * deadline will expire 10 seconds after the `ModifyAckDeadline` call was * made. Specifying zero might immediately make the message available for * delivery to another subscriber client. This typically results in an * increase in the rate of message redeliveries (that is, duplicates). * The minimum deadline you can specify is 0 seconds. * The maximum deadline you can specify is 600 seconds (10 minutes). * } */ public function __construct($data = NULL) { \GPBMetadata\Google\Pubsub\V1\Pubsub::initOnce(); parent::__construct($data); } /** * Required. The name of the subscription. * Format is `projects/{project}/subscriptions/{sub}`. * * Generated from protobuf field <code>string subscription = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = {</code> * @return string */ public function getSubscription() { return $this->subscription; } /** * Required. The name of the subscription. * Format is `projects/{project}/subscriptions/{sub}`. * * Generated from protobuf field <code>string subscription = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = {</code> * @param string $var * @return $this */ public function setSubscription($var) { GPBUtil::checkString($var, True); $this->subscription = $var; return $this; } /** * Required. List of acknowledgment IDs. * * Generated from protobuf field <code>repeated string ack_ids = 4 [(.google.api.field_behavior) = REQUIRED];</code> * @return \Google\Protobuf\Internal\RepeatedField */ public function getAckIds() { return $this->ack_ids; } /** * Required. List of acknowledgment IDs. 
* * Generated from protobuf field <code>repeated string ack_ids = 4 [(.google.api.field_behavior) = REQUIRED];</code> * @param string[]|\Google\Protobuf\Internal\RepeatedField $var * @return $this */ public function setAckIds($var) { $arr = GPBUtil::checkRepeatedField($var, \Google\Protobuf\Internal\GPBType::STRING); $this->ack_ids = $arr; return $this; } /** * Required. The new ack deadline with respect to the time this request was * sent to the Pub/Sub system. For example, if the value is 10, the new ack * deadline will expire 10 seconds after the `ModifyAckDeadline` call was * made. Specifying zero might immediately make the message available for * delivery to another subscriber client. This typically results in an * increase in the rate of message redeliveries (that is, duplicates). * The minimum deadline you can specify is 0 seconds. * The maximum deadline you can specify is 600 seconds (10 minutes). * * Generated from protobuf field <code>int32 ack_deadline_seconds = 3 [(.google.api.field_behavior) = REQUIRED];</code> * @return int */ public function getAckDeadlineSeconds() { return $this->ack_deadline_seconds; } /** * Required. The new ack deadline with respect to the time this request was * sent to the Pub/Sub system. For example, if the value is 10, the new ack * deadline will expire 10 seconds after the `ModifyAckDeadline` call was * made. Specifying zero might immediately make the message available for * delivery to another subscriber client. This typically results in an * increase in the rate of message redeliveries (that is, duplicates). * The minimum deadline you can specify is 0 seconds. * The maximum deadline you can specify is 600 seconds (10 minutes). * * Generated from protobuf field <code>int32 ack_deadline_seconds = 3 [(.google.api.field_behavior) = REQUIRED];</code> * @param int $var * @return $this */ public function setAckDeadlineSeconds($var) { GPBUtil::checkInt32($var); $this->ack_deadline_seconds = $var; return $this; } }
googleapis/google-cloud-php-pubsub
src/V1/ModifyAckDeadlineRequest.php
PHP
apache-2.0
6,591
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.util.xml.highlighting; import com.intellij.codeInspection.LocalQuickFix; import com.intellij.codeInspection.ProblemHighlightType; import com.intellij.lang.annotation.Annotation; import com.intellij.lang.annotation.HighlightSeverity; import com.intellij.openapi.util.TextRange; import com.intellij.psi.PsiReference; import com.intellij.util.xml.DomElement; import com.intellij.util.xml.DomFileElement; import com.intellij.util.xml.GenericDomValue; import com.intellij.util.xml.reflect.DomCollectionChildDescription; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; public interface DomElementAnnotationHolder extends Iterable<DomElementProblemDescriptor>{ boolean isOnTheFly(); @NotNull DomFileElement<?> getFileElement(); @NotNull DomElementProblemDescriptor createProblem(@NotNull DomElement domElement, @Nullable String message, LocalQuickFix... fixes); @NotNull DomElementProblemDescriptor createProblem(@NotNull DomElement domElement, DomCollectionChildDescription childDescription, @Nullable String message); @NotNull DomElementProblemDescriptor createProblem(@NotNull DomElement domElement, HighlightSeverity highlightType, String message); DomElementProblemDescriptor createProblem(@NotNull DomElement domElement, HighlightSeverity highlightType, String message, LocalQuickFix... 
fixes); DomElementProblemDescriptor createProblem(@NotNull DomElement domElement, HighlightSeverity highlightType, String message, TextRange textRange, LocalQuickFix... fixes); DomElementProblemDescriptor createProblem(@NotNull DomElement domElement, ProblemHighlightType highlightType, String message, @Nullable TextRange textRange, LocalQuickFix... fixes); @NotNull DomElementResolveProblemDescriptor createResolveProblem(@NotNull GenericDomValue element, @NotNull PsiReference reference); /** * Is useful only if called from {@link com.intellij.util.xml.highlighting.DomElementsAnnotator} instance * @param element element * @param severity highlight severity * @param message description * @return annotation */ @NotNull Annotation createAnnotation(@NotNull DomElement element, HighlightSeverity severity, @Nullable String message); int getSize(); }
paplorinc/intellij-community
xml/dom-openapi/src/com/intellij/util/xml/highlighting/DomElementAnnotationHolder.java
Java
apache-2.0
2,866
package com.mapswithme.maps.purchase; import android.app.Activity; import android.content.Intent; import android.os.Bundle; import android.text.TextUtils; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ImageView; import android.widget.TextView; import android.widget.Toast; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import com.android.billingclient.api.SkuDetails; import com.bumptech.glide.Glide; import com.mapswithme.maps.Framework; import com.mapswithme.maps.PrivateVariables; import com.mapswithme.maps.PurchaseOperationObservable; import com.mapswithme.maps.R; import com.mapswithme.maps.base.BaseMwmFragment; import com.mapswithme.maps.bookmarks.data.PaymentData; import com.mapswithme.maps.dialog.AlertDialogCallback; import com.mapswithme.util.Utils; import com.mapswithme.util.log.Logger; import com.mapswithme.util.log.LoggerFactory; import com.mapswithme.util.statistics.Statistics; import java.util.Collections; import java.util.List; public class BookmarkPaymentFragment extends BaseMwmFragment implements AlertDialogCallback, PurchaseStateActivator<BookmarkPaymentState> { static final String ARG_PAYMENT_DATA = "arg_payment_data"; private static final Logger LOGGER = LoggerFactory.INSTANCE.getLogger(LoggerFactory.Type.BILLING); private static final String TAG = BookmarkPaymentFragment.class.getSimpleName(); private static final String EXTRA_CURRENT_STATE = "extra_current_state"; private static final String EXTRA_PRODUCT_DETAILS = "extra_product_details"; private static final String EXTRA_SUBS_PRODUCT_DETAILS = "extra_subs_product_details"; private static final String EXTRA_VALIDATION_RESULT = "extra_validation_result"; @SuppressWarnings("NullableProblems") @NonNull private PurchaseController<PurchaseCallback> mPurchaseController; @SuppressWarnings("NullableProblems") @NonNull private BookmarkPurchaseCallback mPurchaseCallback; @SuppressWarnings("NullableProblems") 
@NonNull private PaymentData mPaymentData; @Nullable private ProductDetails mProductDetails; @Nullable private ProductDetails mSubsProductDetails; private boolean mValidationResult; @NonNull private BookmarkPaymentState mState = BookmarkPaymentState.NONE; @SuppressWarnings("NullableProblems") @NonNull private BillingManager<PlayStoreBillingCallback> mSubsProductDetailsLoadingManager; @NonNull private final SubsProductDetailsCallback mSubsProductDetailsCallback = new SubsProductDetailsCallback(); @Override public void onCreate(@Nullable Bundle savedInstanceState) { super.onCreate(savedInstanceState); Bundle args = getArguments(); if (args == null) throw new IllegalStateException("Args must be provided for payment fragment!"); PaymentData paymentData = args.getParcelable(ARG_PAYMENT_DATA); if (paymentData == null) throw new IllegalStateException("Payment data must be provided for payment fragment!"); mPaymentData = paymentData; mPurchaseCallback = new BookmarkPurchaseCallback(mPaymentData.getServerId()); } @Nullable @Override public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) { mPurchaseController = PurchaseFactory.createBookmarkPurchaseController(requireContext(), mPaymentData.getProductId(), mPaymentData.getServerId()); if (savedInstanceState != null) mPurchaseController.onRestore(savedInstanceState); mPurchaseController.initialize(requireActivity()); mSubsProductDetailsLoadingManager = PurchaseFactory.createSubscriptionBillingManager(); mSubsProductDetailsLoadingManager.initialize(requireActivity()); mSubsProductDetailsLoadingManager.addCallback(mSubsProductDetailsCallback); mSubsProductDetailsCallback.attach(this); View root = inflater.inflate(R.layout.fragment_bookmark_payment, container, false); View subscriptionButton = root.findViewById(R.id.buy_subs_btn); subscriptionButton.setOnClickListener(v -> onBuySubscriptionClicked()); TextView buyInappBtn = root.findViewById(R.id.buy_inapp_btn); 
buyInappBtn.setOnClickListener(v -> onBuyInappClicked()); return root; } private void onBuySubscriptionClicked() { SubscriptionType type = SubscriptionType.getTypeByBookmarksGroup(mPaymentData.getGroup()); if (type.equals(SubscriptionType.BOOKMARKS_SIGHTS)) { BookmarksSightsSubscriptionActivity.startForResult (this, PurchaseUtils.REQ_CODE_PAY_SUBSCRIPTION, Statistics.ParamValue.CARD); return; } BookmarksAllSubscriptionActivity.startForResult (this, PurchaseUtils.REQ_CODE_PAY_SUBSCRIPTION, Statistics.ParamValue.CARD); } @Override public void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); if (resultCode != Activity.RESULT_OK) return; if (requestCode == PurchaseUtils.REQ_CODE_PAY_SUBSCRIPTION) { Intent intent = new Intent(); intent.putExtra(PurchaseUtils.EXTRA_IS_SUBSCRIPTION, true); requireActivity().setResult(Activity.RESULT_OK, intent); requireActivity().finish(); } } private void onBuyInappClicked() { Statistics.INSTANCE.trackPurchasePreviewSelect(mPaymentData.getServerId(), mPaymentData.getProductId()); Statistics.INSTANCE.trackPurchaseEvent(Statistics.EventName.INAPP_PURCHASE_PREVIEW_PAY, mPaymentData.getServerId(), Statistics.STATISTICS_CHANNEL_REALTIME); startPurchaseTransaction(); } @Override public boolean onBackPressed() { if (mState == BookmarkPaymentState.VALIDATION) { Toast.makeText(requireContext(), R.string.purchase_please_wait_toast, Toast.LENGTH_SHORT) .show(); return true; } Statistics.INSTANCE.trackPurchaseEvent(Statistics.EventName.INAPP_PURCHASE_PREVIEW_CANCEL, mPaymentData.getServerId()); return super.onBackPressed(); } @Override public void onViewCreated(View view, @Nullable Bundle savedInstanceState) { super.onViewCreated(view, savedInstanceState); if (savedInstanceState == null) Statistics.INSTANCE.trackPurchasePreviewShow(mPaymentData.getServerId(), PrivateVariables.bookmarksVendor(), mPaymentData.getProductId()); LOGGER.d(TAG, "onViewCreated savedInstanceState = " + 
savedInstanceState); setInitialPaymentData(); loadImage(); if (savedInstanceState != null) { mProductDetails = savedInstanceState.getParcelable(EXTRA_PRODUCT_DETAILS); if (mProductDetails != null) updateProductDetails(); mSubsProductDetails = savedInstanceState.getParcelable(EXTRA_SUBS_PRODUCT_DETAILS); if (mSubsProductDetails != null) updateSubsProductDetails(); mValidationResult = savedInstanceState.getBoolean(EXTRA_VALIDATION_RESULT); BookmarkPaymentState savedState = BookmarkPaymentState.values()[savedInstanceState.getInt(EXTRA_CURRENT_STATE)]; activateState(savedState); return; } activateState(BookmarkPaymentState.PRODUCT_DETAILS_LOADING); mPurchaseController.queryProductDetails(); SubscriptionType type = SubscriptionType.getTypeByBookmarksGroup(mPaymentData.getGroup()); List<String> subsProductIds = Collections.singletonList(type.getMonthlyProductId()); mSubsProductDetailsLoadingManager.queryProductDetails(subsProductIds); } @Override public void onDestroyView() { super.onDestroyView(); mPurchaseController.destroy(); mSubsProductDetailsLoadingManager.removeCallback(mSubsProductDetailsCallback); mSubsProductDetailsCallback.detach(); mSubsProductDetailsLoadingManager.destroy(); } private void startPurchaseTransaction() { activateState(BookmarkPaymentState.TRANSACTION_STARTING); Framework.nativeStartPurchaseTransaction(mPaymentData.getServerId(), PrivateVariables.bookmarksVendor()); } void launchBillingFlow() { mPurchaseController.launchPurchaseFlow(mPaymentData.getProductId()); activateState(BookmarkPaymentState.PAYMENT_IN_PROGRESS); } @Override public void onStart() { super.onStart(); PurchaseOperationObservable observable = PurchaseOperationObservable.from(requireContext()); observable.addTransactionObserver(mPurchaseCallback); mPurchaseController.addCallback(mPurchaseCallback); mPurchaseCallback.attach(this); } @Override public void onStop() { super.onStop(); PurchaseOperationObservable observable = PurchaseOperationObservable.from(requireContext()); 
observable.removeTransactionObserver(mPurchaseCallback); mPurchaseController.removeCallback(); mPurchaseCallback.detach(); } @Override public void onSaveInstanceState(Bundle outState) { super.onSaveInstanceState(outState); LOGGER.d(TAG, "onSaveInstanceState"); outState.putInt(EXTRA_CURRENT_STATE, mState.ordinal()); outState.putParcelable(EXTRA_PRODUCT_DETAILS, mProductDetails); outState.putParcelable(EXTRA_SUBS_PRODUCT_DETAILS, mSubsProductDetails); mPurchaseController.onSave(outState); } @Override public void activateState(@NonNull BookmarkPaymentState state) { if (state == mState) return; LOGGER.i(TAG, "Activate state: " + state); mState = state; mState.activate(this); } private void loadImage() { if (TextUtils.isEmpty(mPaymentData.getImgUrl())) return; ImageView imageView = getViewOrThrow().findViewById(R.id.image); Glide.with(imageView.getContext()) .load(mPaymentData.getImgUrl()) .centerCrop() .into(imageView); } private void setInitialPaymentData() { TextView name = getViewOrThrow().findViewById(R.id.product_catalog_name); name.setText(mPaymentData.getName()); TextView author = getViewOrThrow().findViewById(R.id.author_name); author.setText(mPaymentData.getAuthorName()); } void handleProductDetails(@NonNull List<SkuDetails> details) { if (details.isEmpty()) return; SkuDetails skuDetails = details.get(0); mProductDetails = PurchaseUtils.toProductDetails(skuDetails); } void handleSubsProductDetails(@NonNull List<SkuDetails> details) { if (details.isEmpty()) return; SkuDetails skuDetails = details.get(0); mSubsProductDetails = PurchaseUtils.toProductDetails(skuDetails); } void handleValidationResult(boolean validationResult) { mValidationResult = validationResult; } @Override public void onAlertDialogPositiveClick(int requestCode, int which) { handleErrorDialogEvent(requestCode); } @Override public void onAlertDialogNegativeClick(int requestCode, int which) { // Do nothing by default. 
} @Override public void onAlertDialogCancel(int requestCode) { handleErrorDialogEvent(requestCode); } private void handleErrorDialogEvent(int requestCode) { switch (requestCode) { case PurchaseUtils.REQ_CODE_PRODUCT_DETAILS_FAILURE: requireActivity().finish(); break; case PurchaseUtils.REQ_CODE_START_TRANSACTION_FAILURE: case PurchaseUtils.REQ_CODE_PAYMENT_FAILURE: activateState(BookmarkPaymentState.PRODUCT_DETAILS_LOADED); break; } } void updateProductDetails() { if (mProductDetails == null) throw new AssertionError("Product details must be obtained at this moment!"); TextView buyButton = getViewOrThrow().findViewById(R.id.buy_inapp_btn); String price = Utils.formatCurrencyString(mProductDetails.getPrice(), mProductDetails.getCurrencyCode()); buyButton.setText(getString(R.string.buy_btn, price)); TextView storeName = getViewOrThrow().findViewById(R.id.product_store_name); storeName.setText(mProductDetails.getTitle()); } void updateSubsProductDetails() { if (mSubsProductDetails == null) throw new AssertionError("Subs product details must be obtained at this moment!"); String formattedPrice = Utils.formatCurrencyString(mSubsProductDetails.getPrice(), mSubsProductDetails.getCurrencyCode()); TextView subsButton = getViewOrThrow().findViewById(R.id.buy_subs_btn); subsButton.setText(getString(R.string.buy_btn_for_subscription_version_2, formattedPrice)); } void finishValidation() { if (mValidationResult) requireActivity().setResult(Activity.RESULT_OK); requireActivity().finish(); } }
matsprea/omim
android/src/com/mapswithme/maps/purchase/BookmarkPaymentFragment.java
Java
apache-2.0
13,042
/* * Copyright 2010-2013 Coda Hale and Yammer, Inc., 2014-2017 Dropwizard Team * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.dropwizard.metrics; import org.junit.Test; import io.dropwizard.metrics.SlidingWindowReservoir; import static org.assertj.core.api.Assertions.assertThat; public class SlidingWindowReservoirTest { private final SlidingWindowReservoir reservoir = new SlidingWindowReservoir(3); @Test public void handlesSmallDataStreams() throws Exception { reservoir.update(1); reservoir.update(2); assertThat(reservoir.getSnapshot().getValues()) .containsOnly(1, 2); } @Test public void onlyKeepsTheMostRecentFromBigDataStreams() throws Exception { reservoir.update(1); reservoir.update(2); reservoir.update(3); reservoir.update(4); assertThat(reservoir.getSnapshot().getValues()) .containsOnly(2, 3, 4); } }
networknt/light-4j
metrics/src/test/java/io/dropwizard/metrics/SlidingWindowReservoirTest.java
Java
apache-2.0
1,479
/** * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ #include <aws/codedeploy/model/TrafficRoutingType.h> #include <aws/core/utils/HashingUtils.h> #include <aws/core/Globals.h> #include <aws/core/utils/EnumParseOverflowContainer.h> using namespace Aws::Utils; namespace Aws { namespace CodeDeploy { namespace Model { namespace TrafficRoutingTypeMapper { static const int TimeBasedCanary_HASH = HashingUtils::HashString("TimeBasedCanary"); static const int TimeBasedLinear_HASH = HashingUtils::HashString("TimeBasedLinear"); static const int AllAtOnce_HASH = HashingUtils::HashString("AllAtOnce"); TrafficRoutingType GetTrafficRoutingTypeForName(const Aws::String& name) { int hashCode = HashingUtils::HashString(name.c_str()); if (hashCode == TimeBasedCanary_HASH) { return TrafficRoutingType::TimeBasedCanary; } else if (hashCode == TimeBasedLinear_HASH) { return TrafficRoutingType::TimeBasedLinear; } else if (hashCode == AllAtOnce_HASH) { return TrafficRoutingType::AllAtOnce; } EnumParseOverflowContainer* overflowContainer = Aws::GetEnumOverflowContainer(); if(overflowContainer) { overflowContainer->StoreOverflow(hashCode, name); return static_cast<TrafficRoutingType>(hashCode); } return TrafficRoutingType::NOT_SET; } Aws::String GetNameForTrafficRoutingType(TrafficRoutingType enumValue) { switch(enumValue) { case TrafficRoutingType::TimeBasedCanary: return "TimeBasedCanary"; case TrafficRoutingType::TimeBasedLinear: return "TimeBasedLinear"; case TrafficRoutingType::AllAtOnce: return "AllAtOnce"; default: EnumParseOverflowContainer* overflowContainer = Aws::GetEnumOverflowContainer(); if(overflowContainer) { return overflowContainer->RetrieveOverflow(static_cast<int>(enumValue)); } return {}; } } } // namespace TrafficRoutingTypeMapper } // namespace Model } // namespace CodeDeploy } // namespace Aws
awslabs/aws-sdk-cpp
aws-cpp-sdk-codedeploy/source/model/TrafficRoutingType.cpp
C++
apache-2.0
2,400
/* * * Copyright 2019 gRPC authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package grpc import ( "fmt" "testing" "google.golang.org/grpc/balancer" "google.golang.org/grpc/connectivity" "google.golang.org/grpc/resolver" "google.golang.org/grpc/resolver/manual" ) var _ balancer.V2Balancer = &funcBalancer{} type funcBalancer struct { updateClientConnState func(s balancer.ClientConnState) error } func (*funcBalancer) HandleSubConnStateChange(balancer.SubConn, connectivity.State) { panic("unimplemented") // v1 API } func (*funcBalancer) HandleResolvedAddrs([]resolver.Address, error) { panic("unimplemented") // v1 API } func (b *funcBalancer) UpdateClientConnState(s balancer.ClientConnState) error { return b.updateClientConnState(s) } func (*funcBalancer) ResolverError(error) {} func (*funcBalancer) UpdateSubConnState(balancer.SubConn, balancer.SubConnState) { panic("unimplemented") // we never have sub-conns } func (*funcBalancer) Close() {} type funcBalancerBuilder struct { name string instance *funcBalancer } func (b *funcBalancerBuilder) Build(balancer.ClientConn, balancer.BuildOptions) balancer.Balancer { return b.instance } func (b *funcBalancerBuilder) Name() string { return b.name } // TestBalancerErrorResolverPolling injects balancer errors and verifies // ResolveNow is called on the resolver with the appropriate backoff strategy // being consulted between ResolveNow calls. 
func (s) TestBalancerErrorResolverPolling(t *testing.T) { // The test balancer will return ErrBadResolverState iff the // ClientConnState contains no addresses. fb := &funcBalancer{ updateClientConnState: func(s balancer.ClientConnState) error { if len(s.ResolverState.Addresses) == 0 { return balancer.ErrBadResolverState } return nil }, } const balName = "BalancerErrorResolverPolling" balancer.Register(&funcBalancerBuilder{name: balName, instance: fb}) testResolverErrorPolling(t, func(r *manual.Resolver) { // No addresses so the balancer will fail. r.CC.UpdateState(resolver.State{}) }, func(r *manual.Resolver) { // UpdateState will block if ResolveNow is being called (which blocks on // rn), so call it in a goroutine. Include some address so the balancer // will be happy. go r.CC.UpdateState(resolver.State{Addresses: []resolver.Address{{Addr: "x"}}}) }, WithDefaultServiceConfig(fmt.Sprintf(`{ "loadBalancingConfig": [{"%v": {}}] }`, balName))) }
Miciah/origin
vendor/google.golang.org/grpc/balancer_conn_wrappers_test.go
GO
apache-2.0
2,966
/* * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights * Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.directory.model.transform; import java.util.Map; import java.util.Map.Entry; import java.math.*; import java.nio.ByteBuffer; import com.amazonaws.services.directory.model.*; import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*; import com.amazonaws.transform.*; import com.fasterxml.jackson.core.JsonToken; import static com.fasterxml.jackson.core.JsonToken.*; /** * CreateConditionalForwarderResult JSON Unmarshaller */ public class CreateConditionalForwarderResultJsonUnmarshaller implements Unmarshaller<CreateConditionalForwarderResult, JsonUnmarshallerContext> { public CreateConditionalForwarderResult unmarshall( JsonUnmarshallerContext context) throws Exception { CreateConditionalForwarderResult createConditionalForwarderResult = new CreateConditionalForwarderResult(); return createConditionalForwarderResult; } private static CreateConditionalForwarderResultJsonUnmarshaller instance; public static CreateConditionalForwarderResultJsonUnmarshaller getInstance() { if (instance == null) instance = new CreateConditionalForwarderResultJsonUnmarshaller(); return instance; } }
flofreud/aws-sdk-java
aws-java-sdk-directory/src/main/java/com/amazonaws/services/directory/model/transform/CreateConditionalForwarderResultJsonUnmarshaller.java
Java
apache-2.0
1,808
/*
 * Copyright 2000-2017 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.util.xml.impl;

import com.intellij.ide.highlighter.DomSupportEnabled;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.fileTypes.StdFileTypes;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ProjectFileIndex;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Factory;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.vfs.*;
import com.intellij.openapi.vfs.newvfs.NewVirtualFile;
import com.intellij.pom.PomManager;
import com.intellij.pom.PomModel;
import com.intellij.pom.PomModelAspect;
import com.intellij.pom.event.PomModelEvent;
import com.intellij.pom.event.PomModelListener;
import com.intellij.pom.xml.XmlAspect;
import com.intellij.pom.xml.XmlChangeSet;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiFileFactory;
import com.intellij.psi.PsiManager;
import com.intellij.psi.impl.PsiManagerEx;
import com.intellij.psi.xml.XmlAttribute;
import com.intellij.psi.xml.XmlElement;
import com.intellij.psi.xml.XmlFile;
import com.intellij.psi.xml.XmlTag;
import com.intellij.reference.SoftReference;
import com.intellij.semantic.SemKey;
import com.intellij.semantic.SemService;
import com.intellij.util.EventDispatcher;
import com.intellij.util.SmartList;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.xml.*;
import com.intellij.util.xml.events.DomEvent;
import com.intellij.util.xml.reflect.AbstractDomChildrenDescription;
import com.intellij.util.xml.reflect.DomGenericInfo;
import net.sf.cglib.proxy.AdvancedProxy;
import net.sf.cglib.proxy.InvocationHandler;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.lang.ref.WeakReference;
import java.lang.reflect.Type;
import java.util.*;

/**
 * Project-level implementation of {@link DomManager}.
 *
 * <p>Bridges XML PSI/VFS events to DOM events: it subscribes to the POM model
 * (in-document XML changes) and to the virtual file system (content changes,
 * moves, deletions, renames), and re-fires them as {@link DomEvent}s to
 * registered {@link DomEventListener}s. DOM handlers themselves are cached in
 * {@link SemService} under the {@code SemKey}s declared below.
 *
 * @author peter
 */
public final class DomManagerImpl extends DomManager {
  // Marker user-data key identifying files produced by createMockElement().
  private static final Key<Object> MOCK = Key.create("MockElement");

  // Weak cache of the file's root DomFileElement, stored as user data on the XmlFile.
  static final Key<WeakReference<DomFileElementImpl>> CACHED_FILE_ELEMENT = Key.create("CACHED_FILE_ELEMENT");
  // File description attached to mock files created by createMockElement().
  static final Key<DomFileDescription> MOCK_DESCRIPTION = Key.create("MockDescription");
  // SemService keys under which DOM handlers are cached per XML element.
  static final SemKey<FileDescriptionCachedValueProvider> FILE_DESCRIPTION_KEY = SemKey.createKey("FILE_DESCRIPTION_KEY");
  static final SemKey<DomInvocationHandler> DOM_HANDLER_KEY = SemKey.createKey("DOM_HANDLER_KEY");
  // Sub-keys of DOM_HANDLER_KEY for the specific handler flavors (indexed,
  // collection, custom, attribute children).
  static final SemKey<IndexedElementInvocationHandler> DOM_INDEXED_HANDLER_KEY = DOM_HANDLER_KEY.subKey("DOM_INDEXED_HANDLER_KEY");
  static final SemKey<CollectionElementInvocationHandler> DOM_COLLECTION_HANDLER_KEY = DOM_HANDLER_KEY.subKey("DOM_COLLECTION_HANDLER_KEY");
  static final SemKey<CollectionElementInvocationHandler> DOM_CUSTOM_HANDLER_KEY = DOM_HANDLER_KEY.subKey("DOM_CUSTOM_HANDLER_KEY");
  static final SemKey<AttributeChildInvocationHandler> DOM_ATTRIBUTE_HANDLER_KEY = DOM_HANDLER_KEY.subKey("DOM_ATTRIBUTE_HANDLER_KEY");

  private final EventDispatcher<DomEventListener> myListeners = EventDispatcher.create(DomEventListener.class);

  private final Project myProject;
  private final SemService mySemService;
  private final DomApplicationComponent myApplicationComponent;

  // True while a DOM-initiated change is being applied; used to suppress
  // re-entrant event firing (see runChange()/setChanging()).
  private boolean myChanging;

  public DomManagerImpl(Project project) {
    super(project);
    myProject = project;
    mySemService = SemService.getSemService(project);
    myApplicationComponent = DomApplicationComponent.getInstance();

    // Re-fire POM XML change sets as DOM events for files that already have a
    // cached DOM file element. Changes made through the DOM itself (myChanging)
    // are ignored to avoid feedback loops.
    final PomModel pomModel = PomManager.getModel(project);
    pomModel.addModelListener(new PomModelListener() {
      @Override
      public void modelChanged(PomModelEvent event) {
        if (myChanging) return;
        final XmlChangeSet changeSet = (XmlChangeSet)event.getChangeSet(pomModel.getModelAspect(XmlAspect.class));
        if (changeSet != null) {
          for (XmlFile file : changeSet.getChangedFiles()) {
            DomFileElementImpl<DomElement> element = getCachedFileElement(file);
            if (element != null) {
              fireEvent(new DomEvent(element, false));
            }
          }
        }
      }

      @Override
      public boolean isAspectChangeInteresting(PomModelAspect aspect) {
        return aspect instanceof XmlAspect;
      }
    }, project);

    // Mirror VFS-level changes (external edits, moves, deletions, renames)
    // into DOM events as well.
    VirtualFileManager.getInstance().addVirtualFileListener(new VirtualFileListener() {
      // Events computed in beforeFileDeletion() (while the file tree is still
      // accessible) and fired in fileDeleted().
      private final List<DomEvent> myDeletionEvents = new SmartList<>();

      @Override
      public void contentsChanged(@NotNull VirtualFileEvent event) {
        // Saves of in-memory documents were already reported via the POM listener.
        if (!event.isFromSave()) {
          fireEvents(calcDomChangeEvents(event.getFile()));
        }
      }

      @Override
      public void fileMoved(@NotNull VirtualFileMoveEvent event) {
        fireEvents(calcDomChangeEvents(event.getFile()));
      }

      @Override
      public void beforeFileDeletion(@NotNull final VirtualFileEvent event) {
        myDeletionEvents.addAll(calcDomChangeEvents(event.getFile()));
      }

      @Override
      public void fileDeleted(@NotNull VirtualFileEvent event) {
        if (!myDeletionEvents.isEmpty()) {
          fireEvents(myDeletionEvents);
          myDeletionEvents.clear();
        }
      }

      @Override
      public void propertyChanged(@NotNull VirtualFilePropertyEvent event) {
        // Only renames of files (not directories) are relevant to DOM identity.
        final VirtualFile file = event.getFile();
        if (!file.isDirectory() && VirtualFile.PROP_NAME.equals(event.getPropertyName())) {
          fireEvents(calcDomChangeEvents(file));
        }
      }
    }, myProject);
  }

  /** @return the project's PSI modification count, used as a cache timestamp. */
  public long getPsiModificationCount() {
    return PsiManager.getInstance(getProject()).getModificationTracker().getModificationCount();
  }

  /** Caches {@code handler} for {@code element} under {@code key} in the SemService. */
  public <T extends DomInvocationHandler> void cacheHandler(SemKey<T> key, XmlElement element, T handler) {
    mySemService.setCachedSemElement(key, element, handler);
  }

  // Returns the PSI file only if it is already materialized; never forces parsing.
  private PsiFile getCachedPsiFile(VirtualFile file) {
    return PsiManagerEx.getInstanceEx(myProject).getFileManager().getCachedPsiFile(file);
  }

  /**
   * Walks {@code file} (recursively, but only through already-cached VFS
   * children) and collects a DomEvent for every XML file in project content
   * that has a cached DOM file element. Files that were never touched by the
   * DOM layer produce no events.
   */
  private List<DomEvent> calcDomChangeEvents(final VirtualFile file) {
    if (!(file instanceof NewVirtualFile) || myProject.isDisposed()) {
      return Collections.emptyList();
    }

    final List<DomEvent> events = ContainerUtil.newArrayList();
    VfsUtilCore.visitChildrenRecursively(file, new VirtualFileVisitor() {
      @Override
      public boolean visitFile(@NotNull VirtualFile file) {
        if (myProject.isDisposed() || !ProjectFileIndex.SERVICE.getInstance(myProject).isInContent(file)) {
          return false;
        }

        if (!file.isDirectory() && StdFileTypes.XML == file.getFileType()) {
          final PsiFile psiFile = getCachedPsiFile(file);
          if (psiFile != null && StdFileTypes.XML.equals(psiFile.getFileType()) && psiFile instanceof XmlFile) {
            final DomFileElementImpl domElement = getCachedFileElement((XmlFile)psiFile);
            if (domElement != null) {
              events.add(new DomEvent(domElement, false));
            }
          }
        }
        return true;
      }

      @Nullable
      @Override
      public Iterable<VirtualFile> getChildrenIterable(@NotNull VirtualFile file) {
        // Restrict traversal to children already loaded into the VFS cache,
        // so the visit never hits disk.
        return ((NewVirtualFile)file).getCachedChildren();
      }
    });
    return events;
  }

  /** Convenience accessor returning the project's DomManager downcast to this impl class. */
  @SuppressWarnings({"MethodOverridesStaticMethodOfSuperclass"})
  public static DomManagerImpl getDomManager(Project project) {
    return (DomManagerImpl)DomManager.getDomManager(project);
  }

  @Override
  public void addDomEventListener(DomEventListener listener, Disposable parentDisposable) {
    myListeners.addListener(listener, parentDisposable);
  }

  @Override
  public final ConverterManager getConverterManager() {
    return ServiceManager.getService(ConverterManager.class);
  }

  @Override
  public final ModelMerger createModelMerger() {
    return new ModelMergerImpl();
  }

  /**
   * Bumps the modification count and notifies listeners — unless the change
   * is part of an atomic SemService change, in which case events are
   * suppressed (performAtomicChange() bumps the count once at the end).
   */
  final void fireEvent(DomEvent event) {
    if (mySemService.isInsideAtomicChange()) return;
    incModificationCount();
    myListeners.getMulticaster().eventOccured(event);
  }

  private void fireEvents(Collection<DomEvent> events) {
    for (DomEvent event : events) {
      fireEvent(event);
    }
  }

  @Override
  public final DomGenericInfo getGenericInfo(final Type type) {
    return myApplicationComponent.getStaticGenericInfo(type);
  }

  /**
   * Unwraps a DOM proxy to its invocation handler. Handles three shapes:
   * the proxy is itself a handler, it is a cglib proxy backed by a handler,
   * or it is a "stable" wrapper — in which case the wrapped element is
   * unwrapped recursively. File elements have no handler and yield null.
   */
  @Nullable
  public static DomInvocationHandler getDomInvocationHandler(DomElement proxy) {
    if (proxy instanceof DomFileElement) {
      return null;
    }
    if (proxy instanceof DomInvocationHandler) {
      return (DomInvocationHandler)proxy;
    }
    final InvocationHandler handler = AdvancedProxy.getInvocationHandler(proxy);
    if (handler instanceof StableInvocationHandler) {
      //noinspection unchecked
      final DomElement element = ((StableInvocationHandler<DomElement>)handler).getWrappedElement();
      return element == null ? null : getDomInvocationHandler(element);
    }
    if (handler instanceof DomInvocationHandler) {
      return (DomInvocationHandler)handler;
    }
    return null;
  }

  /** Like {@link #getDomInvocationHandler} but fails fast on a missing handler. */
  @NotNull
  public static DomInvocationHandler getNotNullHandler(DomElement proxy) {
    DomInvocationHandler handler = getDomInvocationHandler(proxy);
    if (handler == null) {
      throw new AssertionError("null handler for " + proxy);
    }
    return handler;
  }

  public static StableInvocationHandler getStableInvocationHandler(Object proxy) {
    return (StableInvocationHandler)AdvancedProxy.getInvocationHandler(proxy);
  }

  public DomApplicationComponent getApplicationComponent() {
    return myApplicationComponent;
  }

  @Override
  public final Project getProject() {
    return myProject;
  }

  /**
   * Returns the DOM file element for {@code file}, installing a mock file
   * description (root class + root tag name) on first use. The SemService
   * cache is cleared so the new description takes effect immediately.
   */
  @Override
  @NotNull
  public final <T extends DomElement> DomFileElementImpl<T> getFileElement(final XmlFile file, final Class<T> aClass, String rootTagName) {
    //noinspection unchecked
    if (file.getUserData(MOCK_DESCRIPTION) == null) {
      file.putUserData(MOCK_DESCRIPTION, new MockDomFileDescription<>(aClass, rootTagName, file.getViewProvider().getVirtualFile()));
      mySemService.clearCache();
    }
    final DomFileElementImpl<T> fileElement = getFileElement(file);
    assert fileElement != null;
    return fileElement;
  }

  @SuppressWarnings({"unchecked"})
  @NotNull
  final <T extends DomElement> FileDescriptionCachedValueProvider<T> getOrCreateCachedValueProvider(final XmlFile xmlFile) {
    //noinspection ConstantConditions
    return mySemService.getSemElement(FILE_DESCRIPTION_KEY, xmlFile);
  }

  public final Set<DomFileDescription> getFileDescriptions(String rootTagName) {
    return myApplicationComponent.getFileDescriptions(rootTagName);
  }

  public final Set<DomFileDescription> getAcceptingOtherRootTagNameDescriptions() {
    return myApplicationComponent.getAcceptingOtherRootTagNameDescriptions();
  }

  @NotNull
  @NonNls
  public final String getComponentName() {
    return getClass().getName();
  }

  /**
   * Runs {@code change} with the "changing" flag set, so DOM/POM listeners
   * ignore the resulting events; the previous flag value is restored afterwards.
   */
  final void runChange(Runnable change) {
    final boolean b = setChanging(true);
    try {
      change.run();
    }
    finally {
      setChanging(b);
    }
  }

  /** Sets the changing flag and returns its previous value. Nested "true" is asserted against. */
  final boolean setChanging(final boolean changing) {
    boolean oldChanging = myChanging;
    if (changing) {
      assert !oldChanging;
    }
    myChanging = changing;
    return oldChanging;
  }

  /**
   * Returns the DOM file element for {@code file}, or null when the file is
   * not DOM-enabled (file type), has no backing file, or is a directory.
   */
  @Override
  @Nullable
  public final <T extends DomElement> DomFileElementImpl<T> getFileElement(XmlFile file) {
    if (file == null) return null;
    if (!(file.getFileType() instanceof DomSupportEnabled)) return null;
    final VirtualFile virtualFile = file.getVirtualFile();
    if (virtualFile != null && virtualFile.isDirectory()) return null;
    return this.<T>getOrCreateCachedValueProvider(file).getFileElement();
  }

  /** Reads the weakly-cached file element from the file's user data; null if collected or never built. */
  @Nullable
  static <T extends DomElement> DomFileElementImpl<T> getCachedFileElement(@NotNull XmlFile file) {
    //noinspection unchecked
    return SoftReference.dereference(file.getUserData(CACHED_FILE_ELEMENT));
  }

  /** Typed variant: returns the file element only if its root class is assignable to {@code domClass}. */
  @Override
  @Nullable
  public final <T extends DomElement> DomFileElementImpl<T> getFileElement(XmlFile file, Class<T> domClass) {
    final DomFileDescription description = getDomFileDescription(file);
    if (description != null && myApplicationComponent.assignabilityCache.isAssignable(domClass, description.getRootElementClass())) {
      return getFileElement(file);
    }
    return null;
  }

  @Override
  @Nullable
  public final DomElement getDomElement(final XmlTag element) {
    // During a DOM-initiated change the tree is in flux; refuse lookups.
    if (myChanging) return null;
    final DomInvocationHandler handler = getDomHandler(element);
    return handler != null ? handler.getProxy() : null;
  }

  @Override
  @Nullable
  public GenericAttributeValue getDomElement(final XmlAttribute attribute) {
    if (myChanging) return null;
    final AttributeChildInvocationHandler handler = mySemService.getSemElement(DOM_ATTRIBUTE_HANDLER_KEY, attribute);
    return handler == null ? null : (GenericAttributeValue)handler.getProxy();
  }

  /** Fetches the DOM handler for an XML element, preferring an already-cached one. */
  @Nullable
  public DomInvocationHandler getDomHandler(final XmlElement tag) {
    if (tag == null) return null;

    List<DomInvocationHandler> cached = mySemService.getCachedSemElements(DOM_HANDLER_KEY, tag);
    if (cached != null && !cached.isEmpty()) {
      return cached.get(0);
    }

    return mySemService.getSemElement(DOM_HANDLER_KEY, tag);
  }

  @Override
  @Nullable
  public AbstractDomChildrenDescription findChildrenDescription(@NotNull final XmlTag tag, @NotNull final DomElement parent) {
    return findChildrenDescription(tag, getDomInvocationHandler(parent));
  }

  static AbstractDomChildrenDescription findChildrenDescription(final XmlTag tag, final DomInvocationHandler parent) {
    final DomGenericInfoEx info = parent.getGenericInfo();
    return info.findChildrenDescription(parent, tag.getLocalName(), tag.getNamespace(), false, tag.getName());
  }

  public final boolean isDomFile(@Nullable PsiFile file) {
    return file instanceof XmlFile && getFileElement((XmlFile)file) != null;
  }

  /** Resolves the DOM file description for any XML PSI element via its containing file. */
  @Nullable
  public final DomFileDescription<?> getDomFileDescription(PsiElement element) {
    if (element instanceof XmlElement) {
      final PsiFile psiFile = element.getContainingFile();
      if (psiFile instanceof XmlFile) {
        return getDomFileDescription((XmlFile)psiFile);
      }
    }
    return null;
  }

  /**
   * Creates an in-memory XML file carrying the MOCK marker and returns its
   * root DOM element of type {@code aClass}. The deliberately improbable root
   * tag name avoids colliding with any real file description.
   */
  @Override
  public final <T extends DomElement> T createMockElement(final Class<T> aClass, final Module module, final boolean physical) {
    final XmlFile file = (XmlFile)PsiFileFactory.getInstance(myProject).createFileFromText("a.xml", StdFileTypes.XML, "", (long)0, physical);
    file.putUserData(MOCK_ELEMENT_MODULE, module);
    file.putUserData(MOCK, new Object());
    return getFileElement(file, aClass, "I_sincerely_hope_that_nobody_will_have_such_a_root_tag_name").getRootElement();
  }

  @Override
  public final boolean isMockElement(DomElement element) {
    return DomUtil.getFile(element).getUserData(MOCK) != null;
  }

  @Override
  public final <T extends DomElement> T createStableValue(final Factory<T> provider) {
    return createStableValue(provider, t -> t.isValid());
  }

  /**
   * Wraps the value produced by {@code provider} in a proxy that transparently
   * re-creates the value (via the provider) whenever {@code validator} reports
   * it invalid. The proxy implements all of the value's interfaces plus
   * {@link StableElement}.
   */
  @Override
  public final <T> T createStableValue(final Factory<T> provider, final Condition<T> validator) {
    final T initial = provider.create();
    assert initial != null;
    final StableInvocationHandler handler = new StableInvocationHandler<>(initial, provider, validator);

    final Set<Class> intf = new HashSet<>();
    ContainerUtil.addAll(intf, initial.getClass().getInterfaces());
    intf.add(StableElement.class);
    //noinspection unchecked
    return (T)AdvancedProxy.createProxy(initial.getClass().getSuperclass(), intf.toArray(new Class[intf.size()]), handler);
  }

  /** Registers a file description and unregisters it again when {@code parentDisposable} is disposed. */
  public final <T extends DomElement> void registerFileDescription(final DomFileDescription<T> description, Disposable parentDisposable) {
    registerFileDescription(description);
    Disposer.register(parentDisposable, new Disposable() {
      @Override
      public void dispose() {
        getFileDescriptions(description.getRootTagName()).remove(description);
        getAcceptingOtherRootTagNameDescriptions().remove(description);
      }
    });
  }

  @Override
  public final void registerFileDescription(final DomFileDescription description) {
    // Cached file-description lookups may now be stale; drop them first.
    mySemService.clearCache();
    myApplicationComponent.registerFileDescription(description);
  }

  @Override
  @NotNull
  public final DomElement getResolvingScope(GenericDomValue element) {
    final DomFileDescription<?> description = DomUtil.getFileElement(element).getFileDescription();
    return description.getResolveScope(element);
  }

  @Override
  @Nullable
  public final DomElement getIdentityScope(DomElement element) {
    final DomFileDescription description = DomUtil.getFileElement(element).getFileDescription();
    return description.getIdentityScope(element);
  }

  @Override
  public TypeChooserManager getTypeChooserManager() {
    return myApplicationComponent.getTypeChooserManager();
  }

  /**
   * Runs {@code change} as one atomic SemService change: individual events are
   * suppressed (see fireEvent) and the modification count is bumped once at
   * the end of the outermost atomic change.
   */
  public void performAtomicChange(@NotNull Runnable change) {
    mySemService.performAtomicChange(change);
    if (!mySemService.isInsideAtomicChange()) {
      incModificationCount();
    }
  }

  public SemService getSemService() {
    return mySemService;
  }
}
ThiagoGarciaAlves/intellij-community
xml/dom-impl/src/com/intellij/util/xml/impl/DomManagerImpl.java
Java
apache-2.0
17,979
/**
 * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
 * SPDX-License-Identifier: Apache-2.0.
 */

#include <aws/securityhub/model/StandardsSubscriptionRequest.h>
#include <aws/core/utils/json/JsonSerializer.h>

#include <utility>

using namespace Aws::Utils::Json;
using namespace Aws::Utils;

namespace Aws
{
namespace SecurityHub
{
namespace Model
{

// Default-construct with both fields marked "not set".
StandardsSubscriptionRequest::StandardsSubscriptionRequest() :
    m_standardsArnHasBeenSet(false),
    m_standardsInputHasBeenSet(false)
{
}

// Construct from JSON; delegates to the default constructor so the
// member-init list is maintained in exactly one place, then deserializes.
StandardsSubscriptionRequest::StandardsSubscriptionRequest(JsonView jsonValue) :
    StandardsSubscriptionRequest()
{
  *this = jsonValue;
}

// Deserialize from JSON. Only fields present in the payload are read;
// each successfully read field flips its "HasBeenSet" flag.
StandardsSubscriptionRequest& StandardsSubscriptionRequest::operator =(JsonView jsonValue)
{
  if(jsonValue.ValueExists("StandardsArn"))
  {
    m_standardsArn = jsonValue.GetString("StandardsArn");

    m_standardsArnHasBeenSet = true;
  }

  if(jsonValue.ValueExists("StandardsInput"))
  {
    // StandardsInput is a string-to-string map in the wire format.
    Aws::Map<Aws::String, JsonView> standardsInputJsonMap = jsonValue.GetObject("StandardsInput").GetAllObjects();
    for(auto& standardsInputItem : standardsInputJsonMap)
    {
      m_standardsInput[standardsInputItem.first] = standardsInputItem.second.AsString();
    }
    m_standardsInputHasBeenSet = true;
  }

  return *this;
}

// Serialize to JSON; only fields that have been set are emitted.
JsonValue StandardsSubscriptionRequest::Jsonize() const
{
  JsonValue payload;

  if(m_standardsArnHasBeenSet)
  {
   payload.WithString("StandardsArn", m_standardsArn);

  }

  if(m_standardsInputHasBeenSet)
  {
   JsonValue standardsInputJsonMap;
   for(auto& standardsInputItem : m_standardsInput)
   {
     standardsInputJsonMap.WithString(standardsInputItem.first, standardsInputItem.second);
   }
   payload.WithObject("StandardsInput", std::move(standardsInputJsonMap));

  }

  return payload;
}

} // namespace Model
} // namespace SecurityHub
} // namespace Aws
awslabs/aws-sdk-cpp
aws-cpp-sdk-securityhub/source/model/StandardsSubscriptionRequest.cpp
C++
apache-2.0
1,905
require 'puppet/node/facts'
require 'puppet/indirector/rest'
require 'puppet/util/puppetdb'

# Facts terminus backed by PuppetDB: saves facts via the command API and
# reads them back through the /v2 query endpoints.
class Puppet::Node::Facts::Puppetdb < Puppet::Indirector::REST
  include Puppet::Util::Puppetdb
  include Puppet::Util::Puppetdb::CommandNames

  # Submit a "replace facts" command for the node named by request.key.
  # The facts object is duplicated (including its values hash) before
  # stringification so the caller's instance is never mutated.
  def save(request)
    facts = request.instance.dup
    facts.values = facts.values.dup
    facts.stringify

    submit_command(request.key, facts.to_pson, CommandReplaceFacts, 1)
  end

  # Fetch the facts for a single node. Returns a Puppet::Node::Facts
  # instance, or nil when the node is unknown. Raises Puppet::Error on any
  # transport or HTTP failure.
  def find(request)
    begin
      response = http_get(request, "/v2/nodes/#{CGI.escape(request.key)}/facts", headers)
      log_x_deprecation_header(response)

      if response.is_a? Net::HTTPSuccess
        result = PSON.parse(response.body)
        # Note: the Inventory Service API appears to expect us to return nil here
        # if the node isn't found.  However, PuppetDB returns an empty array in
        # this case; for now we will just look for that condition and assume that
        # it means that the node wasn't found, so we will return nil.  In the
        # future we may want to improve the logic such that we can distinguish
        # between the "node not found" and the "no facts for this node" cases.
        if result.empty?
          return nil
        end

        # Build the name=>value hash in a single pass; the previous
        # inject/merge version allocated a fresh hash per fact.
        facts = result.each_with_object({}) do |row, acc|
          acc[row['name']] = row['value']
        end
        Puppet::Node::Facts.new(request.key, facts)
      else
        # Newline characters cause an HTTP error, so strip them
        raise "[#{response.code} #{response.message}] #{response.body.gsub(/[\r\n]/, '')}"
      end
    rescue => e
      raise Puppet::Error, "Failed to find facts from PuppetDB at #{self.class.server}:#{self.class.port}: #{e}"
    end
  end

  # Search for nodes matching a set of fact constraints.  The constraints are
  # specified as a hash of the form:
  #
  #   `{type.name.operator => value`
  #
  # The only accepted `type` is 'facts'.
  #
  # `name` must be the fact name to query against.
  #
  # `operator` may be one of {eq, ne, lt, gt, le, ge}, and will default to 'eq'
  # if unspecified.
  def search(request)
    return [] unless request.options

    operator_map = {
      'eq' => '=',
      'gt' => '>',
      'lt' => '<',
      'ge' => '>=',
      'le' => '<=',
    }

    filters = request.options.sort.map do |key, value|
      type, name, operator = key.to_s.split('.')
      operator ||= 'eq'
      raise Puppet::Error, "Fact search against keys of type '#{type}' is unsupported" unless type == 'facts'

      if operator == 'ne'
        # PuppetDB has no != operator; express inequality as not(=).
        ['not', ['=', ['fact', name], value]]
      elsif operator_map.key?(operator)
        [operator_map[operator], ['fact', name], value]
      else
        # Previously an unknown operator silently produced a nil operator in
        # the query, which PuppetDB would reject with an opaque error.
        raise Puppet::Error, "Fact search operator '#{operator}' is unsupported"
      end
    end

    query = ["and"] + filters
    query_param = CGI.escape(query.to_pson)

    begin
      response = http_get(request, "/v2/nodes?query=#{query_param}", headers)
      log_x_deprecation_header(response)

      if response.is_a? Net::HTTPSuccess
        PSON.parse(response.body).collect {|s| s["name"]}
      else
        # Newline characters cause an HTTP error, so strip them
        raise "[#{response.code} #{response.message}] #{response.body.gsub(/[\r\n]/, '')}"
      end
    rescue => e
      raise Puppet::Error, "Could not perform inventory search from PuppetDB at #{self.class.server}:#{self.class.port}: #{e}"
    end
  end

  # Headers sent with every query request.
  def headers
    {
      "Accept" => "application/json",
      "Content-Type" => "application/x-www-form-urlencoded; charset=UTF-8",
    }
  end
end
melissa/puppetdb
puppet/lib/puppet/indirector/facts/puppetdb.rb
Ruby
apache-2.0
3,402
/*
 * Copyright 2021 Red Hat, Inc. and/or its affiliates
 * and other contributors as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.keycloak.quarkus.runtime.storage.database.liquibase;

import java.lang.reflect.Method;
import java.sql.Connection;

import javax.xml.parsers.SAXParserFactory;

import org.jboss.logging.Logger;
import org.keycloak.Config;
import org.keycloak.connections.jpa.updater.liquibase.conn.LiquibaseConnectionProvider;
import org.keycloak.connections.jpa.updater.liquibase.conn.LiquibaseConnectionProviderFactory;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.KeycloakSessionFactory;

import liquibase.Liquibase;
import liquibase.database.Database;
import liquibase.database.DatabaseFactory;
import liquibase.database.jvm.JdbcConnection;
import liquibase.exception.LiquibaseException;
import liquibase.parser.ChangeLogParser;
import liquibase.parser.ChangeLogParserFactory;
import liquibase.parser.core.xml.XMLChangeLogSAXParser;
import liquibase.resource.ClassLoaderResourceAccessor;
import liquibase.resource.ResourceAccessor;

/**
 * Quarkus-specific Liquibase connection provider. Acts both as the provider
 * factory and the provider itself; lazily performs one-time Liquibase setup
 * (disabling XML schema validation on the changelog SAX parsers) on first use.
 */
public class QuarkusLiquibaseConnectionProvider implements LiquibaseConnectionProviderFactory, LiquibaseConnectionProvider {

    private static final Logger logger = Logger.getLogger(QuarkusLiquibaseConnectionProvider.class);

    // Guarded by double-checked locking below; volatile makes the published
    // state of baseLiquibaseInitialization() visible to other threads.
    private volatile boolean initialized = false;
    private ClassLoaderResourceAccessor resourceAccessor;

    @Override
    public LiquibaseConnectionProvider create(KeycloakSession session) {
        if (!initialized) {
            synchronized (this) {
                if (!initialized) {
                    baseLiquibaseInitialization(session);
                    initialized = true;
                }
            }
        }
        return this;
    }

    /**
     * One-time setup: creates the changelog resource accessor and disables
     * XML validation on Liquibase's SAX changelog parsers (the factory is
     * only reachable via reflection, hence the setAccessible dance).
     */
    protected void baseLiquibaseInitialization(KeycloakSession session) {
        resourceAccessor = new ClassLoaderResourceAccessor(getClass().getClassLoader());

        // disables XML validation
        for (ChangeLogParser parser : ChangeLogParserFactory.getInstance().getParsers()) {
            if (parser instanceof XMLChangeLogSAXParser) {
                Method getSaxParserFactory = null;
                try {
                    getSaxParserFactory = XMLChangeLogSAXParser.class.getDeclaredMethod("getSaxParserFactory");
                    getSaxParserFactory.setAccessible(true);
                    SAXParserFactory saxParserFactory = (SAXParserFactory) getSaxParserFactory.invoke(parser);
                    saxParserFactory.setValidating(false);
                    saxParserFactory.setSchema(null);
                } catch (Exception e) {
                    // Log the cause as well: the previous version dropped the
                    // exception, leaving no clue why validation stayed enabled.
                    logger.warnf(e, "Failed to disable liquibase XML validations");
                } finally {
                    if (getSaxParserFactory != null) {
                        getSaxParserFactory.setAccessible(false);
                    }
                }
            }
        }
    }

    @Override
    public void init(Config.Scope config) {
        // no configuration needed
    }

    @Override
    public void postInit(KeycloakSessionFactory factory) {
        // nothing to do after factory initialization
    }

    @Override
    public void close() {
        // no resources to release
    }

    @Override
    public String getId() {
        return "quarkus";
    }

    /**
     * Creates a Liquibase instance for the standard Keycloak changelog,
     * bound to the given JDBC connection and (optionally) default schema.
     */
    @Override
    public Liquibase getLiquibase(Connection connection, String defaultSchema) throws LiquibaseException {
        Database database = DatabaseFactory.getInstance().findCorrectDatabaseImplementation(new JdbcConnection(connection));
        if (defaultSchema != null) {
            database.setDefaultSchemaName(defaultSchema);
        }

        String changelog = QuarkusJpaUpdaterProvider.CHANGELOG;

        logger.debugf("Using changelog file %s and changelogTableName %s", changelog, database.getDatabaseChangeLogTableName());
        return new Liquibase(changelog, resourceAccessor, database);
    }

    /**
     * Creates a Liquibase instance for a custom changelog, loading resources
     * from the supplied classloader and recording into a custom changelog table.
     */
    @Override
    public Liquibase getLiquibaseForCustomUpdate(Connection connection, String defaultSchema, String changelogLocation, ClassLoader classloader,
            String changelogTableName) throws LiquibaseException {
        Database database = DatabaseFactory.getInstance().findCorrectDatabaseImplementation(new JdbcConnection(connection));
        if (defaultSchema != null) {
            database.setDefaultSchemaName(defaultSchema);
        }
        ResourceAccessor resourceAccessor = new ClassLoaderResourceAccessor(classloader);
        database.setDatabaseChangeLogTableName(changelogTableName);

        logger.debugf("Using changelog file %s and changelogTableName %s", changelogLocation, database.getDatabaseChangeLogTableName());
        return new Liquibase(changelogLocation, resourceAccessor, database);
    }

    @Override
    public int order() {
        return 100;
    }
}
stianst/keycloak
quarkus/runtime/src/main/java/org/keycloak/quarkus/runtime/storage/database/liquibase/QuarkusLiquibaseConnectionProvider.java
Java
apache-2.0
5,267