code
stringlengths
3
1.05M
repo_name
stringlengths
4
116
path
stringlengths
4
991
language
stringclasses
9 values
license
stringclasses
15 values
size
int32
3
1.05M
<?php
/**
 * Magento
 *
 * NOTICE OF LICENSE
 *
 * This source file is subject to the Open Software License (OSL 3.0)
 * that is bundled with this package in the file LICENSE.txt.
 * It is also available through the world-wide-web at this URL:
 * http://opensource.org/licenses/osl-3.0.php
 * If you did not receive a copy of the license and are unable to
 * obtain it through the world-wide-web, please send an email
 * to license@magentocommerce.com so we can send you a copy immediately.
 *
 * DISCLAIMER
 *
 * Do not edit or add to this file if you wish to upgrade Magento to newer
 * versions in the future. If you wish to customize Magento for your
 * needs please refer to http://www.magentocommerce.com for more information.
 *
 * @category    Mage
 * @package     Mage_Adminhtml
 * @copyright   Copyright (c) 2012 Magento Inc. (http://www.magentocommerce.com)
 * @license     http://opensource.org/licenses/osl-3.0.php  Open Software License (OSL 3.0)
 */

/**
 * System configuration source model that lists the available interface
 * languages, as loaded from the core language resource collection.
 */
class Mage_Adminhtml_Model_System_Config_Source_Language
{
    /**
     * Cached option array so the language collection is loaded only once
     * per request.
     *
     * @var array|null
     */
    protected $_options;

    /**
     * Return the languages as an option array for select/multiselect fields.
     *
     * @param bool $isMultiselect when false (single select), a blank
     *                            option is prepended so "no value" can be
     *                            represented; defaults to false so existing
     *                            callers that pass the flag are unaffected
     * @return array option rows of the form array('value' => ..., 'label' => ...)
     */
    public function toOptionArray($isMultiselect = false)
    {
        if (!$this->_options) {
            $this->_options = Mage::getResourceModel('core/language_collection')
                ->loadData()
                ->toOptionArray();
        }

        // Work on a copy so the cached array is not polluted by the blank row.
        $options = $this->_options;
        if (!$isMultiselect) {
            array_unshift($options, array('value' => '', 'label' => ''));
        }
        return $options;
    }
}
keegan2149/magento
sites/default/app/code/core/Mage/Adminhtml/Model/System/Config/Source/Language.php
PHP
gpl-2.0
1,443
<?php

/**
 * Provides a plugin for the '@type' meta tag.
 */
class SchemaImageObjectType extends SchemaTypeBase {

  /**
   * {@inheritdoc}
   */
  public static function labels() {
    // This type exposes exactly one label.
    $labels = array('ImageObject');
    return $labels;
  }

}
charlie59/non-texas-2
sites/all/modules/schema_metatag/schema_image_object/src/SchemaImageObjectType.php
PHP
gpl-2.0
231
/*
 *      Copyright (C) 2013 Team XBMC
 *      http://xbmc.org
 *
 *  This Program is free software; you can redistribute it and/or modify
 *  it under the terms of the GNU General Public License as published by
 *  the Free Software Foundation; either version 2, or (at your option)
 *  any later version.
 *
 *  This Program is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 *  GNU General Public License for more details.
 *
 *  You should have received a copy of the GNU General Public License
 *  along with XBMC; see the file COPYING.  If not, see
 *  <http://www.gnu.org/licenses/>.
 *
 */

#include "DirectoryProvider.h"
#include "filesystem/Directory.h"
#include "filesystem/FavouritesDirectory.h"
#include "guilib/GUIWindowManager.h"
#include "utils/JobManager.h"
#include "utils/XMLUtils.h"
#include "utils/URIUtils.h"
#include "threads/SingleLock.h"
#include "FileItem.h"
#include "video/VideoThumbLoader.h"
#include "music/MusicThumbLoader.h"
#include "pictures/PictureThumbLoader.h"
#include "interfaces/AnnouncementManager.h"
#include <algorithm> // std::min (previously only available transitively)
#include <cassert>   // assert (previously only available transitively)
#include <cstring>   // strcmp (previously only available transitively)
#include <memory>

using namespace std;
using namespace XFILE;
using namespace ANNOUNCEMENT;

/**
 * Background job that fetches the contents of a directory URL, converts the
 * entries into CGUIStaticItem's (loading thumbnails as it goes) and records
 * the directory's "node.target" property for later navigation.
 */
class CDirectoryJob : public CJob
{
public:
  /**
   * \param url      directory URL to list
   * \param limit    maximum number of items to convert; 0 means "all items"
   * \param parentID window/control id used when resolving visibility conditions
   *
   * NOTE(review): limit is now unsigned int — the member and the only caller
   * (CDirectoryProvider::FireJob, passing m_currentLimit) are both unsigned,
   * so the previous int parameter narrowed twice for no reason.
   */
  CDirectoryJob(const std::string &url, unsigned int limit, int parentID)
    : m_url(url), m_limit(limit), m_parentID(parentID)
  {
  }

  virtual ~CDirectoryJob() {}

  virtual const char* GetType() const { return "directory"; }

  /// Jobs are considered equal when they list the same URL — used by the
  /// job manager to coalesce duplicate requests.
  virtual bool operator==(const CJob *job) const
  {
    if (strcmp(job->GetType(), GetType()) == 0)
    {
      const CDirectoryJob* dirJob = dynamic_cast<const CDirectoryJob*>(job);
      if (dirJob && dirJob->m_url == m_url)
        return true;
    }
    return false;
  }

  virtual bool DoWork()
  {
    CFileItemList items;
    if (CDirectory::GetDirectory(m_url, items, ""))
    {
      // limit must not exceed the number of items
      int limit = (m_limit == 0) ? items.Size()
                                 : std::min(static_cast<int>(m_limit), items.Size());

      // convert to CGUIStaticItem's and set visibility and targets
      m_items.reserve(limit);
      for (int i = 0; i < limit; i++)
      {
        CGUIStaticItemPtr item(new CGUIStaticItem(*items[i]));
        if (item->HasProperty("node.visible"))
          item->SetVisibleCondition(item->GetProperty("node.visible").asString(), m_parentID);

        getThumbLoader(item)->LoadItem(item.get());

        m_items.push_back(item);
      }
      m_target = items.GetProperty("node.target").asString();
    }
    // Always report success; an unreadable directory simply yields no items.
    return true;
  }

  /// Return (lazily creating) the thumb loader matching the item's media
  /// type; PROGRAM is the fallback for anything that is not video/audio/picture.
  std::shared_ptr<CThumbLoader> getThumbLoader(CGUIStaticItemPtr &item)
  {
    if (item->IsVideo())
    {
      initThumbLoader<CVideoThumbLoader>(VIDEO);
      return m_thumbloaders[VIDEO];
    }
    if (item->IsAudio())
    {
      initThumbLoader<CMusicThumbLoader>(AUDIO);
      return m_thumbloaders[AUDIO];
    }
    if (item->IsPicture())
    {
      initThumbLoader<CPictureThumbLoader>(PICTURE);
      return m_thumbloaders[PICTURE];
    }
    initThumbLoader<CProgramThumbLoader>(PROGRAM);
    return m_thumbloaders[PROGRAM];
  }

  /// Create and start the loader for \p type on first use.
  template<class CThumbLoaderClass>
  void initThumbLoader(InfoTagType type)
  {
    if (!m_thumbloaders.count(type))
    {
      std::shared_ptr<CThumbLoader> thumbLoader = std::make_shared<CThumbLoaderClass>();
      thumbLoader->OnLoaderStart();
      m_thumbloaders.insert(make_pair(type, thumbLoader));
    }
  }

  const std::vector<CGUIStaticItemPtr> &GetItems() const { return m_items; }

  const std::string &GetTarget() const { return m_target; }

  /// Fill (and also return) \p itemTypes with the media types encountered
  /// while loading — i.e. the keys of the thumb-loader map.
  std::vector<InfoTagType> GetItemTypes(std::vector<InfoTagType> &itemTypes) const
  {
    itemTypes.clear();
    for (std::map<InfoTagType, std::shared_ptr<CThumbLoader> >::const_iterator i = m_thumbloaders.begin();
         i != m_thumbloaders.end(); ++i)
      itemTypes.push_back(i->first);
    return itemTypes;
  }

private:
  std::string m_url;
  std::string m_target;
  unsigned int m_limit;
  int m_parentID;
  std::vector<CGUIStaticItemPtr> m_items;
  std::map<InfoTagType, std::shared_ptr<CThumbLoader> > m_thumbloaders;
};

/**
 * Build a provider from its skin XML element. The element's text content is
 * the directory URL; optional "target" and "limit" attributes are stored as
 * info labels so they may contain skin variables.
 */
CDirectoryProvider::CDirectoryProvider(const TiXmlElement *element, int parentID)
 : IListProvider(parentID),
   m_updateTime(0),
   m_updateState(OK),
   m_isAnnounced(false),
   m_jobID(0),
   m_currentLimit(0)
{
  assert(element);
  if (!element->NoChildren())
  {
    const char *target = element->Attribute("target");
    if (target)
      m_target.SetLabel(target, "", parentID);

    const char *limit = element->Attribute("limit");
    if (limit)
      m_limit.SetLabel(limit, "", parentID);

    m_url.SetLabel(element->FirstChild()->ValueStr(), "", parentID);
  }
}

CDirectoryProvider::~CDirectoryProvider()
{
  Reset(true);
}

bool CDirectoryProvider::Update(bool forceRefresh)
{
  // we never need to force refresh here
  bool changed = false;
  bool fireJob = false;
  {
    CSingleLock lock(m_section);
    if (m_updateState == DONE)
      changed = true;
    else if (m_updateState == PENDING)
      fireJob = true;
    m_updateState = OK;
  }

  // update the URL & limit and fire off a new job if needed
  fireJob |= UpdateURL();
  fireJob |= UpdateLimit();
  if (fireJob)
    FireJob();

  for (vector<CGUIStaticItemPtr>::iterator i = m_items.begin(); i != m_items.end(); ++i)
    changed |= (*i)->UpdateVisibility(m_parentID);
  return changed; // TODO: Also returned changed if properties are changed (if so, need to update scroll to letter).
}

void CDirectoryProvider::Announce(AnnouncementFlag flag, const char *sender, const char *message, const CVariant &data)
{
  // we are only interested in library changes
  if ((flag & (VideoLibrary | AudioLibrary)) == 0)
    return;

  {
    CSingleLock lock(m_section);
    // we don't need to refresh anything if there are no fitting
    // items in this list provider for the announcement flag
    if (((flag & VideoLibrary) && (std::find(m_itemTypes.begin(), m_itemTypes.end(), VIDEO) == m_itemTypes.end())) ||
        ((flag & AudioLibrary) && (std::find(m_itemTypes.begin(), m_itemTypes.end(), AUDIO) == m_itemTypes.end())))
      return;

    // if we're in a database transaction, don't bother doing anything just yet
    if (data.isMember("transaction") && data["transaction"].asBoolean())
      return;

    // if there was a database update, we set the update state
    // to PENDING to fire off a new job in the next update
    if (strcmp(message, "OnScanFinished") == 0 ||
        strcmp(message, "OnCleanFinished") == 0 ||
        strcmp(message, "OnUpdate") == 0 ||
        strcmp(message, "OnRemove") == 0)
      m_updateState = PENDING;
  }
}

void CDirectoryProvider::Fetch(vector<CGUIListItemPtr> &items) const
{
  CSingleLock lock(m_section);
  items.clear();
  // hand out only the items whose visibility condition currently holds
  for (vector<CGUIStaticItemPtr>::const_iterator i = m_items.begin(); i != m_items.end(); ++i)
  {
    if ((*i)->IsVisible())
      items.push_back(*i);
  }
}

void CDirectoryProvider::Reset(bool immediately /* = false */)
{
  // cancel any pending jobs
  CSingleLock lock(m_section);
  if (m_jobID)
    CJobManager::GetInstance().CancelJob(m_jobID);
  m_jobID = 0;
  // reset only if this is going to be destructed
  if (immediately)
  {
    m_items.clear();
    m_currentTarget.clear();
    m_currentUrl.clear();
    m_itemTypes.clear();
    m_currentLimit = 0;
    m_updateState = OK;
    RegisterListProvider(false);
  }
}

void CDirectoryProvider::OnJobComplete(unsigned int jobID, bool success, CJob *job)
{
  CSingleLock lock(m_section);
  if (success)
  {
    // the job manager only hands back jobs we submitted, so the static
    // downcast is safe here (was a C-style cast)
    CDirectoryJob *dirJob = static_cast<CDirectoryJob*>(job);
    m_items = dirJob->GetItems();
    m_currentTarget = dirJob->GetTarget();
    dirJob->GetItemTypes(m_itemTypes);
    m_updateState = DONE;
  }
  m_jobID = 0;
}

bool CDirectoryProvider::OnClick(const CGUIListItemPtr &item)
{
  CFileItem fileItem(*std::static_pointer_cast<CFileItem>(item));

  // target resolution order: per-item property, directory property, skin XML attribute
  string target = fileItem.GetProperty("node.target").asString();
  if (target.empty())
    target = m_currentTarget;
  if (target.empty())
    target = m_target.GetLabel(m_parentID, false);
  if (fileItem.HasProperty("node.target_url"))
    fileItem.SetPath(fileItem.GetProperty("node.target_url").asString());

  // grab the execute string
  string execute = CFavouritesDirectory::GetExecutePath(fileItem, target);
  if (!execute.empty())
  {
    CGUIMessage message(GUI_MSG_EXECUTE, 0, 0);
    message.SetStringParam(execute);
    g_windowManager.SendMessage(message);
    return true;
  }
  return false;
}

bool CDirectoryProvider::IsUpdating() const
{
  CSingleLock lock(m_section);
  // updating while a job is in flight or finished results are still unconsumed
  return m_jobID || (m_updateState == DONE);
}

void CDirectoryProvider::FireJob()
{
  CSingleLock lock(m_section);
  // supersede any job that is still running for the previous URL/limit
  if (m_jobID)
    CJobManager::GetInstance().CancelJob(m_jobID);
  m_jobID = CJobManager::GetInstance().AddJob(new CDirectoryJob(m_currentUrl, m_currentLimit, m_parentID), this);
}

void CDirectoryProvider::RegisterListProvider(bool hasLibraryContent)
{
  // (un)subscribe from library announcements depending on whether the
  // current URL points at library content
  if (hasLibraryContent && !m_isAnnounced)
  {
    m_isAnnounced = true;
    CAnnouncementManager::Get().AddAnnouncer(this);
  }
  else if (!hasLibraryContent && m_isAnnounced)
  {
    m_isAnnounced = false;
    CAnnouncementManager::Get().RemoveAnnouncer(this);
  }
}

/// Re-evaluate the URL info label; returns true if it changed (a new job is needed).
bool CDirectoryProvider::UpdateURL()
{
  std::string value(m_url.GetLabel(m_parentID, false));
  if (value == m_currentUrl)
    return false;

  m_currentUrl = value;

  // Register this provider only if we have library content
  RegisterListProvider(URIUtils::IsLibraryContent(m_currentUrl));

  return true;
}

/// Re-evaluate the limit info label; returns true if it changed (a new job is needed).
bool CDirectoryProvider::UpdateLimit()
{
  unsigned int value = m_limit.GetIntValue(m_parentID);
  if (value == m_currentLimit)
    return false;

  m_currentLimit = value;
  return true;
}
jmarcet/kodi
xbmc/listproviders/DirectoryProvider.cpp
C++
gpl-2.0
9,724
/*
 * Copyright (c) 2007, 2012, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */
package org.graalvm.compiler.jtt.bytecode;

import org.junit.Test;

import org.graalvm.compiler.jtt.JTTTest;

/*
 * Exercises the d2f bytecode: a narrowing double-to-float conversion.
 */
public class BC_d2f extends JTTTest {

    public static float test(double input) {
        // The explicit cast compiles to the d2f bytecode under test.
        return (float) input;
    }

    @Test
    public void run0() throws Throwable {
        runTest("test", 0.0d);
    }

    @Test
    public void run1() throws Throwable {
        runTest("test", 1.0d);
    }

    @Test
    public void run2() throws Throwable {
        runTest("test", -1.06d);
    }

}
YouDiSN/OpenJDK-Research
jdk9/hotspot/src/jdk.internal.vm.compiler/share/classes/org.graalvm.compiler.jtt/src/org/graalvm/compiler/jtt/bytecode/BC_d2f.java
Java
gpl-2.0
1,561
"use strict";

// Traceur-compiled ES6 ArrayIterator polyfill (CommonJS output), reformatted
// for readability. Behavior is identical to the generated original.

function $__interopRequire(id) {
  id = require(id);
  return id && id.__esModule && id || {default: id};
}

// Expose entries/keys/values as live getters on the module exports.
Object.defineProperties(module.exports, {
  __esModule: {value: true},
  entries: {
    enumerable: true,
    get: function() {
      return entries;
    }
  },
  keys: {
    enumerable: true,
    get: function() {
      return keys;
    }
  },
  values: {
    enumerable: true,
    get: function() {
      return values;
    }
  }
});

var $__createClass = $__interopRequire("traceur/dist/commonjs/runtime/modules/createClass.js").default;
var $__3 = require("./utils.js"),
    toObject = $__3.toObject,
    toUint32 = $__3.toUint32,
    createIteratorResultObject = $__3.createIteratorResultObject;

// Iteration kinds, mirroring the spec's ArrayIterationKind internal slot.
var ARRAY_ITERATOR_KIND_KEYS = 1;
var ARRAY_ITERATOR_KIND_VALUES = 2;
var ARRAY_ITERATOR_KIND_ENTRIES = 3;

var ArrayIterator = function() {
  var $__1;

  function ArrayIterator() {}

  return ($__createClass)(ArrayIterator, ($__1 = {}, Object.defineProperty($__1, "next", {
    value: function() {
      var iterator = toObject(this);
      var array = iterator.iteratorObject_;
      if (!array) {
        throw new TypeError('Object is not an ArrayIterator');
      }
      var index = iterator.arrayIteratorNextIndex_;
      var itemKind = iterator.arrayIterationKind_;
      var length = toUint32(array.length);
      // Once exhausted, park the index at Infinity so the iterator stays done.
      if (index >= length) {
        iterator.arrayIteratorNextIndex_ = Infinity;
        return createIteratorResultObject(undefined, true);
      }
      iterator.arrayIteratorNextIndex_ = index + 1;
      if (itemKind == ARRAY_ITERATOR_KIND_VALUES)
        return createIteratorResultObject(array[index], false);
      if (itemKind == ARRAY_ITERATOR_KIND_ENTRIES)
        return createIteratorResultObject([index, array[index]], false);
      // KEYS is the remaining kind: yield just the index.
      return createIteratorResultObject(index, false);
    },
    configurable: true,
    enumerable: true,
    writable: true
  }), Object.defineProperty($__1, Symbol.iterator, {
    value: function() {
      // An iterator is its own iterable.
      return this;
    },
    configurable: true,
    enumerable: true,
    writable: true
  }), $__1), {});
}();

// Build an ArrayIterator over `array` yielding keys, values or entries
// according to `kind`.
function createArrayIterator(array, kind) {
  var object = toObject(array);
  var iterator = new ArrayIterator;
  iterator.iteratorObject_ = object;
  iterator.arrayIteratorNextIndex_ = 0;
  iterator.arrayIterationKind_ = kind;
  return iterator;
}

function entries() {
  return createArrayIterator(this, ARRAY_ITERATOR_KIND_ENTRIES);
}

function keys() {
  return createArrayIterator(this, ARRAY_ITERATOR_KIND_KEYS);
}

function values() {
  return createArrayIterator(this, ARRAY_ITERATOR_KIND_VALUES);
}
Haeresis/TestCaseAtlas
webapp/src/assets/traceur/dist/commonjs/runtime/polyfills/ArrayIterator.js
JavaScript
gpl-2.0
2,600
<?php /** * * Handle the waitinglist * * @package VirtueMart * @subpackage Product * @author RolandD * @link http://www.virtuemart.net * @copyright Copyright (c) 2004 - 2010 VirtueMart Team. All rights reserved. * @license http://www.gnu.org/copyleft/gpl.html GNU/GPL, see LICENSE.php * VirtueMart is free software. This version may have been modified pursuant * to the GNU General Public License, and as distributed it includes or * is derivative of works licensed under the GNU General Public License or * other free or open source software licenses. * @version $Id: product_edit_waitinglist.php 2978 2011-04-06 14:21:19Z alatak $ */ // Check to ensure this file is included in Joomla! defined('_JEXEC') or die('Restricted access'); if (isset($this->product->customfields_parent_id)) { ?> <label><?php echo JText::_('COM_VIRTUEMART_CUSTOM_SAVE_FROM_CHILD');?><input type="checkbox" name="save_customfields" value="1" /></label> <?php } else {?> <input type="hidden" name="save_customfields" value="1" /> <?php } ?> <table id="customfieldsTable" width="100%"> <tr> <td valign="top" width="%100"> <?php $i=0; $tables= array('categories'=>'','products'=>'','fields'=>'','customPlugins'=>'',); if (isset($this->product->customfields)) { foreach ($this->product->customfields as $customfield) { if ($customfield->is_cart_attribute) $cartIcone= 'default'; else $cartIcone= 'default-off'; if ($customfield->field_type == 'Z') { $tables['categories'] .= ' <div class="vm_thumb_image"> <span>'.$customfield->display.'</span>'. VirtueMartModelCustomfields::setEditCustomHidden($customfield, $i) .'<div class="vmicon vmicon-16-remove"></div> </div>'; } elseif ($customfield->field_type == 'R') { $tables['products'] .= ' <div class="vm_thumb_image"> <span>'.$customfield->display.'</span>'. 
VirtueMartModelCustomfields::setEditCustomHidden($customfield, $i) .'<div class="vmicon vmicon-16-remove"></div> </div>'; } elseif ($customfield->field_type == 'G') { // no display (group of) child , handled by plugin; } elseif ($customfield->field_type == 'E'){ $tables['customPlugins'] .= ' <fieldset class="removable"> <legend>'.JText::_($customfield->custom_title).'</legend> <span>'.$customfield->display.$customfield->custom_tip.'</span>'. VirtueMartModelCustomfields::setEditCustomHidden($customfield, $i) .'<span class="vmicon icon-nofloat vmicon-16-'.$cartIcone.'"></span> <span class="vmicon vmicon-16-remove"></span> </fieldset>'; } else { $tables['fields'] .= '<tr class="removable"> <td>'.JText::_($customfield->custom_title).'</td> <td>'.$customfield->custom_tip.'</td> <td>'.$customfield->display.'</td> <td>'.JText::_($this->fieldTypes[$customfield->field_type]). VirtueMartModelCustomfields::setEditCustomHidden($customfield, $i) .'</td> <td> <span class="vmicon vmicon-16-'.$cartIcone.'"></span> </td> <td><span class="vmicon vmicon-16-remove"></span><input class="ordering" type="hidden" value="'.$customfield->ordering.'" name="field['.$i .'][ordering]" /></td> </tr>'; } $i++; } } $emptyTable = ' <tr> <td colspan="7">'.JText::_( 'COM_VIRTUEMART_CUSTOM_NO_TYPES').'</td> <tr>'; ?> <fieldset style="background-color:#F9F9F9;"> <legend><?php echo JText::_('COM_VIRTUEMART_RELATED_CATEGORIES'); ?></legend> <?php echo JText::_('COM_VIRTUEMART_CATEGORIES_RELATED_SEARCH'); ?> <div class="jsonSuggestResults" style="width: auto;"> <input type="text" size="40" name="search" id="relatedcategoriesSearch" value="" /> <button class="reset-value"><?php echo JText::_('COM_VIRTUEMART_RESET') ?></button> </div> <div id="custom_categories"><?php echo $tables['categories']; ?></div> </fieldset> <fieldset style="background-color:#F9F9F9;"> <legend><?php echo JText::_('COM_VIRTUEMART_RELATED_PRODUCTS'); ?></legend> <?php echo JText::_('COM_VIRTUEMART_PRODUCT_RELATED_SEARCH'); ?> <div 
class="jsonSuggestResults" style="width: auto;"> <input type="text" size="40" name="search" id="relatedproductsSearch" value="" /> <button class="reset-value"><?php echo JText::_('COM_VIRTUEMART_RESET') ?></button> </div> <div id="custom_products"><?php echo $tables['products']; ?></div> </fieldset> <fieldset style="background-color:#F9F9F9;"> <legend><?php echo JText::_('COM_VIRTUEMART_CUSTOM_FIELD_TYPE' );?></legend> <div><?php echo '<div class="inline">'.$this->customsList; ?></div> <table id="custom_fields" class="adminlist" cellspacing="0" cellpadding="0"> <thead> <tr class="row1"> <th><?php echo JText::_('COM_VIRTUEMART_TITLE');?></th> <th><?php echo JText::_('COM_VIRTUEMART_CUSTOM_TIP');?></th> <th><?php echo JText::_('COM_VIRTUEMART_VALUE');?></th> <th><?php echo JText::_('COM_VIRTUEMART_CART_PRICE');?></th> <th><?php echo JText::_('COM_VIRTUEMART_TYPE');?></th> <th><?php echo JText::_('COM_VIRTUEMART_CUSTOM_IS_CART_ATTRIBUTE');?></th> <th><?php echo JText::_('COM_VIRTUEMART_DELETE'); ?></th> </tr> </thead> <tbody id="custom_field"> <?php if ($tables['fields']) echo $tables['fields'] ; else echo $emptyTable; ?> </tbody> </table><!-- custom_fields --> </fieldset> <fieldset style="background-color:#F9F9F9;"> <legend><?php echo JText::_('COM_VIRTUEMART_CUSTOM_EXTENSION'); ?></legend> <div id="custom_customPlugins"><?php echo $tables['customPlugins']; ?></div> </fieldset> </td> </tr> </table> <div style="clear:both;"></div> <script type="text/javascript"> nextCustom = <?php echo $i ?>; jQuery(document).ready(function(){ jQuery('#custom_field').sortable({ update: function(event, ui) { jQuery(this).find('.ordering').each(function(index,element) { jQuery(element).val(index); //console.log(index+' '); }); } }); }); jQuery('select#customlist').chosen().change(function() { selected = jQuery(this).find( 'option:selected').val() ; 
jQuery.getJSON('index.php?option=com_virtuemart&view=product&task=getData&format=json&type=fields&id='+selected+'&row='+nextCustom+'&virtuemart_product_id=<?php echo $this->product->virtuemart_product_id; ?>', function(data) { jQuery.each(data.value, function(index, value){ jQuery("#custom_"+data.table).append(value); }); }); nextCustom++; }); jQuery('input#relatedproductsSearch').autocomplete({ source: 'index.php?option=com_virtuemart&view=product&task=getData&format=json&type=relatedproducts&row='+nextCustom, select: function(event, ui){ jQuery("#custom_products").append(ui.item.label); nextCustom++; jQuery(this).autocomplete( "option" , 'source' , 'index.php?option=com_virtuemart&view=product&task=getData&format=json&type=relatedproducts&row='+nextCustom ) jQuery('input#relatedcategoriesSearch').autocomplete( "option" , 'source' , 'index.php?option=com_virtuemart&view=product&task=getData&format=json&type=relatedcategories&row='+nextCustom ) }, minLength:1, html: true }); jQuery('input#relatedcategoriesSearch').autocomplete({ source: 'index.php?option=com_virtuemart&view=product&task=getData&format=json&type=relatedcategories&row='+nextCustom, select: function(event, ui){ jQuery("#custom_categories").append(ui.item.label); nextCustom++; jQuery(this).autocomplete( "option" , 'source' , 'index.php?option=com_virtuemart&view=product&task=getData&format=json&type=relatedcategories&row='+nextCustom ) jQuery('input#relatedcategoriesSearch').autocomplete( "option" , 'source' , 'index.php?option=com_virtuemart&view=product&task=getData&format=json&type=relatedproducts&row='+nextCustom ) }, minLength:1, html: true }); // jQuery('#customfieldsTable').delegate('td','click', function() { // jQuery('#customfieldsParent').remove(); // jQuery(this).undelegate('td','click'); // }); // jQuery.each(jQuery('#customfieldsTable').filter(":input").data('events'), function(i, event) { // jQuery.each(event, function(i, handler){ // console.log(handler); // }); // }); eventNames = 
"click.remove keydown.remove change.remove focus.remove"; // all events you wish to bind to function removeParent() {jQuery('#customfieldsParent').remove();console.log($(this));//jQuery('#customfieldsTable input').unbind(eventNames, removeParent) }; // jQuery('#customfieldsTable input').bind(eventNames, removeParent); // jQuery('#customfieldsTable').delegate('*',eventNames,function(event) { // var $thisCell, $tgt = jQuery(event.target); // console.log (event); // }); jQuery('#customfieldsTable').find('input').each(function(i){ current = jQuery(this); // var dEvents = curent.data('events'); // if (!dEvents) {return;} current.click(function(){ jQuery('#customfieldsParent').remove(); }); //console.log (curent); // jQuery.each(dEvents, function(name, handler){ // if((new RegExp('^(' + (events === '*' ? '.+' : events.replace(',','|').replace(/^on/i,'')) + ')$' ,'i')).test(name)) { // jQuery.each(handler, function(i,handler){ // outputFunction(elem, '\n' + i + ': [' + name + '] : ' + handler ); // }); // } // }); }); //onsole.log(jQuery('#customfieldsTable').data('events')); </script>
Fundacion-AG/PaginaWebFAG
tmp/install_535533f62cdf3/administrator/components/com_virtuemart/views/product/tmpl/product_edit_custom.php
PHP
gpl-2.0
9,558
#region License // Copyright (c) 2013, ClearCanvas Inc. // All rights reserved. // http://www.clearcanvas.ca // // This file is part of the ClearCanvas RIS/PACS open source project. // // The ClearCanvas RIS/PACS open source project is free software: you can // redistribute it and/or modify it under the terms of the GNU General Public // License as published by the Free Software Foundation, either version 3 of the // License, or (at your option) any later version. // // The ClearCanvas RIS/PACS open source project is distributed in the hope that it // will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General // Public License for more details. // // You should have received a copy of the GNU General Public License along with // the ClearCanvas RIS/PACS open source project. If not, see // <http://www.gnu.org/licenses/>. #endregion using System; using System.Collections; using System.Collections.Generic; using System.ComponentModel; using System.Drawing; using System.IO; using System.Text; using System.Windows.Forms; using ClearCanvas.Common; using ClearCanvas.Common.Utilities; using ClearCanvas.Controls.WinForms; using ClearCanvas.Desktop; using ClearCanvas.Desktop.Actions; using ClearCanvas.Desktop.View.WinForms; namespace ClearCanvas.ImageViewer.Explorer.Local.View.WinForms { public partial class LocalImageExplorerControl : UserControl { private LocalImageExplorerComponent _component; private DelayedEventPublisher _folderViewSelectionUpdatePublisher; private Pidl _homeLocation = null; public LocalImageExplorerControl(LocalImageExplorerComponent component) { _component = component; _folderViewSelectionUpdatePublisher = new DelayedEventPublisher(OnFolderViewSelectionUpdatePublished); InitializeComponent(); InitializeHistoryMenu(); InitializeIcons(); InitializeFocusList(); SetViewMode(Settings.Default.FolderViewMode, false); SetHomeLocation(Settings.Default.HomeLocation); 
_folderView.ExceptionRaised += OnFolderControlExceptionRaised; _folderTree.ExceptionRaised += OnFolderControlExceptionRaised; ResetFocus(); // reset focus must happen after explorer controls are initially populated // Initialize menus here ToolStripBuilder.BuildMenu(_folderViewContextMenu.Items, _component.ContextMenuModel.ChildNodes); ToolStripBuilder.BuildMenu(_folderTreeContextMenu.Items, _component.ContextMenuModel.ChildNodes); } private void PerformDispose(bool disposing) { if (disposing) { // this is a managed wrapper so it should only be disposed on disposing if (_homeLocation != null) { _homeLocation.Dispose(); _homeLocation = null; } if (_folderViewSelectionUpdatePublisher != null) { _folderViewSelectionUpdatePublisher.Dispose(); _folderViewSelectionUpdatePublisher = null; } } } private void OnFolderControlExceptionRaised(object sender, ItemEventArgs<Exception> e) { Exception ex = e.Item; Platform.Log(LogLevel.Debug, "FolderControl exception detected. Last Known Location: {0}", _txtAddress.Text); ExceptionHandler.Report(ex, _component.DesktopWindow); } private void UpdateFolderTreeSelection() { _component.Selection = new PathSelection(_folderTree.SelectedItem); } private void OnFolderViewSelectionUpdatePublished(object sender, EventArgs e) { _component.Selection = new PathSelection(CollectionUtils.Cast<FolderObject>(_folderView.SelectedItems)); } private void OnItemOpened(object sender, EventArgs e) { if (_component.DefaultActionHandler != null) { _component.DefaultActionHandler(); } } private void SetViewMode(System.Windows.Forms.View view, bool saveSetting) { _mnuIconsView.Checked = (view == System.Windows.Forms.View.LargeIcon); _mnuListView.Checked = (view == System.Windows.Forms.View.List); _mnuDetailsView.Checked = (view == System.Windows.Forms.View.Details); _mnuTilesView.Checked = (view == System.Windows.Forms.View.Tile); _folderView.View = view; if (saveSetting) { Settings settings = Settings.Default; settings.FolderViewMode = view; 
settings.Save(); } } private void SetHomeLocation(string homeLocation) { if (!string.IsNullOrEmpty(homeLocation)) { try { Environment.SpecialFolder specialFolder = (Environment.SpecialFolder) Enum.Parse(typeof (Environment.SpecialFolder), homeLocation); _homeLocation = new Pidl(specialFolder); return; } catch (ArgumentException) {} catch (NotSupportedException) {} catch (Exception ex) { Platform.Log(LogLevel.Debug, ex, "The special folder {0} isn't available.", homeLocation); } Pidl pidl; if (Pidl.TryParse(homeLocation, out pidl)) { _homeLocation = pidl; return; } _homeLocation = null; } } public void BrowseToHome() { if (_homeLocation == null) _folderCoordinator.BrowseToHome(); else _folderCoordinator.BrowseTo(_homeLocation); } #region Tab Order private delegate bool FocusDelegate(bool forward); private IList<KeyValuePair<Control, FocusDelegate>> _focusDelegates = null; private void InitializeFocusList() { // initialize control focus list List<KeyValuePair<Control, FocusDelegate>> focusDelegates = new List<KeyValuePair<Control, FocusDelegate>>(3); focusDelegates.Add(new KeyValuePair<Control, FocusDelegate>(_folderTree, f => _folderTree.SelectNextControl(_folderTree, f, false, true, false))); focusDelegates.Add(new KeyValuePair<Control, FocusDelegate>(_folderView, f => _folderView.SelectNextControl(_folderView, f, false, true, false))); focusDelegates.Add(new KeyValuePair<Control, FocusDelegate>(_addressStrip, f => { _txtAddress.Focus(); return _addressStrip.ContainsFocus; })); _focusDelegates = focusDelegates.AsReadOnly(); } private void ResetFocus() { if (_focusDelegates.Count > 0) _focusDelegates[0].Value.Invoke(true); } protected override bool ProcessTabKey(bool forward) { // overrides the tab order using the focus delegates list int indexFocusedControl = 0; while (indexFocusedControl < _focusDelegates.Count) { // find the control that is currently focused if (_focusDelegates[indexFocusedControl].Key.ContainsFocus) { // try to focus the next control in sequence 
for (int offset = 1; offset < _focusDelegates.Count; offset++) { int index = (indexFocusedControl + (forward ? offset : _focusDelegates.Count - offset))%_focusDelegates.Count; if (_focusDelegates[index].Value.Invoke(forward)) break; // end loop on first control that successfully focused } return true; } indexFocusedControl++; } return base.ProcessTabKey(forward); } #endregion #region Explorer Control private const int ShowHistoryCount = 10; private string _lastValidLocation = string.Empty; private static void InitializeImageList(ImageList imageList, string sizeString) { Type type = typeof (LocalImageExplorerControl); var resourceResolver = new ActionResourceResolver(type); string[] icons = {"Back", "Next", "Up", "Refresh", "Home", "ShowFolders", "View", "Go"}; foreach (string iconName in icons) { var resourceName = string.Format("{0}.Icons.{1}Tool{2}.png", type.Namespace, iconName, sizeString); using (var ioStream = resourceResolver.OpenResource(resourceName)) { if (ioStream == null) continue; imageList.Images.Add(iconName, Image.FromStream(ioStream)); } } } private ImageList GetImageList(IconSize iconSize) { if (iconSize == IconSize.Small) return _smallIconImageList; if (iconSize == IconSize.Medium) return _mediumIconImageList; return _largeIconImageList; } private void InitializeIcons() { InitializeImageList(_largeIconImageList, "Large"); InitializeImageList(_mediumIconImageList, "Medium"); InitializeImageList(_smallIconImageList, "Small"); _toolStrip.ImageList = GetImageList(Settings.Default.ToolbarIconSize); _btnBack.ImageKey = @"Back"; _btnForward.ImageKey = @"Next"; _btnUp.ImageKey = @"Up"; _btnRefresh.ImageKey = @"Refresh"; _btnHome.ImageKey = @"Home"; _btnShowFolders.ImageKey = @"ShowFolders"; _btnViews.ImageKey = @"View"; _addressStrip.ImageList = _smallIconImageList; _btnGo.ImageKey = @"Go"; } private void InitializeHistoryMenu() { for (int n = 0; n < ShowHistoryCount; n++) { ToolStripMenuItem menuBack = new ToolStripMenuItem(); menuBack.Click += 
_mnuHistoryItem_Click; menuBack.Tag = -(n + 1); menuBack.Visible = false; _btnBack.DropDownItems.Add(menuBack); ToolStripMenuItem menuForward = new ToolStripMenuItem(); menuForward.Click += _mnuHistoryItem_Click; menuForward.Tag = n + 1; menuForward.Visible = false; _btnForward.DropDownItems.Add(menuForward); } } protected override void OnLoad(EventArgs e) { try { this.BrowseToHome(); } catch (Exception ex) { OnFolderControlExceptionRaised(null, new ItemEventArgs<Exception>(ex)); } base.OnLoad(e); } private void _folderCoordinator_CurrentPidlChanged(object sender, EventArgs e) { _btnUp.Enabled = _folderCoordinator.CanBrowseToParent; _btnBack.Enabled = _folderCoordinator.CanBrowseToPrevious; _btnForward.Enabled = _folderCoordinator.CanBrowseToNext; _lastValidLocation = _txtAddress.Text = _folderCoordinator.CurrentPath; this.Text = _folderCoordinator.CurrentDisplayName; this.UpdateBackButtonMenu(); this.UpdateForwardButtonMenu(); } private void UpdateBackButtonMenu() { int count = 0; foreach (Pidl pastPidl in _folderCoordinator.EnumeratePreviousLocations(false)) { if (count >= ShowHistoryCount) break; _btnBack.DropDownItems[count].Text = pastPidl.DisplayName; _btnBack.DropDownItems[count].Visible = true; count++; } for (int n = count; n < ShowHistoryCount; n++) _btnBack.DropDownItems[n].Visible = false; } private void UpdateForwardButtonMenu() { int count = 0; foreach (Pidl futurePidl in _folderCoordinator.EnumerateNextLocations(false)) { if (count >= ShowHistoryCount) break; _btnForward.DropDownItems[count].Text = futurePidl.DisplayName; _btnForward.DropDownItems[count].Visible = true; count++; } for (int n = count; n < ShowHistoryCount; n++) _btnForward.DropDownItems[n].Visible = false; } private void _btnUp_Click(object sender, EventArgs e) { this.Cursor = Cursors.WaitCursor; try { _folderCoordinator.BrowseToParent(); } catch (Exception ex) { OnFolderControlExceptionRaised(null, new ItemEventArgs<Exception>(ex)); } finally { this.ResetCursor(); } } private void 
_btnBack_Click(object sender, EventArgs e) { this.Cursor = Cursors.WaitCursor; try { _folderCoordinator.BrowseToPrevious(); } catch (Exception ex) { OnFolderControlExceptionRaised(null, new ItemEventArgs<Exception>(ex)); } finally { this.ResetCursor(); } } private void _btnForward_Click(object sender, EventArgs e) { this.Cursor = Cursors.WaitCursor; try { _folderCoordinator.BrowseToNext(); } catch (Exception ex) { OnFolderControlExceptionRaised(null, new ItemEventArgs<Exception>(ex)); } finally { this.ResetCursor(); } } private void _btnHome_Click(object sender, EventArgs e) { this.Cursor = Cursors.WaitCursor; try { this.BrowseToHome(); } catch (Exception ex) { OnFolderControlExceptionRaised(null, new ItemEventArgs<Exception>(ex)); } finally { this.ResetCursor(); } } private void _btnRefresh_Click(object sender, EventArgs e) { this.Cursor = Cursors.WaitCursor; try { _folderCoordinator.Refresh(); } catch(Exception ex) { OnFolderControlExceptionRaised(null, new ItemEventArgs<Exception>(ex)); } finally { this.ResetCursor(); } } private void _btnGo_Click(object sender, EventArgs e) { this.Cursor = Cursors.WaitCursor; try { if (!string.IsNullOrEmpty(_txtAddress.Text)) _folderCoordinator.BrowseTo(_txtAddress.Text); } catch (Exception ex) { this.OnFolderControlExceptionRaised(null, new ItemEventArgs<Exception>(ex)); _txtAddress.Text = _lastValidLocation; } finally { this.ResetCursor(); } } private void _txtAddress_KeyEnterPressed(object sender, EventArgs e) { _btnGo.PerformClick(); } private void _mnuHistoryItem_Click(object sender, EventArgs e) { this.Cursor = Cursors.WaitCursor; try { _folderCoordinator.BrowseTo((int) ((ToolStripMenuItem) sender).Tag); } catch (Exception ex) { OnFolderControlExceptionRaised(null, new ItemEventArgs<Exception>(ex)); } finally { this.ResetCursor(); } } private void _btnShowFolders_Click(object sender, EventArgs e) { _btnShowFolders.Checked = !_btnShowFolders.Checked; _folderTree.Visible = _splitter.Visible = _btnShowFolders.Checked; } 
// View-mode menu handlers: each one switches the folder view to the
// corresponding System.Windows.Forms.View style.
private void _mnuTilesView_Click(object sender, EventArgs e)
{
	SetViewMode(System.Windows.Forms.View.Tile, true);
}

private void _mnuIconsView_Click(object sender, EventArgs e)
{
	SetViewMode(System.Windows.Forms.View.LargeIcon, true);
}

private void _mnuListView_Click(object sender, EventArgs e)
{
	SetViewMode(System.Windows.Forms.View.List, true);
}

private void _mnuDetailsView_Click(object sender, EventArgs e)
{
	SetViewMode(System.Windows.Forms.View.Details, true);
}

// Double-clicking a non-folder item publishes the selection immediately and
// raises ItemOpened; folder items are not handled here (e.Handled stays false).
private void _folderView_ItemDoubleClick(object sender, FolderViewItemEventArgs e)
{
	if (!e.Item.IsFolder)
	{
		_folderViewSelectionUpdatePublisher.PublishNow(sender, e);
		OnItemOpened(sender, e);
		e.Handled = true;
	}
}

private void _folderView_SelectedItemsChanged(object sender, EventArgs e)
{
	// listview-type controls fire the event for each item in the selection
	// (because each item selection change is conceptually separate in this type of GUI)
	// this can generate a lot of unnecessary calls to update the component's selection
	// so we delay the event here until the selection settles down
	_folderViewSelectionUpdatePublisher.Publish(sender, e);
}

private void _folderTree_SelectedItemsChanged(object sender, EventArgs e)
{
	UpdateFolderTreeSelection();
}

// Ctrl+A: move focus into the folder view, then select everything in it.
private void _folderControl_KeyDown(object sender, KeyEventArgs e)
{
	if (e.KeyData == (Keys.Control | Keys.A))
	{
		_folderView.SelectNextControl(_folderView, true, false, true, false);
		_folderView.SelectAll();
		e.Handled = true;
		e.SuppressKeyPress = true;
	}
}

// Show the wait cursor for the duration of a browse operation; EndBrowse
// restores it.
private void _folderControl_BeginBrowse(object sender, EventArgs e)
{
	this.Cursor = Cursors.WaitCursor;
}

private void _folderControl_EndBrowse(object sender, EventArgs e)
{
	this.ResetCursor();
}

// Opening a context menu forces any pending (delayed) selection update to be
// published immediately, so menu items act on the up-to-date selection.
private void _folderViewContextMenu_Opening(object sender, CancelEventArgs e)
{
	_folderViewSelectionUpdatePublisher.PublishNow(sender, e);
}

private void _folderTreeContextMenu_Opening(object sender, CancelEventArgs e)
{
	UpdateFolderTreeSelection();
}

#endregion

#region ExceptionPolicy Class
// Maps path-related exceptions to user-facing message boxes through the
// application's exception-handling extension point.
[ExceptionPolicyFor(typeof (PathNotFoundException))]
[ExceptionPolicyFor(typeof (PathAccessException))]
[ExtensionOf(typeof (ExceptionPolicyExtensionPoint))]
private class ExceptionPolicy : IExceptionPolicy
{
	// Dispatches to the type-specific handler; any other exception type is
	// silently ignored by this policy.
	public void Handle(Exception ex, IExceptionHandlingContext exceptionHandlingContext)
	{
		if (ex is PathNotFoundException)
			Handle((PathNotFoundException) ex, exceptionHandlingContext);
		else if (ex is PathAccessException)
			Handle((PathAccessException) ex, exceptionHandlingContext);
	}

	// Shows a "path unavailable" message, appending the offending path when known.
	private static void Handle(PathNotFoundException ex, IExceptionHandlingContext exceptionHandlingContext)
	{
		var sb = new StringBuilder();
		sb.AppendLine(SR.ErrorPathUnavailable);
		if (!string.IsNullOrEmpty(ex.Path))
			sb.AppendLine(string.Format(SR.FormatPath, ex.Path));
		exceptionHandlingContext.ShowMessageBox(sb.ToString());
	}

	// Shows a "path security/access" message, appending the offending path when known.
	private static void Handle(PathAccessException ex, IExceptionHandlingContext exceptionHandlingContext)
	{
		var sb = new StringBuilder();
		sb.AppendLine(SR.ErrorPathSecurity);
		if (!string.IsNullOrEmpty(ex.Path))
			sb.AppendLine(string.Format(SR.FormatPath, ex.Path));
		exceptionHandlingContext.ShowMessageBox(sb.ToString());
	}
}

#endregion

#region PathSelection Class

/// <summary>
/// Custom <see cref="IPathSelection"/> implementation that allows for delayed shortcut resolution.
/// </summary>
/// <remarks>
/// Resolving shortcuts can be expensive, so always call at the last possible moment (in conjunction with a user GUI action, preferably).
/// </remarks>
private class PathSelection : Selection<FolderObject>, IPathSelection
{
	public PathSelection(FolderObject item) : base(item) {}

	public PathSelection(IEnumerable<FolderObject> folderObjects) : base(folderObjects) {}

	// Resolves the item's path on demand via GetPath(true) - see the class
	// remarks regarding the cost of shortcut resolution.
	public string this[int index]
	{
		get { return Items[index].GetPath(true); }
	}

	// Case-insensitive (invariant culture) containment test over resolved paths.
	public bool Contains(string path)
	{
		foreach (var item in Items)
			if (string.Equals(path, item.GetPath(true), StringComparison.InvariantCultureIgnoreCase))
				return true;
		return false;
	}

	// Hides the base enumerator so that enumerating the selection yields the
	// resolved path strings rather than the FolderObject items.
	public new IEnumerator<string> GetEnumerator()
	{
		foreach (var item in Items)
			yield return item.GetPath(true);
	}

	IEnumerator IEnumerable.GetEnumerator()
	{
		return GetEnumerator();
	}
}

#endregion
}
}
chinapacs/ImageViewer
ImageViewer/Explorer/Local/View/WinForms/LocalImageExplorerControl.cs
C#
gpl-3.0
18,653
<?php

/**
 * In-memory stand-in for an external blob store, used by tests.
 *
 * URL handling mirrors ExternalStoreDB: FOO://cluster/id or FOO://cluster/id/itemid.
 */
class ExternalStoreForTesting {

	protected $data = [
		'cluster1' => [
			'200' => 'Hello',
			'300' => [
				'Hello',
				'World',
			],
			// gzip string below generated with gzdeflate( 'AAAABBAAA' )
			'12345' => "sttttr\002\022\000",
		],
	];

	/**
	 * Fetch data from given URL
	 * @param string $url An url of the form FOO://cluster/id or FOO://cluster/id/itemid.
	 * @return mixed The stored blob (or one item of a multi-item blob), or null if unknown.
	 */
	public function fetchFromURL( $url ) {
		// Based on ExternalStoreDB: scheme://cluster/id[/itemid]
		$parts = explode( '/', $url );
		$clusterName = $parts[2];
		$blobId = $parts[3];
		$itemId = isset( $parts[4] ) ? $parts[4] : false;

		if ( !isset( $this->data[$clusterName][$blobId] ) ) {
			return null;
		}

		$blob = $this->data[$clusterName][$blobId];
		if ( $itemId !== false && is_array( $blob ) && isset( $blob[$itemId] ) ) {
			return $blob[$itemId];
		}
		return $blob;
	}
}
kylethayer/bioladder
wiki/tests/phpunit/includes/externalstore/ExternalStoreForTesting.php
PHP
gpl-3.0
919
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.0-b52-fcs
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2013.12.20 at 12:48:21 PM GMT
//

package weka.core.pmml.jaxbbindings;

import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;

/**
 * <p>Java class for AnovaRow element declaration.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;element name="AnovaRow">
 *   &lt;complexType>
 *     &lt;complexContent>
 *       &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *         &lt;sequence>
 *           &lt;element ref="{http://www.dmg.org/PMML-4_1}Extension" maxOccurs="unbounded" minOccurs="0"/>
 *         &lt;/sequence>
 *         &lt;attribute name="degreesOfFreedom" use="required" type="{http://www.dmg.org/PMML-4_1}NUMBER" />
 *         &lt;attribute name="fValue" type="{http://www.dmg.org/PMML-4_1}NUMBER" />
 *         &lt;attribute name="meanOfSquares" type="{http://www.dmg.org/PMML-4_1}NUMBER" />
 *         &lt;attribute name="pValue" type="{http://www.dmg.org/PMML-4_1}PROB-NUMBER" />
 *         &lt;attribute name="sumOfSquares" use="required" type="{http://www.dmg.org/PMML-4_1}NUMBER" />
 *         &lt;attribute name="type" use="required">
 *           &lt;simpleType>
 *             &lt;restriction base="{http://www.w3.org/2001/XMLSchema}string">
 *               &lt;enumeration value="Model"/>
 *               &lt;enumeration value="Error"/>
 *               &lt;enumeration value="Total"/>
 *             &lt;/restriction>
 *           &lt;/simpleType>
 *         &lt;/attribute>
 *       &lt;/restriction>
 *     &lt;/complexContent>
 *   &lt;/complexType>
 * &lt;/element>
 * </pre>
 *
 *
 */
// NOTE: generated JAXB binding - change the PMML schema and regenerate rather
// than editing this class directly (see the header above).
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "extension"
})
@XmlRootElement(name = "AnovaRow")
public class AnovaRow {

    // Lazily created by getExtension(); may be null until first access.
    @XmlElement(name = "Extension", namespace = "http://www.dmg.org/PMML-4_1", required = true)
    protected List<Extension> extension;
    // Required by the schema fragment above.
    @XmlAttribute(required = true)
    protected double degreesOfFreedom;
    // Optional attributes use boxed types so "absent" maps to null.
    @XmlAttribute
    protected Double fValue;
    @XmlAttribute
    protected Double meanOfSquares;
    @XmlAttribute
    protected BigDecimal pValue;
    // Required by the schema fragment above.
    @XmlAttribute(required = true)
    protected double sumOfSquares;
    // Required; schema restricts values to "Model", "Error" or "Total".
    @XmlAttribute(required = true)
    protected String type;

    /**
     * Gets the value of the extension property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the extension property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getExtension().add(newItem);
     * </pre>
     *
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link Extension }
     *
     *
     */
    public List<Extension> getExtension() {
        if (extension == null) {
            extension = new ArrayList<Extension>();
        }
        return this.extension;
    }

    /**
     * Gets the value of the degreesOfFreedom property.
     *
     */
    public double getDegreesOfFreedom() {
        return degreesOfFreedom;
    }

    /**
     * Sets the value of the degreesOfFreedom property.
     *
     */
    public void setDegreesOfFreedom(double value) {
        this.degreesOfFreedom = value;
    }

    /**
     * Gets the value of the fValue property.
     *
     * @return
     *     possible object is
     *     {@link Double }
     *
     */
    public Double getFValue() {
        return fValue;
    }

    /**
     * Sets the value of the fValue property.
     *
     * @param value
     *     allowed object is
     *     {@link Double }
     *
     */
    public void setFValue(Double value) {
        this.fValue = value;
    }

    /**
     * Gets the value of the meanOfSquares property.
     *
     * @return
     *     possible object is
     *     {@link Double }
     *
     */
    public Double getMeanOfSquares() {
        return meanOfSquares;
    }

    /**
     * Sets the value of the meanOfSquares property.
     *
     * @param value
     *     allowed object is
     *     {@link Double }
     *
     */
    public void setMeanOfSquares(Double value) {
        this.meanOfSquares = value;
    }

    /**
     * Gets the value of the pValue property.
     *
     * @return
     *     possible object is
     *     {@link BigDecimal }
     *
     */
    public BigDecimal getPValue() {
        return pValue;
    }

    /**
     * Sets the value of the pValue property.
     *
     * @param value
     *     allowed object is
     *     {@link BigDecimal }
     *
     */
    public void setPValue(BigDecimal value) {
        this.pValue = value;
    }

    /**
     * Gets the value of the sumOfSquares property.
     *
     */
    public double getSumOfSquares() {
        return sumOfSquares;
    }

    /**
     * Sets the value of the sumOfSquares property.
     *
     */
    public void setSumOfSquares(double value) {
        this.sumOfSquares = value;
    }

    /**
     * Gets the value of the type property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getType() {
        return type;
    }

    /**
     * Sets the value of the type property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setType(String value) {
        this.type = value;
    }

}
mydzigear/weka.kmeanspp.silhouette_score
src/weka/core/pmml/jaxbbindings/AnovaRow.java
Java
gpl-3.0
6,195
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.commons.codec.net;

import java.io.UnsupportedEncodingException;
import java.nio.charset.Charset;
import java.util.BitSet;

import org.apache.commons.codec.Charsets;
import org.apache.commons.codec.DecoderException;
import org.apache.commons.codec.EncoderException;
import org.apache.commons.codec.StringDecoder;
import org.apache.commons.codec.StringEncoder;

/**
 * Similar to the Quoted-Printable content-transfer-encoding defined in
 * <a href="http://www.ietf.org/rfc/rfc1521.txt">RFC 1521</a> and designed to allow text containing mostly ASCII
 * characters to be decipherable on an ASCII terminal without decoding.
 * <p>
 * <a href="http://www.ietf.org/rfc/rfc1522.txt">RFC 1522</a> describes techniques to allow the encoding of non-ASCII
 * text in various portions of a RFC 822 [2] message header, in a manner which is unlikely to confuse existing message
 * handling software.
 * <p>
 * This class is conditionally thread-safe.
 * The instance field {@link #encodeBlanks} is mutable {@link #setEncodeBlanks(boolean)}
 * but is not volatile, and accesses are not synchronised.
 * If an instance of the class is shared between threads, the caller needs to ensure that suitable synchronisation
 * is used to ensure safe publication of the value between threads, and must not invoke
 * {@link #setEncodeBlanks(boolean)} after initial setup.
 *
 * @see <a href="http://www.ietf.org/rfc/rfc1522.txt">MIME (Multipurpose Internet Mail Extensions) Part Two: Message
 *          Header Extensions for Non-ASCII Text</a>
 *
 * @since 1.3
 * @version $Id$
 */
public class QCodec extends RFC1522Codec implements StringEncoder, StringDecoder {
    /**
     * The default charset used for string decoding and encoding.
     */
    private final Charset charset;

    /**
     * BitSet of printable characters as defined in RFC 1522.
     */
    private static final BitSet PRINTABLE_CHARS = new BitSet(256);
    // Static initializer for printable chars collection
    static {
        // punctuation and symbols; '=', '?' and '_' are not included here, so
        // doEncoding() will escape them
        PRINTABLE_CHARS.set(' ');
        PRINTABLE_CHARS.set('!');
        PRINTABLE_CHARS.set('"');
        PRINTABLE_CHARS.set('#');
        PRINTABLE_CHARS.set('$');
        PRINTABLE_CHARS.set('%');
        PRINTABLE_CHARS.set('&');
        PRINTABLE_CHARS.set('\'');
        PRINTABLE_CHARS.set('(');
        PRINTABLE_CHARS.set(')');
        PRINTABLE_CHARS.set('*');
        PRINTABLE_CHARS.set('+');
        PRINTABLE_CHARS.set(',');
        PRINTABLE_CHARS.set('-');
        PRINTABLE_CHARS.set('.');
        PRINTABLE_CHARS.set('/');
        // digits
        for (int i = '0'; i <= '9'; i++) {
            PRINTABLE_CHARS.set(i);
        }
        PRINTABLE_CHARS.set(':');
        PRINTABLE_CHARS.set(';');
        PRINTABLE_CHARS.set('<');
        PRINTABLE_CHARS.set('>');
        PRINTABLE_CHARS.set('@');
        // upper-case letters
        for (int i = 'A'; i <= 'Z'; i++) {
            PRINTABLE_CHARS.set(i);
        }
        PRINTABLE_CHARS.set('[');
        PRINTABLE_CHARS.set('\\');
        PRINTABLE_CHARS.set(']');
        PRINTABLE_CHARS.set('^');
        PRINTABLE_CHARS.set('`');
        // lower-case letters
        for (int i = 'a'; i <= 'z'; i++) {
            PRINTABLE_CHARS.set(i);
        }
        PRINTABLE_CHARS.set('{');
        PRINTABLE_CHARS.set('|');
        PRINTABLE_CHARS.set('}');
        PRINTABLE_CHARS.set('~');
    }

    // ASCII codes for ' ' and '_'; '_' optionally stands in for a blank (see encodeBlanks).
    private static final byte BLANK = 32;

    private static final byte UNDERSCORE = 95;

    private boolean encodeBlanks = false;

    /**
     * Default constructor.
     */
    public QCodec() {
        this(Charsets.UTF_8);
    }

    /**
     * Constructor which allows for the selection of a default charset.
     *
     * @param charset
     *                  the default string charset to use.
     *
     * @see <a href="http://download.oracle.com/javase/6/docs/api/java/nio/charset/Charset.html">Standard charsets</a>
     * @since 1.7
     */
    public QCodec(final Charset charset) {
        super();
        this.charset = charset;
    }

    /**
     * Constructor which allows for the selection of a default charset.
     *
     * @param charsetName
     *                  the charset to use.
     * @throws java.nio.charset.UnsupportedCharsetException
     *                  If the named charset is unavailable
     * @since 1.7 throws UnsupportedCharsetException if the named charset is unavailable
     * @see <a href="http://download.oracle.com/javase/6/docs/api/java/nio/charset/Charset.html">Standard charsets</a>
     */
    public QCodec(final String charsetName) {
        this(Charset.forName(charsetName));
    }

    @Override
    protected String getEncoding() {
        return "Q";
    }

    @Override
    protected byte[] doEncoding(final byte[] bytes) {
        if (bytes == null) {
            return null;
        }
        final byte[] data = QuotedPrintableCodec.encodeQuotedPrintable(PRINTABLE_CHARS, bytes);
        if (this.encodeBlanks) {
            // optional RFC 1522 space encoding: replace each blank with '_'
            for (int i = 0; i < data.length; i++) {
                if (data[i] == BLANK) {
                    data[i] = UNDERSCORE;
                }
            }
        }
        return data;
    }

    @Override
    protected byte[] doDecoding(final byte[] bytes) throws DecoderException {
        if (bytes == null) {
            return null;
        }
        boolean hasUnderscores = false;
        for (final byte b : bytes) {
            if (b == UNDERSCORE) {
                hasUnderscores = true;
                break;
            }
        }
        if (hasUnderscores) {
            // map every '_' back to a blank (inverse of encodeBlanks) in a copy,
            // then quoted-printable-decode the result
            final byte[] tmp = new byte[bytes.length];
            for (int i = 0; i < bytes.length; i++) {
                final byte b = bytes[i];
                if (b != UNDERSCORE) {
                    tmp[i] = b;
                } else {
                    tmp[i] = BLANK;
                }
            }
            return QuotedPrintableCodec.decodeQuotedPrintable(tmp);
        }
        return QuotedPrintableCodec.decodeQuotedPrintable(bytes);
    }

    /**
     * Encodes a string into its quoted-printable form using the specified charset. Unsafe characters are escaped.
     *
     * @param str
     *                  string to convert to quoted-printable form
     * @param charset
     *                  the charset for str
     * @return quoted-printable string
     * @throws EncoderException
     *                  thrown if a failure condition is encountered during the encoding process.
     * @since 1.7
     */
    public String encode(final String str, final Charset charset) throws EncoderException {
        if (str == null) {
            return null;
        }
        return encodeText(str, charset);
    }

    /**
     * Encodes a string into its quoted-printable form using the specified charset. Unsafe characters are escaped.
     *
     * @param str
     *                  string to convert to quoted-printable form
     * @param charset
     *                  the charset for str
     * @return quoted-printable string
     * @throws EncoderException
     *                  thrown if a failure condition is encountered during the encoding process.
     */
    public String encode(final String str, final String charset) throws EncoderException {
        if (str == null) {
            return null;
        }
        try {
            return encodeText(str, charset);
        } catch (final UnsupportedEncodingException e) {
            throw new EncoderException(e.getMessage(), e);
        }
    }

    /**
     * Encodes a string into its quoted-printable form using the default charset. Unsafe characters are escaped.
     *
     * @param str
     *                  string to convert to quoted-printable form
     * @return quoted-printable string
     * @throws EncoderException
     *                  thrown if a failure condition is encountered during the encoding process.
     */
    @Override
    public String encode(final String str) throws EncoderException {
        if (str == null) {
            return null;
        }
        return encode(str, getCharset());
    }

    /**
     * Decodes a quoted-printable string into its original form. Escaped characters are converted back to their original
     * representation.
     *
     * @param str
     *                  quoted-printable string to convert into its original form
     * @return original string
     * @throws DecoderException
     *                  A decoder exception is thrown if a failure condition is encountered during the decode process.
     */
    @Override
    public String decode(final String str) throws DecoderException {
        if (str == null) {
            return null;
        }
        try {
            return decodeText(str);
        } catch (final UnsupportedEncodingException e) {
            throw new DecoderException(e.getMessage(), e);
        }
    }

    /**
     * Encodes an object into its quoted-printable form using the default charset. Unsafe characters are escaped.
     *
     * @param obj
     *                  object to convert to quoted-printable form
     * @return quoted-printable object
     * @throws EncoderException
     *                  thrown if a failure condition is encountered during the encoding process.
     */
    @Override
    public Object encode(final Object obj) throws EncoderException {
        if (obj == null) {
            return null;
        } else if (obj instanceof String) {
            return encode((String) obj);
        } else {
            throw new EncoderException("Objects of type " +
                  obj.getClass().getName() +
                  " cannot be encoded using Q codec");
        }
    }

    /**
     * Decodes a quoted-printable object into its original form. Escaped characters are converted back to their original
     * representation.
     *
     * @param obj
     *                  quoted-printable object to convert into its original form
     * @return original object
     * @throws DecoderException
     *                  Thrown if the argument is not a <code>String</code>. Thrown if a failure condition is encountered
     *                  during the decode process.
     */
    @Override
    public Object decode(final Object obj) throws DecoderException {
        if (obj == null) {
            return null;
        } else if (obj instanceof String) {
            return decode((String) obj);
        } else {
            throw new DecoderException("Objects of type " +
                  obj.getClass().getName() +
                  " cannot be decoded using Q codec");
        }
    }

    /**
     * Gets the default charset used for string decoding and encoding.
     *
     * @return the default charset
     * @since 1.7
     */
    public Charset getCharset() {
        return this.charset;
    }

    /**
     * Gets the default charset name used for string decoding and encoding.
     *
     * @return the default charset name
     */
    public String getDefaultCharset() {
        return this.charset.name();
    }

    /**
     * Tests if optional transformation of SPACE characters is to be used
     *
     * @return <code>true</code> if SPACE characters are to be transformed, <code>false</code> otherwise
     */
    public boolean isEncodeBlanks() {
        return this.encodeBlanks;
    }

    /**
     * Defines whether optional transformation of SPACE characters is to be used
     *
     * @param b
     *                  <code>true</code> if SPACE characters are to be transformed, <code>false</code> otherwise
     */
    public void setEncodeBlanks(final boolean b) {
        this.encodeBlanks = b;
    }
}
foreni-packages/bytecode-viewer
src/org/apache/commons/codec/net/QCodec.java
Java
gpl-3.0
12,271
<?php // This file is part of Moodle - http://moodle.org/
//
// Moodle is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Moodle is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Moodle. If not, see <http://www.gnu.org/licenses/>.
?>
// AJAX callback object for prerequisite updates in the SCORM player.
// success: rebuilds the TOC TreeView ('scorm_tree') from the HTML fragment in
// o.responseText and restores the previously highlighted node.
// failure: error handling is currently disabled (see MDL-23470 below).
this.connectPrereqCallback = {
    success: function(o) {
        YUI.use('yui2-treeview', 'yui2-layout', function(Y) {
            scorm_tree_node = Y.YUI2.widget.TreeView.getTree('scorm_tree');
            if (o.responseText !== undefined) {
                //alert('got a response: ' + o.responseText);
                if (scorm_tree_node && o.responseText) {
                    // remember the highlighted node so it can be re-highlighted
                    // after the tree is rebuilt
                    var hnode = scorm_tree_node.getHighlightedNode();
                    var hidx = null;
                    if (hnode) {
                        // NOTE(review): the index is offset by the current node
                        // count, presumably to address the same node in the
                        // rebuilt tree below - confirm against TreeView docs
                        hidx = hnode.index + scorm_tree_node.getNodeCount();
                    }
                    // all gone - remove every existing node from the tree
                    var root_node = scorm_tree_node.getRoot();
                    while (root_node.children.length > 0) {
                        scorm_tree_node.removeNode(root_node.children[0]);
                    }
                }
                // make sure the temporary tree element is not there
                var el_old_tree = document.getElementById('scormtree123');
                if (el_old_tree) {
                    el_old_tree.parentNode.removeChild(el_old_tree);
                }
                // stage the server-supplied markup in a hidden element so the
                // TreeView can be rebuilt from it
                var el_new_tree = document.createElement('div');
                var pagecontent = document.getElementById("page-content");
                el_new_tree.setAttribute('id','scormtree123');
                el_new_tree.innerHTML = o.responseText;
                // make sure it doesn't show
                el_new_tree.style.display = 'none';
                pagecontent.appendChild(el_new_tree)
                // ignore the first level element as this is the title
                var startNode = el_new_tree.firstChild.firstChild;
                if (startNode.tagName == 'LI') {
                    // go back to the beginning
                    startNode = el_new_tree;
                }
                //var sXML = new XMLSerializer().serializeToString(startNode);
                scorm_tree_node.buildTreeFromMarkup('scormtree123');
                // the staging element is no longer needed once the tree is built
                var el = document.getElementById('scormtree123');
                el.parentNode.removeChild(el);
                scorm_tree_node.expandAll();
                scorm_tree_node.render();
                if (hidx != null) {
                    hnode = scorm_tree_node.getNodeByIndex(hidx);
                    if (hnode) {
                        hnode.highlight();
                        scorm_layout_widget = Y.YUI2.widget.Layout.getLayoutById('scorm_layout');
                        var left = scorm_layout_widget.getUnitByPosition('left');
                        // only move focus when the TOC pane is expanded
                        if (left.expanded) {
                            hnode.focus();
                        }
                    }
                }
            }
        });
    },
    failure: function(o) {
        // do some sort of error handling
        var sURL = "<?php echo $CFG->wwwroot; ?>" + "/mod/scorm/prereqs.php?a=<?php echo $scorm->id ?>&scoid=<?php echo $scoid ?>&attempt=<?php echo $attempt ?>&mode=<?php echo $mode ?>&currentorg=<?php echo $currentorg ?>&sesskey=<?php echo sesskey(); ?>";
        //TODO: Enable this error handling correctly - avoiding issues when closing player MDL-23470
        //alert('Prerequisites update failed - must restart SCORM player');
        //window.location.href = sURL;
    }
};
chiefdome/integration
mod/scorm/datamodels/callback.js.php
PHP
gpl-3.0
4,242
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*- # # Copyright (C) 2016 Canonical Ltd # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 as # published by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import integration_tests class LoginLogoutTestCase(integration_tests.StoreTestCase): def test_successful_login(self): self.addCleanup(self.logout) self.login(expect_success=True) def test_failed_login(self): self.login( 'u1test+snapcraft@canonical.com', 'wrongpassword', expect_success=False)
tsdgeos/snapcraft
integration_tests/test_store_login_logout.py
Python
gpl-3.0
1,032
// This file is part of Moodle - http://moodle.org/ // // Moodle is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // Moodle is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with Moodle. If not, see <http://www.gnu.org/licenses/>. /* jshint node: true, browser: false */ /* eslint-env node */ /** * @copyright 2014 Andrew Nicols * @license http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later */ /** * Grunt configuration */ module.exports = function(grunt) { var path = require('path'), tasks = {}, cwd = process.env.PWD || process.cwd(), async = require('async'), DOMParser = require('xmldom').DOMParser, xpath = require('xpath'), semver = require('semver'); // Verify the node version is new enough. var expected = semver.validRange(grunt.file.readJSON('package.json').engines.node); var actual = semver.valid(process.version); if (!semver.satisfies(actual, expected)) { grunt.fail.fatal('Node version too old. Require ' + expected + ', version installed: ' + actual); } // Windows users can't run grunt in a subdirectory, so allow them to set // the root by passing --root=path/to/dir. if (grunt.option('root')) { var root = grunt.option('root'); if (grunt.file.exists(__dirname, root)) { cwd = path.join(__dirname, root); grunt.log.ok('Setting root to ' + cwd); } else { grunt.fail.fatal('Setting root to ' + root + ' failed - path does not exist'); } } var inAMD = path.basename(cwd) == 'amd'; // Globbing pattern for matching all AMD JS source files. var amdSrc = [inAMD ? 
cwd + '/src/*.js' : '**/amd/src/*.js']; /** * Function to generate the destination for the uglify task * (e.g. build/file.min.js). This function will be passed to * the rename property of files array when building dynamically: * http://gruntjs.com/configuring-tasks#building-the-files-object-dynamically * * @param {String} destPath the current destination * @param {String} srcPath the matched src path * @return {String} The rewritten destination path. */ var uglifyRename = function(destPath, srcPath) { destPath = srcPath.replace('src', 'build'); destPath = destPath.replace('.js', '.min.js'); destPath = path.resolve(cwd, destPath); return destPath; }; /** * Find thirdpartylibs.xml and generate an array of paths contained within * them (used to generate ignore files and so on). * * @return {array} The list of thirdparty paths. */ var getThirdPartyPathsFromXML = function() { var thirdpartyfiles = grunt.file.expand('*/**/thirdpartylibs.xml'); var libs = ['node_modules/', 'vendor/']; thirdpartyfiles.forEach(function(file) { var dirname = path.dirname(file); var doc = new DOMParser().parseFromString(grunt.file.read(file)); var nodes = xpath.select("/libraries/library/location/text()", doc); nodes.forEach(function(node) { var lib = path.join(dirname, node.toString()); if (grunt.file.isDir(lib)) { // Ensure trailing slash on dirs. lib = lib.replace(/\/?$/, '/'); } // Look for duplicate paths before adding to array. if (libs.indexOf(lib) === -1) { libs.push(lib); } }); }); return libs; }; // Project configuration. grunt.initConfig({ eslint: { // Even though warnings dont stop the build we don't display warnings by default because // at this moment we've got too many core warnings. options: {quiet: !grunt.option('show-lint-warnings')}, amd: { src: amdSrc, // Check AMD with some slightly stricter rules. rules: { 'no-unused-vars': 'error', 'no-implicit-globals': 'error' } }, // Check YUI module source files. 
yui: { src: ['**/yui/src/**/*.js', '!*/**/yui/src/*/meta/*.js'], options: { // Disable some rules which we can't safely define for YUI rollups. rules: { 'no-undef': 'off', 'no-unused-vars': 'off', 'no-unused-expressions': 'off' } } } }, uglify: { amd: { files: [{ expand: true, src: amdSrc, rename: uglifyRename }], options: {report: 'none'} } }, less: { bootstrapbase: { files: { "theme/bootstrapbase/style/moodle.css": "theme/bootstrapbase/less/moodle.less", "theme/bootstrapbase/style/editor.css": "theme/bootstrapbase/less/editor.less", }, options: { compress: false // We must not compress to keep the comments. } } }, watch: { options: { nospawn: true // We need not to spawn so config can be changed dynamically. }, amd: { files: ['**/amd/src/**/*.js'], tasks: ['amd'] }, bootstrapbase: { files: ["theme/bootstrapbase/less/**/*.less"], tasks: ["css"] }, yui: { files: ['**/yui/src/**/*.js'], tasks: ['yui'] }, gherkinlint: { files: ['**/tests/behat/*.feature'], tasks: ['gherkinlint'] } }, shifter: { options: { recursive: true, paths: [cwd] } }, gherkinlint: { options: { files: ['**/tests/behat/*.feature'], } }, stylelint: { less: { options: { syntax: 'less', configOverrides: { rules: { // These rules have to be disabled in .stylelintrc for scss compat. "at-rule-no-unknown": true, "no-browser-hacks": [true, {"severity": "warning"}] } } }, src: ['theme/**/*.less'] }, scss: { options: {syntax: 'scss'}, src: ['*/**/*.scss'] }, css: { src: ['*/**/*.css'], options: { configOverrides: { rules: { // These rules have to be disabled in .stylelintrc for scss compat. "at-rule-no-unknown": true, "no-browser-hacks": [true, {"severity": "warning"}] } } } } } }); /** * Generate ignore files (utilising thirdpartylibs.xml data) */ tasks.ignorefiles = function() { // An array of paths to third party directories. var thirdPartyPaths = getThirdPartyPathsFromXML(); // Generate .eslintignore. 
var eslintIgnores = ['# Generated by "grunt ignorefiles"', '*/**/yui/src/*/meta/', '*/**/build/'].concat(thirdPartyPaths); grunt.file.write('.eslintignore', eslintIgnores.join('\n')); // Generate .stylelintignore. var stylelintIgnores = [ '# Generated by "grunt ignorefiles"', 'theme/bootstrapbase/style/', 'theme/clean/style/custom.css', 'theme/more/style/custom.css' ].concat(thirdPartyPaths); grunt.file.write('.stylelintignore', stylelintIgnores.join('\n')); }; /** * Shifter task. Is configured with a path to a specific file or a directory, * in the case of a specific file it will work out the right module to be built. * * Note that this task runs the invidiaul shifter jobs async (becase it spawns * so be careful to to call done(). */ tasks.shifter = function() { var done = this.async(), options = grunt.config('shifter.options'); // Run the shifter processes one at a time to avoid confusing output. async.eachSeries(options.paths, function(src, filedone) { var args = []; args.push(path.normalize(__dirname + '/node_modules/shifter/bin/shifter')); // Always ignore the node_modules directory. args.push('--excludes', 'node_modules'); // Determine the most appropriate options to run with based upon the current location. if (grunt.file.isMatch('**/yui/**/*.js', src)) { // When passed a JS file, build our containing module (this happen with // watch). grunt.log.debug('Shifter passed a specific JS file'); src = path.dirname(path.dirname(src)); options.recursive = false; } else if (grunt.file.isMatch('**/yui/src', src)) { // When in a src directory --walk all modules. grunt.log.debug('In a src directory'); args.push('--walk'); options.recursive = false; } else if (grunt.file.isMatch('**/yui/src/*', src)) { // When in module, only build our module. grunt.log.debug('In a module directory'); options.recursive = false; } else if (grunt.file.isMatch('**/yui/src/*/js', src)) { // When in module src, only build our module. 
grunt.log.debug('In a source directory'); src = path.dirname(src); options.recursive = false; } if (grunt.option('watch')) { grunt.fail.fatal('The --watch option has been removed, please use `grunt watch` instead'); } // Add the stderr option if appropriate if (grunt.option('verbose')) { args.push('--lint-stderr'); } if (grunt.option('no-color')) { args.push('--color=false'); } var execShifter = function() { grunt.log.ok("Running shifter on " + src); grunt.util.spawn({ cmd: "node", args: args, opts: {cwd: src, stdio: 'inherit', env: process.env} }, function(error, result, code) { if (code) { grunt.fail.fatal('Shifter failed with code: ' + code); } else { grunt.log.ok('Shifter build complete.'); filedone(); } }); }; // Actually run shifter. if (!options.recursive) { execShifter(); } else { // Check that there are yui modules otherwise shifter ends with exit code 1. if (grunt.file.expand({cwd: src}, '**/yui/src/**/*.js').length > 0) { args.push('--recursive'); execShifter(); } else { grunt.log.ok('No YUI modules to build.'); filedone(); } } }, done); }; tasks.gherkinlint = function() { var done = this.async(), options = grunt.config('gherkinlint.options'); var args = grunt.file.expand(options.files); args.unshift(path.normalize(__dirname + '/node_modules/.bin/gherkin-lint')); grunt.util.spawn({ cmd: 'node', args: args, opts: {stdio: 'inherit', env: process.env} }, function(error, result, code) { // Propagate the exit code. done(code === 0); }); }; tasks.startup = function() { // Are we in a YUI directory? if (path.basename(path.resolve(cwd, '../../')) == 'yui') { grunt.task.run('yui'); // Are we in an AMD directory? } else if (inAMD) { grunt.task.run('amd'); } else { // Run them all!. grunt.task.run('css'); grunt.task.run('js'); grunt.task.run('gherkinlint'); } }; // On watch, we dynamically modify config to build only affected files. This // method is slightly complicated to deal with multiple changed files at once (copied // from the grunt-contrib-watch readme). 
var changedFiles = Object.create(null); var onChange = grunt.util._.debounce(function() { var files = Object.keys(changedFiles); grunt.config('eslint.amd.src', files); grunt.config('eslint.yui.src', files); grunt.config('uglify.amd.files', [{expand: true, src: files, rename: uglifyRename}]); grunt.config('shifter.options.paths', files); grunt.config('stylelint.less.src', files); grunt.config('gherkinlint.options.files', files); changedFiles = Object.create(null); }, 200); grunt.event.on('watch', function(action, filepath) { changedFiles[filepath] = action; onChange(); }); // Register NPM tasks. grunt.loadNpmTasks('grunt-contrib-uglify'); grunt.loadNpmTasks('grunt-contrib-less'); grunt.loadNpmTasks('grunt-contrib-watch'); grunt.loadNpmTasks('grunt-eslint'); grunt.loadNpmTasks('grunt-stylelint'); // Register JS tasks. grunt.registerTask('shifter', 'Run Shifter against the current directory', tasks.shifter); grunt.registerTask('gherkinlint', 'Run gherkinlint against the current directory', tasks.gherkinlint); grunt.registerTask('ignorefiles', 'Generate ignore files for linters', tasks.ignorefiles); grunt.registerTask('yui', ['eslint:yui', 'shifter']); grunt.registerTask('amd', ['eslint:amd', 'uglify']); grunt.registerTask('js', ['amd', 'yui']); // Register CSS taks. grunt.registerTask('css', ['stylelint:scss', 'stylelint:less', 'less:bootstrapbase', 'stylelint:css']); // Register the startup task. grunt.registerTask('startup', 'Run the correct tasks for the current directory', tasks.startup); // Register the default task. grunt.registerTask('default', ['startup']); };
macuco/moodlemacuco
Gruntfile.js
JavaScript
gpl-3.0
15,185
<?php /** * Shopware 5 * Copyright (c) shopware AG * * According to our dual licensing model, this program can be used either * under the terms of the GNU Affero General Public License, version 3, * or under a proprietary license. * * The texts of the GNU Affero General Public License with an additional * permission and of our proprietary license can be found at and * in the LICENSE file you have received along with this program. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * "Shopware" is a registered trademark of shopware AG. * The licensing of the program under the AGPLv3 does not imply a * trademark license. Therefore any rights, title and interest in * our trademarks remain entirely with us. */ namespace Shopware\Bundle\StoreFrontBundle\Gateway\DBAL; use Doctrine\DBAL\Connection; use Shopware\Bundle\StoreFrontBundle\Struct; use Shopware\Bundle\StoreFrontBundle\Gateway; /** * @category Shopware * @package Shopware\Bundle\StoreFrontBundle\Gateway\DBAL * @copyright Copyright (c) shopware AG (http://www.shopware.de) */ class MediaGateway implements Gateway\MediaGatewayInterface { /** * @var Connection */ private $connection; /** * @var FieldHelper */ private $fieldHelper; /** * @var Hydrator\MediaHydrator */ private $hydrator; /** * @param Connection $connection * @param FieldHelper $fieldHelper * @param Hydrator\MediaHydrator $hydrator */ public function __construct( Connection $connection, FieldHelper $fieldHelper, Hydrator\MediaHydrator $hydrator ) { $this->connection = $connection; $this->fieldHelper = $fieldHelper; $this->hydrator = $hydrator; } /** * @inheritdoc */ public function get($id, Struct\ShopContextInterface $context) { $media = $this->getList([$id], $context); return array_shift($media); } /** * @inheritdoc */ public function getList($ids, 
Struct\ShopContextInterface $context) { $query = $this->getQuery($context); $query->setParameter(':ids', $ids, Connection::PARAM_INT_ARRAY); /**@var $statement \Doctrine\DBAL\Driver\ResultStatement */ $statement = $query->execute(); $data = $statement->fetchAll(\PDO::FETCH_ASSOC); $result = []; foreach ($data as $row) { $mediaId = $row['__media_id']; $result[$mediaId] = $this->hydrator->hydrate($row); } return $result; } /** * @return \Doctrine\DBAL\Query\QueryBuilder */ private function getQuery() { $query = $this->connection->createQueryBuilder(); $query->select($this->fieldHelper->getMediaFields()); $query->from('s_media', 'media') ->innerJoin('media', 's_media_album_settings', 'mediaSettings', 'mediaSettings.albumID = media.albumID') ->leftJoin('media', 's_media_attributes', 'mediaAttribute', 'mediaAttribute.mediaID = media.id'); $query->where('media.id IN (:ids)'); return $query; } }
jenalgit/shopware
engine/Shopware/Bundle/StoreFrontBundle/Gateway/DBAL/MediaGateway.php
PHP
agpl-3.0
3,335
<?php /** * DOMDocument loader test for FluentDOMLoaderDOMNode * * @version $Id: DOMNodeTest.php 430 2010-03-29 15:53:43Z subjective $ * @license http://www.opensource.org/licenses/mit-license.php The MIT License * @copyright Copyright (c) 2009 Bastian Feder, Thomas Weinert * * @package FluentDOM * @subpackage UnitTests */ /** * load necessary files */ require_once('PHPUnit/Framework.php'); require_once(dirname(__FILE__).'/../../../FluentDOM/Loader/DOMNode.php'); PHPUnit_Util_Filter::addFileToFilter(__FILE__); /** * Test class for FluentDOMLoaderDOMNode. * * @package FluentDOM * @subpackage UnitTests */ class FluentDOMLoaderDOMNodeTest extends PHPUnit_Framework_TestCase { public function testLoad() { $loader = new FluentDOMLoaderDOMNode(); $dom = new DOMDocument(); $node = $dom->appendChild($dom->createElement('root')); $result = $loader->load($node, 'text/xml'); $this->assertType(PHPUnit_Framework_Constraint_IsType::TYPE_ARRAY, $result); $this->assertTrue($result[0] instanceof DOMDocument); $this->assertSame('root', $result[1][0]->tagName); } public function testLoadInvalid() { $loader = new FluentDOMLoaderDOMNode(); $result = $loader->load(NULL, 'text/xml'); $this->assertFalse($result); } public function testLoadInvalidWithDOMDocument() { $dom = new DOMDocument(); $loader = new FluentDOMLoaderDOMNode(); $result = $loader->load($dom, 'text/xml'); $this->assertFalse($result); } } ?>
kyfr59/atom-cg35
vendor/FluentDOM/tests/FluentDOM/Loader/DOMNodeTest.php
PHP
agpl-3.0
1,480
<?php /** * PHP-DI * * @link http://mnapoli.github.com/PHP-DI/ * @copyright Matthieu Napoli (http://mnapoli.fr/) * @license http://www.opensource.org/licenses/mit-license.php MIT (see the LICENSE file) */ namespace DI\Definition\Resolver; use DI\Definition\Definition; use DI\Definition\EnvironmentVariableDefinition; use DI\Definition\Exception\DefinitionException; use DI\Definition\Helper\DefinitionHelper; /** * Resolves a environment variable definition to a value. * * @author James Harris <james.harris@icecave.com.au> */ class EnvironmentVariableResolver implements DefinitionResolver { /** * @var DefinitionResolver */ private $definitionResolver; /** * @var callable */ private $variableReader; public function __construct(DefinitionResolver $definitionResolver, $variableReader = 'getenv') { $this->definitionResolver = $definitionResolver; $this->variableReader = $variableReader; } /** * Resolve an environment variable definition to a value. * * @param EnvironmentVariableDefinition $definition * * {@inheritdoc} */ public function resolve(Definition $definition, array $parameters = array()) { $this->assertIsEnvironmentVariableDefinition($definition); $value = call_user_func($this->variableReader, $definition->getVariableName()); if (false !== $value) { return $value; } elseif (!$definition->isOptional()) { throw new DefinitionException(sprintf( "The environment variable '%s' has not been defined", $definition->getVariableName() )); } $value = $definition->getDefaultValue(); // Nested definition if ($value instanceof DefinitionHelper) { return $this->definitionResolver->resolve($value->getDefinition('')); } return $value; } /** * @param EnvironmentVariableDefinition $definition * * {@inheritdoc} */ public function isResolvable(Definition $definition, array $parameters = array()) { $this->assertIsEnvironmentVariableDefinition($definition); return $definition->isOptional() || false !== call_user_func($this->variableReader, $definition->getVariableName()); } private 
function assertIsEnvironmentVariableDefinition(Definition $definition) { if (!$definition instanceof EnvironmentVariableDefinition) { throw new \InvalidArgumentException(sprintf( 'This definition resolver is only compatible with EnvironmentVariableDefinition objects, %s given', get_class($definition) )); } } }
befair/soulShape
wp/soulshape.earth/piwik/vendor/php-di/php-di/src/DI/Definition/Resolver/EnvironmentVariableResolver.php
PHP
agpl-3.0
2,757
// ThreadNotify.cs: implements a notification for the thread running the Gtk main // loop from another thread // // Authors: // Miguel de Icaza (miguel@ximian.com). // Gonzalo Paniagua Javier (gonzalo@ximian.com) // // Copyright (c) 2002 Ximian, Inc. // Copyright (c) 2004 Novell, Inc. // // This program is free software; you can redistribute it and/or // modify it under the terms of version 2 of the Lesser GNU General // Public License as published by the Free Software Foundation. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU // Lesser General Public License for more details. // // You should have received a copy of the GNU Lesser General Public // License along with this program; if not, write to the // Free Software Foundation, Inc., 59 Temple Place - Suite 330, // Boston, MA 02111-1307, USA. namespace Gtk { using System.Runtime.InteropServices; using System.Threading; using System; // <summary> // This delegate will be invoked on the main Gtk thread. 
// </summary> public delegate void ReadyEvent (); /// <summary> /// Utility class to help writting multi-threaded Gtk applications /// </summary> /// <remarks/> /// public class ThreadNotify : IDisposable { bool disposed; ReadyEvent re; GLib.IdleHandler idle; bool notified; /// <summary> /// The ReadyEvent delegate will be invoked on the current thread (which should /// be the Gtk thread) when another thread wakes us up by calling WakeupMain /// </summary> public ThreadNotify (ReadyEvent re) { this.re = re; idle = new GLib.IdleHandler (CallbackWrapper); } bool CallbackWrapper () { lock (this) { if (disposed) return false; notified = false; } re (); return false; } /// <summary> /// Invoke this function from a thread to call the `ReadyEvent' /// delegate provided in the constructor on the Main Gtk thread /// </summary> public void WakeupMain () { lock (this){ if (notified) return; notified = true; GLib.Idle.Add (idle); } } public void Close () { Dispose (true); GC.SuppressFinalize (this); } ~ThreadNotify () { Dispose (false); } void IDisposable.Dispose () { Close (); } protected virtual void Dispose (bool disposing) { lock (this) { disposed = true; } } } }
stsundermann/gtk-sharp-gi
sources/custom/ThreadNotify.cs
C#
lgpl-3.0
2,506
using System.Reflection; using System.Runtime.InteropServices; // General Information about an assembly is controlled through the following // set of attributes. Change these attribute values to modify the information // associated with an assembly. [assembly: AssemblyTitle("SampleSmartConsole")] [assembly: AssemblyDescription("")] // The following GUID is for the ID of the typelib if this project is exposed to COM [assembly: Guid("7aa1ea3e-415f-4b0d-a3eb-0cb6aaacd87b")]
imbugs/StockSharp
Samples/SmartCom/SampleSmartConsole/Properties/AssemblyInfo.cs
C#
lgpl-3.0
481
# encoding: utf-8 from test.vim_test_case import VimTestCase as _VimTest from test.constant import * from test.util import running_on_windows # Quotes in Snippets {{{# # Test for Bug #774917 def _snip_quote(qt): return ( ('te' + qt + 'st', 'Expand me' + qt + '!', 'test: ' + qt), ('te', 'Bad', ''), ) class Snippet_With_SingleQuote(_VimTest): snippets = _snip_quote("'") keys = "te'st" + EX wanted = "Expand me'!" class Snippet_With_SingleQuote_List(_VimTest): snippets = _snip_quote("'") keys = 'te' + LS + '2\n' wanted = "Expand me'!" class Snippet_With_DoubleQuote(_VimTest): snippets = _snip_quote('"') keys = 'te"st' + EX wanted = "Expand me\"!" class Snippet_With_DoubleQuote_List(_VimTest): snippets = _snip_quote('"') keys = 'te' + LS + '2\n' wanted = "Expand me\"!" # End: Quotes in Snippets #}}} # Trailing whitespace {{{# class RemoveTrailingWhitespace(_VimTest): snippets = ('test', """Hello\t ${1:default}\n$2""", '', 's') wanted = """Hello\nGoodbye""" keys = 'test' + EX + BS + JF + 'Goodbye' class TrimSpacesAtEndOfLines(_VimTest): snippets = ('test', """next line\n\nshould be empty""", '', 'm') wanted = """\tnext line\n\n\tshould be empty""" keys = '\ttest' + EX class DoNotTrimSpacesAtEndOfLinesByDefault(_VimTest): snippets = ('test', """next line\n\nshould be empty""", '', '') wanted = """\tnext line\n\t\n\tshould be empty""" keys = '\ttest' + EX class LeaveTrailingWhitespace(_VimTest): snippets = ('test', """Hello \t ${1:default}\n$2""") wanted = """Hello \t \nGoodbye""" keys = 'test' + EX + BS + JF + 'Goodbye' # End: Trailing whitespace #}}} # Newline in default text {{{# # Tests for bug 616315 # class TrailingNewline_TabStop_NLInsideStuffBehind(_VimTest): snippets = ('test', r""" x${1: }<-behind1 $2<-behind2""") keys = 'test' + EX + 'j' + JF + 'k' wanted = """ xj<-behind1 k<-behind2""" class TrailingNewline_TabStop_JustNL(_VimTest): snippets = ('test', r""" x${1: } $2""") keys = 'test' + EX + 'j' + JF + 'k' wanted = """ xj k""" class 
TrailingNewline_TabStop_EndNL(_VimTest): snippets = ('test', r""" x${1:a } $2""") keys = 'test' + EX + 'j' + JF + 'k' wanted = """ xj k""" class TrailingNewline_TabStop_StartNL(_VimTest): snippets = ('test', r""" x${1: a} $2""") keys = 'test' + EX + 'j' + JF + 'k' wanted = """ xj k""" class TrailingNewline_TabStop_EndStartNL(_VimTest): snippets = ('test', r""" x${1: a } $2""") keys = 'test' + EX + 'j' + JF + 'k' wanted = """ xj k""" class TrailingNewline_TabStop_NotEndStartNL(_VimTest): snippets = ('test', r""" x${1:a a} $2""") keys = 'test' + EX + 'j' + JF + 'k' wanted = """ xj k""" class TrailingNewline_TabStop_ExtraNL_ECR(_VimTest): snippets = ('test', r""" x${1:a a} $2 """) keys = 'test' + EX + 'j' + JF + 'k' wanted = """ xj k """ class _MultiLineDefault(_VimTest): snippets = ('test', r""" x${1:a b c d e f} $2""") class MultiLineDefault_Jump(_MultiLineDefault): keys = 'test' + EX + JF + 'y' wanted = """ xa b c d e f y""" class MultiLineDefault_Type(_MultiLineDefault): keys = 'test' + EX + 'z' + JF + 'y' wanted = """ xz y""" class MultiLineDefault_BS(_MultiLineDefault): keys = 'test' + EX + BS + JF + 'y' wanted = """ x y""" # End: Newline in default text #}}} # Umlauts and Special Chars {{{# class _UmlautsBase(_VimTest): # SendKeys can't send UTF characters skip_if = lambda self: running_on_windows() class Snippet_With_Umlauts_List(_UmlautsBase): snippets = _snip_quote('ü') keys = 'te' + LS + '2\n' wanted = 'Expand meü!' class Snippet_With_Umlauts(_UmlautsBase): snippets = _snip_quote('ü') keys = 'teüst' + EX wanted = 'Expand meü!' 
class Snippet_With_Umlauts_TypeOn(_UmlautsBase): snippets = ('ül', 'üüüüüßßßß') keys = 'te ül' + EX + 'more text' wanted = 'te üüüüüßßßßmore text' class Snippet_With_Umlauts_OverwriteFirst(_UmlautsBase): snippets = ('ül', 'üü ${1:world} üü ${2:hello}ßß\nüüüü') keys = 'te ül' + EX + 'more text' + JF + JF + 'end' wanted = 'te üü more text üü helloßß\nüüüüend' class Snippet_With_Umlauts_OverwriteSecond(_UmlautsBase): snippets = ('ül', 'üü ${1:world} üü ${2:hello}ßß\nüüüü') keys = 'te ül' + EX + JF + 'more text' + JF + 'end' wanted = 'te üü world üü more textßß\nüüüüend' class Snippet_With_Umlauts_OverwriteNone(_UmlautsBase): snippets = ('ül', 'üü ${1:world} üü ${2:hello}ßß\nüüüü') keys = 'te ül' + EX + JF + JF + 'end' wanted = 'te üü world üü helloßß\nüüüüend' class Snippet_With_Umlauts_Mirrors(_UmlautsBase): snippets = ('ül', 'üü ${1:world} üü $1') keys = 'te ül' + EX + 'hello' wanted = 'te üü hello üü hello' class Snippet_With_Umlauts_Python(_UmlautsBase): snippets = ('ül', 'üü ${1:world} üü `!p snip.rv = len(t[1])*"a"`') keys = 'te ül' + EX + 'hüüll' wanted = 'te üü hüüll üü aaaaa' class UmlautsBeforeTriggerAndCharsAfter(_UmlautsBase): snippets = ('trig', 'success') keys = 'ööuu trig b' + 2 * ARR_L + EX wanted = 'ööuu success b' class NoUmlautsBeforeTriggerAndCharsAfter(_UmlautsBase): snippets = ('trig', 'success') keys = 'oouu trig b' + 2 * ARR_L + EX wanted = 'oouu success b' # End: Umlauts and Special Chars #}}}
Insanityandme/dotfiles
vim/bundle/ultisnips/test/test_Chars.py
Python
unlicense
5,521
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jena.hadoop.rdf.mapreduce.filter; import java.io.IOException; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.mrunit.mapreduce.MapDriver; import org.apache.jena.hadoop.rdf.mapreduce.AbstractMapperTests; import org.apache.jena.hadoop.rdf.mapreduce.filter.AbstractNodeTupleFilterMapper; import org.apache.jena.hadoop.rdf.types.AbstractNodeTupleWritable; import org.junit.Test; /** * Abstract tests for {@link AbstractNodeTupleFilterMapper} implementations * which filter based on the validity of tuples * * * * @param <TValue> * Tuple type * @param <T> * Writable tuple type */ public abstract class AbstractNodeTupleFilterTests<TValue, T extends AbstractNodeTupleWritable<TValue>> extends AbstractMapperTests<LongWritable, T, LongWritable, T> { protected final void generateData(MapDriver<LongWritable, T, LongWritable, T> driver, int num) { for (int i = 0; i < num; i++) { LongWritable key = new LongWritable(i); if (i % 2 == 0 && !this.noValidInputs()) { T value = this.createValidValue(i); driver.addInput(key, value); if (!this.isInverted()) driver.addOutput(key, value); } else { T value = this.createInvalidValue(i); driver.addInput(key, value); if (this.isInverted()) 
driver.addOutput(key, value); } } } /** * Method that may be overridden for testing filters where all the generated * data will be rejected as invalid * * @return True if there are no valid inputs, false otherwise (default) */ protected boolean noValidInputs() { return false; } /** * Method that may be overridden for testing filters with inverted mode * enabled i.e. where normally valid input is considered invalid and vice * versa * * @return True if inverted, false otherwise (default) */ protected boolean isInverted() { return false; } /** * Creates an invalid value * * @param i * Key * @return Invalid value */ protected abstract T createInvalidValue(int i); /** * Creates a valid value * * @param i * Key * @return Valid value */ protected abstract T createValidValue(int i); protected final void testFilterValid(int num) throws IOException { MapDriver<LongWritable, T, LongWritable, T> driver = this.getMapDriver(); this.generateData(driver, num); driver.runTest(); } /** * Test splitting tuples into their constituent nodes * * @throws IOException */ @Test public final void filter_valid_01() throws IOException { this.testFilterValid(1); } /** * Test splitting tuples into their constituent nodes * * @throws IOException */ @Test public final void filter_valid_02() throws IOException { this.testFilterValid(100); } /** * Test splitting tuples into their constituent nodes * * @throws IOException */ @Test public final void filter_valid_03() throws IOException { this.testFilterValid(1000); } /** * Test splitting tuples into their constituent nodes * * @throws IOException */ @Test public final void filter_valid_04() throws IOException { this.testFilterValid(2500); } }
samaitra/jena
jena-elephas/jena-elephas-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/filter/AbstractNodeTupleFilterTests.java
Java
apache-2.0
4,405
/* * Copyright 2010-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ /* * Do not modify this file. This file is generated from the elasticloadbalancing-2012-06-01.normal.json service model. */ using System; using System.Collections.Generic; using System.Xml.Serialization; using System.Text; using System.IO; using Amazon.Runtime; using Amazon.Runtime.Internal; namespace Amazon.ElasticLoadBalancing.Model { /// <summary> /// Container for the parameters to the CreateLoadBalancerPolicy operation. /// Creates a new policy that contains the necessary attributes depending on the policy /// type. Policies are settings that are saved for your load balancer and that can be /// applied to the front-end listener, or the back-end application server, depending on /// your policy type. /// </summary> public partial class CreateLoadBalancerPolicyRequest : AmazonElasticLoadBalancingRequest { private string _loadBalancerName; private List<PolicyAttribute> _policyAttributes = new List<PolicyAttribute>(); private string _policyName; private string _policyTypeName; /// <summary> /// Gets and sets the property LoadBalancerName. /// <para> /// The name associated with the LoadBalancer for which the policy is being created. 
/// /// </para> /// </summary> public string LoadBalancerName { get { return this._loadBalancerName; } set { this._loadBalancerName = value; } } // Check to see if LoadBalancerName property is set internal bool IsSetLoadBalancerName() { return this._loadBalancerName != null; } /// <summary> /// Gets and sets the property PolicyAttributes. /// <para> /// A list of attributes associated with the policy being created. /// </para> /// </summary> public List<PolicyAttribute> PolicyAttributes { get { return this._policyAttributes; } set { this._policyAttributes = value; } } // Check to see if PolicyAttributes property is set internal bool IsSetPolicyAttributes() { return this._policyAttributes != null && this._policyAttributes.Count > 0; } /// <summary> /// Gets and sets the property PolicyName. /// <para> /// The name of the load balancer policy being created. The name must be unique within /// the set of policies for this load balancer. /// </para> /// </summary> public string PolicyName { get { return this._policyName; } set { this._policyName = value; } } // Check to see if PolicyName property is set internal bool IsSetPolicyName() { return this._policyName != null; } /// <summary> /// Gets and sets the property PolicyTypeName. /// <para> /// The name of the base policy type being used to create this policy. To get the list /// of policy types, use the <a>DescribeLoadBalancerPolicyTypes</a> action. /// </para> /// </summary> public string PolicyTypeName { get { return this._policyTypeName; } set { this._policyTypeName = value; } } // Check to see if PolicyTypeName property is set internal bool IsSetPolicyTypeName() { return this._policyTypeName != null; } } }
ykbarros/aws-sdk-xamarin
AWS.XamarinSDK/AWSSDK_iOS/Amazon.ElasticLoadBalancing/Model/CreateLoadBalancerPolicyRequest.cs
C#
apache-2.0
4,107
/* ==================================================================== Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for Additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==================================================================== */ using NPOI.HWPF.UserModel; using NPOI.HWPF.Model; using System; namespace NPOI.HWPF.UserModel { /** * A HeaderStory is a Header, a Footer, or footnote/endnote * separator. * All the Header Stories get stored in the same Range in the * document, and this handles Getting out all the individual * parts. * * WARNING - you shouldn't change the headers or footers, * as offSets are not yet updated! 
*/ public class HeaderStories { private Range headerStories; private PlexOfCps plcfHdd; private bool stripFields = false; public HeaderStories(HWPFDocument doc) { this.headerStories = doc.GetHeaderStoryRange(); FileInformationBlock fib = doc.GetFileInformationBlock(); // If there's no PlcfHdd, nothing to do if (fib.GetCcpHdd() == 0) { return; } if (fib.GetPlcfHddSize() == 0) { return; } // Handle the PlcfHdd plcfHdd = new PlexOfCps( doc.GetTableStream(), fib.GetPlcfHddOffset(), fib.GetPlcfHddSize(), 0 ); } public String FootnoteSeparator { get{ return GetAt(0); } } public String FootnoteContSeparator { get{ return GetAt(1); } } public String FootnoteContNote { get{ return GetAt(2); } } public String EndnoteSeparator { get{ return GetAt(3); } } public String EndnoteContSeparator { get{ return GetAt(4); } } public String EndnoteContNote { get { return GetAt(5); } } public String EvenHeader { get { return GetAt(6 + 0); } } public String OddHeader { get { return GetAt(6 + 1); } } public String FirstHeader { get { return GetAt(6 + 4); } } /** * Returns the correct, defined header for the given * one based page * @param pageNumber The one based page number */ public String GetHeader(int pageNumber) { // First page header is optional, only return // if it's set if (pageNumber == 1) { if (FirstHeader.Length > 0) { return FirstHeader; } } // Even page header is optional, only return // if it's set if (pageNumber % 2 == 0) { if (EvenHeader.Length > 0) { return EvenHeader; } } // Odd is the default return OddHeader; } public String EvenFooter { get { return GetAt(6 + 2); } } public String OddFooter { get { return GetAt(6 + 3); } } public String FirstFooter { get { return GetAt(6 + 5); } } /** * Returns the correct, defined footer for the given * one based page * @param pageNumber The one based page number */ public String GetFooter(int pageNumber) { // First page footer is optional, only return // if it's set if (pageNumber == 1) { if (FirstFooter.Length > 0) { return 
FirstFooter; } } // Even page footer is optional, only return // if it's set if (pageNumber % 2 == 0) { if (EvenFooter.Length > 0) { return EvenFooter; } } // Odd is the default return OddFooter; } /** * Get the string that's pointed to by the * given plcfHdd index */ private String GetAt(int plcfHddIndex) { if (plcfHdd == null) return null; GenericPropertyNode prop = plcfHdd.GetProperty(plcfHddIndex); if (prop.Start == prop.End) { // Empty story return ""; } if (prop.End < prop.Start) { // Broken properties? return ""; } // Ensure we're Getting a sensible length String rawText = headerStories.Text; int start = Math.Min(prop.Start, rawText.Length); int end = Math.Min(prop.End, rawText.Length); // Grab the contents String text = rawText.Substring(start, end-start); // Strip off fields and macros if requested if (stripFields) { return Range.StripFields(text); } // If you create a header/footer, then remove it again, word // will leave \r\r. Turn these back into an empty string, // which is more what you'd expect if (text.Equals("\r\r")) { return ""; } return text; } public Range GetRange() { return headerStories; } internal PlexOfCps GetPlcfHdd() { return plcfHdd; } /** * Are fields currently being stripped from * the text that this {@link HeaderStories} returns? * Default is false, but can be Changed */ public bool AreFieldsStripped { get { return stripFields; } set { this.stripFields = value; } } } }
antony-liu/npoi
scratchpad/HWPF/UserModel/HeaderStories.cs
C#
apache-2.0
7,342
package org.apache.helix; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import java.util.List; import org.apache.helix.model.Message; /** * Interface to implement when there is a change to messages */ public interface MessageListener { /** * Invoked when message changes * @param instanceName * @param messages * @param changeContext */ public void onMessage(String instanceName, List<Message> messages, NotificationContext changeContext); }
OopsOutOfMemory/helix
helix-core/src/main/java/org/apache/helix/MessageListener.java
Java
apache-2.0
1,237
/* * Copyright 2013-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.junit; import com.facebook.buck.util.concurrent.MoreExecutors; import org.junit.runner.Description; import org.junit.runner.Runner; import org.junit.runner.notification.RunNotifier; import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; /** * {@link Runner} that composes a {@link Runner} that enforces a default timeout when running a * test. */ class DelegateRunnerWithTimeout extends Runner { /** * {@link ExecutorService} on which all tests run by this {@link Runner} are executed. * <p> * In Robolectric, the {@code ShadowLooper.resetThreadLoopers()} asserts that the current thread * is the same as the thread on which the {@code ShadowLooper} class was loaded. Therefore, to * preserve the behavior of the {@code org.robolectric.RobolectricTestRunner}, we use an * {@link ExecutorService} to create and run the test on. This has the unfortunate side effect of * creating one thread per runner, but JUnit ensures that they're all called serially, so the * overall effect is that of having only a single thread. * <p> * We use a {@link ThreadLocal} so that if a test spawns more tests that create their own runners * we don't deadlock. 
*/ private static final ThreadLocal<ExecutorService> executor = new ThreadLocal<ExecutorService>() { @Override protected ExecutorService initialValue() { return MoreExecutors.newSingleThreadExecutor(DelegateRunnerWithTimeout.class.getSimpleName()); } }; private final Runner delegate; private final long defaultTestTimeoutMillis; DelegateRunnerWithTimeout(Runner delegate, long defaultTestTimeoutMillis) { if (defaultTestTimeoutMillis <= 0) { throw new IllegalArgumentException(String.format( "defaultTestTimeoutMillis must be greater than zero but was: %s.", defaultTestTimeoutMillis)); } this.delegate = delegate; this.defaultTestTimeoutMillis = defaultTestTimeoutMillis; } /** * @return the description from the original {@link Runner} wrapped by this {@link Runner}. */ @Override public Description getDescription() { return delegate.getDescription(); } /** * Runs the tests for this runner, but wraps the specified {@code notifier} with a * {@link DelegateRunNotifier} that intercepts calls to the original {@code notifier}. * The {@link DelegateRunNotifier} is what enables us to impose our default timeout. */ @Override public void run(RunNotifier notifier) { final DelegateRunNotifier wrapper = new DelegateRunNotifier( delegate, notifier, defaultTestTimeoutMillis); // We run the Runner in an Executor so that we can tear it down if we need to. Future<?> future = executor.get().submit(new Runnable() { @Override public void run() { delegate.run(wrapper); } }); // We poll the Executor to see if the Runner is complete. In the event that a test has exceeded // the default timeout, we cancel the Runner to protect against the case where the test hangs // forever. while (true) { if (future.isDone()) { // Normal termination: hooray! return; } else if (wrapper.hasTestThatExceededTimeout()) { // The test results that have been reported to the RunNotifier should still be output, but // there may be tests that did not have a chance to run. 
Unfortunately, we have no way to // tell the Runner to cancel only the runaway test. executor.get().shutdownNow(); return; } else { // Tests are still running, so wait and try again. try { Thread.sleep(/* milliseconds */ 250L); } catch (InterruptedException e) { Thread.currentThread().interrupt(); } } } } }
neonichu/buck
src/com/facebook/buck/junit/DelegateRunnerWithTimeout.java
Java
apache-2.0
4,423
/* * Copyright (C) 2011 the original author or authors. * See the notice.md file distributed with this work for additional * information regarding copyright ownership. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.iq80.leveldb.impl; import org.iq80.leveldb.util.Slice; import java.io.File; import java.io.IOException; public interface LogWriter { boolean isClosed(); void close() throws IOException; void delete() throws IOException; File getFile(); long getFileNumber(); // Writes a stream of chunks such that no chunk is split across a block boundary void addRecord(Slice record, boolean force) throws IOException; }
gaoch023/leveldb-1
leveldb/src/main/java/org/iq80/leveldb/impl/LogWriter.java
Java
apache-2.0
1,222
package com.crawljax.core; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Queue; import java.util.concurrent.BlockingQueue; import java.util.concurrent.locks.Lock; import javax.inject.Inject; import javax.inject.Provider; import javax.inject.Singleton; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.codahale.metrics.Counter; import com.codahale.metrics.MetricRegistry; import com.crawljax.core.configuration.BrowserConfiguration; import com.crawljax.core.state.Eventable.EventType; import com.crawljax.core.state.StateFlowGraph; import com.crawljax.core.state.StateVertex; import com.crawljax.metrics.MetricsModule; import com.google.common.collect.ImmutableList; import com.google.common.collect.Maps; import com.google.common.collect.Queues; import com.google.common.util.concurrent.Striped; /** * Contains all the {@link CandidateCrawlAction}s that still have to be fired to get a result. */ @Singleton public class UnfiredCandidateActions { private static final Logger LOG = LoggerFactory.getLogger(UnfiredCandidateActions.class); private final Map<Integer, Queue<CandidateCrawlAction>> cache; private final BlockingQueue<Integer> statesWithCandidates; private final Striped<Lock> locks; private final Provider<StateFlowGraph> sfg; private final Counter crawlerLostCount; private final Counter unfiredActionsCount; @Inject UnfiredCandidateActions(BrowserConfiguration config, Provider<StateFlowGraph> sfg, MetricRegistry registry) { this.sfg = sfg; cache = Maps.newHashMap(); statesWithCandidates = Queues.newLinkedBlockingQueue(); // Every browser gets a lock. 
locks = Striped.lock(config.getNumberOfBrowsers()); crawlerLostCount = registry.register(MetricsModule.EVENTS_PREFIX + "crawler_lost", new Counter()); unfiredActionsCount = registry.register(MetricsModule.EVENTS_PREFIX + "unfired_actions", new Counter()); } /** * @param state * The state you want to poll an {@link CandidateCrawlAction} for. * @return The next to-be-crawled action or <code>null</code> if none available. */ CandidateCrawlAction pollActionOrNull(StateVertex state) { LOG.debug("Polling action for state {}", state.getName()); Lock lock = locks.get(state.getId()); try { lock.lock(); Queue<CandidateCrawlAction> queue = cache.get(state.getId()); if (queue == null) { return null; } else { CandidateCrawlAction action = queue.poll(); if (queue.isEmpty()) { LOG.debug("All actions polled for state {}", state.getName()); cache.remove(state.getId()); removeStateFromQueue(state.getId()); LOG.debug("There are now {} states with unfinished actions", cache.size()); } return action; } } finally { lock.unlock(); } } private void removeStateFromQueue(int id) { while (statesWithCandidates.remove(id)) { LOG.trace("Removed id {} from the queue", id); } } /** * @param extract * The actions you want to add to a state. * @param currentState * The state you are in. */ public void addActions(ImmutableList<CandidateElement> extract, StateVertex currentState) { List<CandidateCrawlAction> actions = new ArrayList<>(extract.size()); for (CandidateElement candidateElement : extract) { actions.add(new CandidateCrawlAction(candidateElement, EventType.click)); } addActions(actions, currentState); } /** * @param actions * The actions you want to add to a state. * @param state * The state name. This should be unique per state. */ void addActions(Collection<CandidateCrawlAction> actions, StateVertex state) { if (actions.isEmpty()) { LOG.debug("Received empty actions list. 
Ignoring..."); return; } Lock lock = locks.get(state.getId()); try { lock.lock(); LOG.debug("Adding {} crawl actions for state {}", actions.size(), state.getId()); if (cache.containsKey(state.getId())) { cache.get(state.getId()).addAll(actions); } else { cache.put(state.getId(), Queues.newConcurrentLinkedQueue(actions)); } statesWithCandidates.add(state.getId()); LOG.info("There are {} states with unfired actions", statesWithCandidates.size()); } finally { lock.unlock(); } } /** * @return If there are any pending actions to be crawled. This method is not threadsafe and * might return a stale value. */ public boolean isEmpty() { return statesWithCandidates.isEmpty(); } /** * @return A new crawl task as soon as one is ready. Until then, it blocks. * @throws InterruptedException * when taking from the queue is interrupted. */ public StateVertex awaitNewTask() throws InterruptedException { int id = statesWithCandidates.take(); // Put it back the end of the queue. It will be removed later. statesWithCandidates.add(id); LOG.debug("New task polled for state {}", id); LOG.info("There are {} states with unfired actions", statesWithCandidates.size()); return sfg.get().getById(id); } public void purgeActionsForState(StateVertex crawlTask) { Lock lock = locks.get(crawlTask.getId()); try { lock.lock(); LOG.debug("Removing tasks for target state {}", crawlTask.getName()); removeStateFromQueue(crawlTask.getId()); Queue<CandidateCrawlAction> removed = cache.remove(crawlTask.getId()); if (removed != null) { unfiredActionsCount.inc(removed.size()); } } finally { lock.unlock(); crawlerLostCount.inc(); } } }
fivejjs/crawljax
core/src/main/java/com/crawljax/core/UnfiredCandidateActions.java
Java
apache-2.0
5,551
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.namenode; import java.io.File; import java.io.IOException; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.URI; import java.security.PrivilegedAction; import java.security.PrivilegedExceptionAction; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.hdfs.server.common.HdfsConstants; import org.apache.hadoop.hdfs.server.common.InconsistentFSStateException; import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocol; import org.apache.hadoop.http.HttpServer; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.metrics2.source.JvmMetricsSource; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.Krb5AndCertsSslSocketConnector; import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.UserGroupInformation; import 
org.apache.hadoop.util.Daemon; import org.apache.hadoop.util.StringUtils; /********************************************************** * The Secondary NameNode is a helper to the primary NameNode. * The Secondary is responsible for supporting periodic checkpoints * of the HDFS metadata. The current design allows only one Secondary * NameNode per HDFs cluster. * * The Secondary NameNode is a daemon that periodically wakes * up (determined by the schedule specified in the configuration), * triggers a periodic checkpoint and then goes back to sleep. * The Secondary NameNode uses the ClientProtocol to talk to the * primary NameNode. * **********************************************************/ public class SecondaryNameNode implements Runnable { static{ Configuration.addDefaultResource("hdfs-default.xml"); Configuration.addDefaultResource("hdfs-site.xml"); } public static final Log LOG = LogFactory.getLog(SecondaryNameNode.class.getName()); private String fsName; private CheckpointStorage checkpointImage; private NamenodeProtocol namenode; private Configuration conf; private InetSocketAddress nameNodeAddr; private volatile boolean shouldRun; private HttpServer infoServer; private int infoPort; private int imagePort; private String infoBindAddress; private Collection<File> checkpointDirs; private Collection<File> checkpointEditsDirs; private long checkpointPeriod; // in seconds private long checkpointSize; // size (in MB) of current Edit Log /** * Utility class to facilitate junit test error simulation. 
*/ static class ErrorSimulator { private static boolean[] simulation = null; // error simulation events static void initializeErrorSimulationEvent(int numberOfEvents) { simulation = new boolean[numberOfEvents]; for (int i = 0; i < numberOfEvents; i++) { simulation[i] = false; } } static boolean getErrorSimulation(int index) { if(simulation == null) return false; assert(index < simulation.length); return simulation[index]; } static void setErrorSimulation(int index) { assert(index < simulation.length); simulation[index] = true; } static void clearErrorSimulation(int index) { assert(index < simulation.length); simulation[index] = false; } } FSImage getFSImage() { return checkpointImage; } /** * Create a connection to the primary namenode. */ public SecondaryNameNode(Configuration conf) throws IOException { try { initialize(conf); } catch(IOException e) { shutdown(); throw e; } } @SuppressWarnings("deprecation") public static InetSocketAddress getHttpAddress(Configuration conf) { String infoAddr = NetUtils.getServerAddress(conf, "dfs.secondary.info.bindAddress", "dfs.secondary.info.port", "dfs.secondary.http.address"); return NetUtils.createSocketAddr(infoAddr); } /** * Initialize SecondaryNameNode. */ private void initialize(final Configuration conf) throws IOException { final InetSocketAddress infoSocAddr = getHttpAddress(conf); infoBindAddress = infoSocAddr.getHostName(); if (UserGroupInformation.isSecurityEnabled()) { SecurityUtil.login(conf, DFSConfigKeys.DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY, DFSConfigKeys.DFS_SECONDARY_NAMENODE_USER_NAME_KEY, infoBindAddress); } // initiate Java VM metrics JvmMetricsSource.create("SecondaryNameNode", conf.get("session.id")); // Create connection to the namenode. 
shouldRun = true; nameNodeAddr = NameNode.getServiceAddress(conf, true); this.conf = conf; this.namenode = (NamenodeProtocol) RPC.waitForProxy(NamenodeProtocol.class, NamenodeProtocol.versionID, nameNodeAddr, conf); // initialize checkpoint directories fsName = getInfoServer(); checkpointDirs = FSImage.getCheckpointDirs(conf, "/tmp/hadoop/dfs/namesecondary"); checkpointEditsDirs = FSImage.getCheckpointEditsDirs(conf, "/tmp/hadoop/dfs/namesecondary"); checkpointImage = new CheckpointStorage(); checkpointImage.recoverCreate(checkpointDirs, checkpointEditsDirs); // Initialize other scheduling parameters from the configuration checkpointPeriod = conf.getLong("fs.checkpoint.period", 3600); checkpointSize = conf.getLong("fs.checkpoint.size", 4194304); // initialize the webserver for uploading files. // Kerberized SSL servers must be run from the host principal... UserGroupInformation httpUGI = UserGroupInformation.loginUserFromKeytabAndReturnUGI( SecurityUtil.getServerPrincipal(conf .get(DFSConfigKeys.DFS_SECONDARY_NAMENODE_KRB_HTTPS_USER_NAME_KEY), infoBindAddress), conf.get(DFSConfigKeys.DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY)); try { infoServer = httpUGI.doAs(new PrivilegedExceptionAction<HttpServer>() { @Override public HttpServer run() throws IOException, InterruptedException { LOG.info("Starting web server as: " + UserGroupInformation.getCurrentUser().getUserName()); int tmpInfoPort = infoSocAddr.getPort(); infoServer = new HttpServer("secondary", infoBindAddress, tmpInfoPort, tmpInfoPort == 0, conf, SecurityUtil.getAdminAcls(conf, DFSConfigKeys.DFS_ADMIN)); if(UserGroupInformation.isSecurityEnabled()) { System.setProperty("https.cipherSuites", Krb5AndCertsSslSocketConnector.KRB5_CIPHER_SUITES.get(0)); InetSocketAddress secInfoSocAddr = NetUtils.createSocketAddr(infoBindAddress + ":"+ conf.get( "dfs.secondary.https.port", infoBindAddress + ":" + 0)); imagePort = secInfoSocAddr.getPort(); infoServer.addSslListener(secInfoSocAddr, conf, false, true); } 
infoServer.setAttribute("name.system.image", checkpointImage); infoServer.setAttribute(JspHelper.CURRENT_CONF, conf); infoServer.addInternalServlet("getimage", "/getimage", GetImageServlet.class, true); infoServer.start(); return infoServer; } }); } catch (InterruptedException e) { throw new RuntimeException(e); } LOG.info("Web server init done"); // The web-server port can be ephemeral... ensure we have the correct info infoPort = infoServer.getPort(); if(!UserGroupInformation.isSecurityEnabled()) imagePort = infoPort; conf.set("dfs.secondary.http.address", infoBindAddress + ":" +infoPort); LOG.info("Secondary Web-server up at: " + infoBindAddress + ":" +infoPort); LOG.info("Secondary image servlet up at: " + infoBindAddress + ":" + imagePort); LOG.warn("Checkpoint Period :" + checkpointPeriod + " secs " + "(" + checkpointPeriod/60 + " min)"); LOG.warn("Log Size Trigger :" + checkpointSize + " bytes " + "(" + checkpointSize/1024 + " KB)"); } /** * Shut down this instance of the datanode. * Returns only after shutdown is complete. */ public void shutdown() { shouldRun = false; try { if (infoServer != null) infoServer.stop(); } catch (Exception e) { LOG.warn("Exception shutting down SecondaryNameNode", e); } try { if (checkpointImage != null) checkpointImage.close(); } catch(IOException e) { LOG.warn(StringUtils.stringifyException(e)); } } public void run() { if (UserGroupInformation.isSecurityEnabled()) { UserGroupInformation ugi = null; try { ugi = UserGroupInformation.getLoginUser(); } catch (IOException e) { LOG.error(StringUtils.stringifyException(e)); e.printStackTrace(); Runtime.getRuntime().exit(-1); } ugi.doAs(new PrivilegedAction<Object>() { @Override public Object run() { doWork(); return null; } }); } else { doWork(); } } // // The main work loop // public void doWork() { // // Poll the Namenode (once every 5 minutes) to find the size of the // pending edit log. 
// long period = 5 * 60; // 5 minutes long lastCheckpointTime = 0; if (checkpointPeriod < period) { period = checkpointPeriod; } while (shouldRun) { try { Thread.sleep(1000 * period); } catch (InterruptedException ie) { // do nothing } if (!shouldRun) { break; } try { // We may have lost our ticket since last checkpoint, log in again, just in case if(UserGroupInformation.isSecurityEnabled()) UserGroupInformation.getCurrentUser().reloginFromKeytab(); long now = System.currentTimeMillis(); long size = namenode.getEditLogSize(); if (size >= checkpointSize || now >= lastCheckpointTime + 1000 * checkpointPeriod) { doCheckpoint(); lastCheckpointTime = now; } } catch (IOException e) { LOG.error("Exception in doCheckpoint: "); LOG.error(StringUtils.stringifyException(e)); e.printStackTrace(); } catch (Throwable e) { LOG.error("Throwable Exception in doCheckpoint: "); LOG.error(StringUtils.stringifyException(e)); e.printStackTrace(); Runtime.getRuntime().exit(-1); } } } /** * Download <code>fsimage</code> and <code>edits</code> * files from the name-node. 
* @throws IOException */ private void downloadCheckpointFiles(final CheckpointSignature sig ) throws IOException { try { UserGroupInformation.getCurrentUser().doAs(new PrivilegedExceptionAction<Void>() { @Override public Void run() throws Exception { checkpointImage.cTime = sig.cTime; checkpointImage.checkpointTime = sig.checkpointTime; // get fsimage String fileid = "getimage=1"; File[] srcNames = checkpointImage.getImageFiles(); assert srcNames.length > 0 : "No checkpoint targets."; TransferFsImage.getFileClient(fsName, fileid, srcNames); LOG.info("Downloaded file " + srcNames[0].getName() + " size " + srcNames[0].length() + " bytes."); // get edits file fileid = "getedit=1"; srcNames = checkpointImage.getEditsFiles(); assert srcNames.length > 0 : "No checkpoint targets."; TransferFsImage.getFileClient(fsName, fileid, srcNames); LOG.info("Downloaded file " + srcNames[0].getName() + " size " + srcNames[0].length() + " bytes."); checkpointImage.checkpointUploadDone(); return null; } }); } catch (InterruptedException e) { throw new RuntimeException(e); } } /** * Copy the new fsimage into the NameNode */ private void putFSImage(CheckpointSignature sig) throws IOException { String fileid = "putimage=1&port=" + imagePort + "&machine=" + infoBindAddress + "&token=" + sig.toString(); LOG.info("Posted URL " + fsName + fileid); TransferFsImage.getFileClient(fsName, fileid, (File[])null); } /** * Returns the Jetty server that the Namenode is listening on. */ private String getInfoServer() throws IOException { URI fsName = FileSystem.getDefaultUri(conf); if (!"hdfs".equals(fsName.getScheme())) { throw new IOException("This is not a DFS"); } String infoAddr = NameNode.getInfoServer(conf); LOG.debug("infoAddr = " + infoAddr); return infoAddr; } /** * Create a new checkpoint */ void doCheckpoint() throws IOException { // Do the required initialization of the merge work area. 
startCheckpoint(); // Tell the namenode to start logging transactions in a new edit file // Retuns a token that would be used to upload the merged image. CheckpointSignature sig = (CheckpointSignature)namenode.rollEditLog(); // error simulation code for junit test if (ErrorSimulator.getErrorSimulation(0)) { throw new IOException("Simulating error0 " + "after creating edits.new"); } downloadCheckpointFiles(sig); // Fetch fsimage and edits doMerge(sig); // Do the merge // // Upload the new image into the NameNode. Then tell the Namenode // to make this new uploaded image as the most current image. // putFSImage(sig); // error simulation code for junit test if (ErrorSimulator.getErrorSimulation(1)) { throw new IOException("Simulating error1 " + "after uploading new image to NameNode"); } namenode.rollFsImage(); checkpointImage.endCheckpoint(); LOG.warn("Checkpoint done. New Image Size: " + checkpointImage.getFsImageName().length()); } private void startCheckpoint() throws IOException { checkpointImage.unlockAll(); checkpointImage.getEditLog().close(); checkpointImage.recoverCreate(checkpointDirs, checkpointEditsDirs); checkpointImage.startCheckpoint(); } /** * Merge downloaded image and edits and write the new image into * current storage directory. */ private void doMerge(CheckpointSignature sig) throws IOException { FSNamesystem namesystem = new FSNamesystem(checkpointImage, conf); assert namesystem.dir.fsImage == checkpointImage; checkpointImage.doMerge(sig); } /** * @param argv The parameters passed to this program. * @exception Exception if the filesystem does not exist. * @return 0 on success, non zero on error. 
*/ private int processArgs(String[] argv) throws Exception { if (argv.length < 1) { printUsage(""); return -1; } int exitCode = -1; int i = 0; String cmd = argv[i++]; // // verify that we have enough command line parameters // if ("-geteditsize".equals(cmd)) { if (argv.length != 1) { printUsage(cmd); return exitCode; } } else if ("-checkpoint".equals(cmd)) { if (argv.length != 1 && argv.length != 2) { printUsage(cmd); return exitCode; } if (argv.length == 2 && !"force".equals(argv[i])) { printUsage(cmd); return exitCode; } } exitCode = 0; try { if ("-checkpoint".equals(cmd)) { long size = namenode.getEditLogSize(); if (size >= checkpointSize || argv.length == 2 && "force".equals(argv[i])) { doCheckpoint(); } else { System.err.println("EditLog size " + size + " bytes is " + "smaller than configured checkpoint " + "size " + checkpointSize + " bytes."); System.err.println("Skipping checkpoint."); } } else if ("-geteditsize".equals(cmd)) { long size = namenode.getEditLogSize(); System.out.println("EditLog size is " + size + " bytes"); } else { exitCode = -1; LOG.error(cmd.substring(1) + ": Unknown command"); printUsage(""); } } catch (RemoteException e) { // // This is a error returned by hadoop server. Print // out the first line of the error mesage, ignore the stack trace. exitCode = -1; try { String[] content; content = e.getLocalizedMessage().split("\n"); LOG.error(cmd.substring(1) + ": " + content[0]); } catch (Exception ex) { LOG.error(cmd.substring(1) + ": " + ex.getLocalizedMessage()); } } catch (IOException e) { // // IO exception encountered locally. // exitCode = -1; LOG.error(cmd.substring(1) + ": " + e.getLocalizedMessage()); } finally { // Does the RPC connection need to be closed? } return exitCode; } /** * Displays format of commands. * @param cmd The command that is being executed. 
*/ private void printUsage(String cmd) { if ("-geteditsize".equals(cmd)) { System.err.println("Usage: java SecondaryNameNode" + " [-geteditsize]"); } else if ("-checkpoint".equals(cmd)) { System.err.println("Usage: java SecondaryNameNode" + " [-checkpoint [force]]"); } else { System.err.println("Usage: java SecondaryNameNode " + "[-checkpoint [force]] " + "[-geteditsize] "); } } /** * main() has some simple utility methods. * @param argv Command line parameters. * @exception Exception if the filesystem does not exist. */ public static void main(String[] argv) throws Exception { StringUtils.startupShutdownMessage(SecondaryNameNode.class, argv, LOG); Configuration tconf = new Configuration(); if (argv.length >= 1) { SecondaryNameNode secondary = new SecondaryNameNode(tconf); int ret = secondary.processArgs(argv); System.exit(ret); } // Create a never ending deamon Daemon checkpointThread = new Daemon(new SecondaryNameNode(tconf)); checkpointThread.start(); } static class CheckpointStorage extends FSImage { /** */ CheckpointStorage() throws IOException { super(); } @Override public boolean isConversionNeeded(StorageDirectory sd) { return false; } /** * Analyze checkpoint directories. * Create directories if they do not exist. * Recover from an unsuccessful checkpoint is necessary. 
* * @param dataDirs * @param editsDirs * @throws IOException */ void recoverCreate(Collection<File> dataDirs, Collection<File> editsDirs) throws IOException { Collection<File> tempDataDirs = new ArrayList<File>(dataDirs); Collection<File> tempEditsDirs = new ArrayList<File>(editsDirs); this.storageDirs = new ArrayList<StorageDirectory>(); setStorageDirectories(tempDataDirs, tempEditsDirs); for (Iterator<StorageDirectory> it = dirIterator(); it.hasNext();) { StorageDirectory sd = it.next(); boolean isAccessible = true; try { // create directories if don't exist yet if(!sd.getRoot().mkdirs()) { // do nothing, directory is already created } } catch(SecurityException se) { isAccessible = false; } if(!isAccessible) throw new InconsistentFSStateException(sd.getRoot(), "cannot access checkpoint directory."); StorageState curState; try { curState = sd.analyzeStorage(HdfsConstants.StartupOption.REGULAR); // sd is locked but not opened switch(curState) { case NON_EXISTENT: // fail if any of the configured checkpoint dirs are inaccessible throw new InconsistentFSStateException(sd.getRoot(), "checkpoint directory does not exist or is not accessible."); case NOT_FORMATTED: break; // it's ok since initially there is no current and VERSION case NORMAL: break; default: // recovery is possible sd.doRecover(curState); } } catch (IOException ioe) { sd.unlock(); throw ioe; } } } /** * Prepare directories for a new checkpoint. * <p> * Rename <code>current</code> to <code>lastcheckpoint.tmp</code> * and recreate <code>current</code>. * @throws IOException */ void startCheckpoint() throws IOException { for(StorageDirectory sd : storageDirs) { moveCurrent(sd); } } void endCheckpoint() throws IOException { for(StorageDirectory sd : storageDirs) { moveLastCheckpoint(sd); } } /** * Merge image and edits, and verify consistency with the signature. 
*/ private void doMerge(CheckpointSignature sig) throws IOException { getEditLog().open(); StorageDirectory sdName = null; StorageDirectory sdEdits = null; Iterator<StorageDirectory> it = null; it = dirIterator(NameNodeDirType.IMAGE); if (it.hasNext()) sdName = it.next(); it = dirIterator(NameNodeDirType.EDITS); if (it.hasNext()) sdEdits = it.next(); if ((sdName == null) || (sdEdits == null)) throw new IOException("Could not locate checkpoint directories"); loadFSImage(FSImage.getImageFile(sdName, NameNodeFile.IMAGE)); loadFSEdits(sdEdits); sig.validateStorageInfo(this); saveNamespace(false); } } }
pombredanne/brisk-hadoop-common
src/hdfs/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
Java
apache-2.0
23,008
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.engine;

import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.Iterator;

/**
 * Accumulates per-shard Lucene segment statistics (segment count, estimated
 * memory usage broken down by data structure, and optional per-extension file
 * sizes). Instances are additive: {@link #add(SegmentsStats)} merges another
 * stats object into this one, and the class supports wire serialization
 * ({@link Streamable}) and REST rendering ({@link ToXContent}).
 *
 * NOTE(review): field order in {@link #readFrom} and {@link #writeTo} is the
 * wire format — it must stay in sync on both sides; do not reorder.
 */
public class SegmentsStats implements Streamable, ToXContent {

    // Number of live segments.
    private long count;
    // Total estimated memory of the segments, in bytes; the fields below are
    // per-structure breakdowns of (roughly) this total.
    private long memoryInBytes;
    private long termsMemoryInBytes;
    private long storedFieldsMemoryInBytes;
    private long termVectorsMemoryInBytes;
    private long normsMemoryInBytes;
    private long pointsMemoryInBytes;
    private long docValuesMemoryInBytes;
    // Memory held by the in-flight IndexWriter buffers.
    private long indexWriterMemoryInBytes;
    // Memory held by the live-version map.
    private long versionMapMemoryInBytes;
    // Memory held by cached bit sets.
    private long bitsetMemoryInBytes;
    // On-disk size per Lucene file extension (e.g. "tim" -> bytes); empty unless populated via addFileSizes.
    private ImmutableOpenMap<String, Long> fileSizes = ImmutableOpenMap.of();

    /*
     * A map to provide a best-effort approach describing Lucene index files.
     *
     * Ideally this should be in sync to what the current version of Lucene is using, but it's harmless to leave extensions out,
     * they'll just miss a proper description in the stats
     */
    private static ImmutableOpenMap<String, String> fileDescriptions = ImmutableOpenMap.<String, String>builder()
            .fPut("si", "Segment Info")
            .fPut("fnm", "Fields")
            .fPut("fdx", "Field Index")
            .fPut("fdt", "Field Data")
            .fPut("tim", "Term Dictionary")
            .fPut("tip", "Term Index")
            .fPut("doc", "Frequencies")
            .fPut("pos", "Positions")
            .fPut("pay", "Payloads")
            .fPut("nvd", "Norms")
            .fPut("nvm", "Norms")
            .fPut("dii", "Points")
            .fPut("dim", "Points")
            .fPut("dvd", "DocValues")
            .fPut("dvm", "DocValues")
            .fPut("tvx", "Term Vector Index")
            .fPut("tvd", "Term Vector Documents")
            .fPut("tvf", "Term Vector Fields")
            .fPut("liv", "Live Documents")
            .build();

    public SegmentsStats() {}

    /** Adds a segment count and its estimated total memory to this stats object. */
    public void add(long count, long memoryInBytes) {
        this.count += count;
        this.memoryInBytes += memoryInBytes;
    }

    public void addTermsMemoryInBytes(long termsMemoryInBytes) {
        this.termsMemoryInBytes += termsMemoryInBytes;
    }

    public void addStoredFieldsMemoryInBytes(long storedFieldsMemoryInBytes) {
        this.storedFieldsMemoryInBytes += storedFieldsMemoryInBytes;
    }

    public void addTermVectorsMemoryInBytes(long termVectorsMemoryInBytes) {
        this.termVectorsMemoryInBytes += termVectorsMemoryInBytes;
    }

    public void addNormsMemoryInBytes(long normsMemoryInBytes) {
        this.normsMemoryInBytes += normsMemoryInBytes;
    }

    public void addPointsMemoryInBytes(long pointsMemoryInBytes) {
        this.pointsMemoryInBytes += pointsMemoryInBytes;
    }

    public void addDocValuesMemoryInBytes(long docValuesMemoryInBytes) {
        this.docValuesMemoryInBytes += docValuesMemoryInBytes;
    }

    public void addIndexWriterMemoryInBytes(long indexWriterMemoryInBytes) {
        this.indexWriterMemoryInBytes += indexWriterMemoryInBytes;
    }

    public void addVersionMapMemoryInBytes(long versionMapMemoryInBytes) {
        this.versionMapMemoryInBytes += versionMapMemoryInBytes;
    }

    public void addBitsetMemoryInBytes(long bitsetMemoryInBytes) {
        this.bitsetMemoryInBytes += bitsetMemoryInBytes;
    }

    /**
     * Merges per-extension file sizes into this object, summing values for
     * extensions already present.
     */
    public void addFileSizes(ImmutableOpenMap<String, Long> fileSizes) {
        ImmutableOpenMap.Builder<String, Long> map = ImmutableOpenMap.builder(this.fileSizes);
        for (Iterator<ObjectObjectCursor<String, Long>> it = fileSizes.iterator(); it.hasNext();) {
            ObjectObjectCursor<String, Long> entry = it.next();
            if (map.containsKey(entry.key)) {
                Long oldValue = map.get(entry.key);
                map.put(entry.key, oldValue + entry.value);
            } else {
                map.put(entry.key, entry.value);
            }
        }
        this.fileSizes = map.build();
    }

    /** Merges all counters of {@code mergeStats} into this object; a null argument is a no-op. */
    public void add(SegmentsStats mergeStats) {
        if (mergeStats == null) {
            return;
        }
        add(mergeStats.count, mergeStats.memoryInBytes);
        addTermsMemoryInBytes(mergeStats.termsMemoryInBytes);
        addStoredFieldsMemoryInBytes(mergeStats.storedFieldsMemoryInBytes);
        addTermVectorsMemoryInBytes(mergeStats.termVectorsMemoryInBytes);
        addNormsMemoryInBytes(mergeStats.normsMemoryInBytes);
        addPointsMemoryInBytes(mergeStats.pointsMemoryInBytes);
        addDocValuesMemoryInBytes(mergeStats.docValuesMemoryInBytes);
        addIndexWriterMemoryInBytes(mergeStats.indexWriterMemoryInBytes);
        addVersionMapMemoryInBytes(mergeStats.versionMapMemoryInBytes);
        addBitsetMemoryInBytes(mergeStats.bitsetMemoryInBytes);
        addFileSizes(mergeStats.fileSizes);
    }

    /**
     * The number of segments.
     */
    public long getCount() {
        return this.count;
    }

    /**
     * Estimation of the memory usage used by a segment.
     */
    public long getMemoryInBytes() {
        return this.memoryInBytes;
    }

    public ByteSizeValue getMemory() {
        return new ByteSizeValue(memoryInBytes);
    }

    /**
     * Estimation of the terms dictionary memory usage by a segment.
     */
    public long getTermsMemoryInBytes() {
        return this.termsMemoryInBytes;
    }

    public ByteSizeValue getTermsMemory() {
        return new ByteSizeValue(termsMemoryInBytes);
    }

    /**
     * Estimation of the stored fields memory usage by a segment.
     */
    public long getStoredFieldsMemoryInBytes() {
        return this.storedFieldsMemoryInBytes;
    }

    public ByteSizeValue getStoredFieldsMemory() {
        return new ByteSizeValue(storedFieldsMemoryInBytes);
    }

    /**
     * Estimation of the term vectors memory usage by a segment.
     */
    public long getTermVectorsMemoryInBytes() {
        return this.termVectorsMemoryInBytes;
    }

    public ByteSizeValue getTermVectorsMemory() {
        return new ByteSizeValue(termVectorsMemoryInBytes);
    }

    /**
     * Estimation of the norms memory usage by a segment.
     */
    public long getNormsMemoryInBytes() {
        return this.normsMemoryInBytes;
    }

    public ByteSizeValue getNormsMemory() {
        return new ByteSizeValue(normsMemoryInBytes);
    }

    /**
     * Estimation of the points memory usage by a segment.
     */
    public long getPointsMemoryInBytes() {
        return this.pointsMemoryInBytes;
    }

    public ByteSizeValue getPointsMemory() {
        return new ByteSizeValue(pointsMemoryInBytes);
    }

    /**
     * Estimation of the doc values memory usage by a segment.
     */
    public long getDocValuesMemoryInBytes() {
        return this.docValuesMemoryInBytes;
    }

    public ByteSizeValue getDocValuesMemory() {
        return new ByteSizeValue(docValuesMemoryInBytes);
    }

    /**
     * Estimation of the memory usage by index writer
     */
    public long getIndexWriterMemoryInBytes() {
        return this.indexWriterMemoryInBytes;
    }

    public ByteSizeValue getIndexWriterMemory() {
        return new ByteSizeValue(indexWriterMemoryInBytes);
    }

    /**
     * Estimation of the memory usage by version map
     */
    public long getVersionMapMemoryInBytes() {
        return this.versionMapMemoryInBytes;
    }

    public ByteSizeValue getVersionMapMemory() {
        return new ByteSizeValue(versionMapMemoryInBytes);
    }

    /**
     * Estimation of how much the cached bit sets are taking. (which nested and p/c rely on)
     */
    public long getBitsetMemoryInBytes() {
        return bitsetMemoryInBytes;
    }

    public ByteSizeValue getBitsetMemory() {
        return new ByteSizeValue(bitsetMemoryInBytes);
    }

    /** Per-extension on-disk file sizes in bytes; empty unless explicitly populated. */
    public ImmutableOpenMap<String, Long> getFileSizes() {
        return fileSizes;
    }

    /** Deserializes a {@link SegmentsStats} from the stream (factory wrapper around {@link #readFrom}). */
    public static SegmentsStats readSegmentsStats(StreamInput in) throws IOException {
        SegmentsStats stats = new SegmentsStats();
        stats.readFrom(in);
        return stats;
    }

    /**
     * Renders the stats as a "segments" object, with a nested "file_sizes"
     * object mapping each extension to its size plus a human-readable
     * description (falling back to "Others" for unknown extensions).
     */
    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(Fields.SEGMENTS);
        builder.field(Fields.COUNT, count);
        builder.byteSizeField(Fields.MEMORY_IN_BYTES, Fields.MEMORY, memoryInBytes);
        builder.byteSizeField(Fields.TERMS_MEMORY_IN_BYTES, Fields.TERMS_MEMORY, termsMemoryInBytes);
        builder.byteSizeField(Fields.STORED_FIELDS_MEMORY_IN_BYTES, Fields.STORED_FIELDS_MEMORY, storedFieldsMemoryInBytes);
        builder.byteSizeField(Fields.TERM_VECTORS_MEMORY_IN_BYTES, Fields.TERM_VECTORS_MEMORY, termVectorsMemoryInBytes);
        builder.byteSizeField(Fields.NORMS_MEMORY_IN_BYTES, Fields.NORMS_MEMORY, normsMemoryInBytes);
        builder.byteSizeField(Fields.POINTS_MEMORY_IN_BYTES, Fields.POINTS_MEMORY, pointsMemoryInBytes);
        builder.byteSizeField(Fields.DOC_VALUES_MEMORY_IN_BYTES, Fields.DOC_VALUES_MEMORY, docValuesMemoryInBytes);
        builder.byteSizeField(Fields.INDEX_WRITER_MEMORY_IN_BYTES, Fields.INDEX_WRITER_MEMORY, indexWriterMemoryInBytes);
        builder.byteSizeField(Fields.VERSION_MAP_MEMORY_IN_BYTES, Fields.VERSION_MAP_MEMORY, versionMapMemoryInBytes);
        builder.byteSizeField(Fields.FIXED_BIT_SET_MEMORY_IN_BYTES, Fields.FIXED_BIT_SET, bitsetMemoryInBytes);
        builder.startObject(Fields.FILE_SIZES);
        for (Iterator<ObjectObjectCursor<String, Long>> it = fileSizes.iterator(); it.hasNext();) {
            ObjectObjectCursor<String, Long> entry = it.next();
            builder.startObject(entry.key);
            builder.byteSizeField(Fields.SIZE_IN_BYTES, Fields.SIZE, entry.value);
            builder.field(Fields.DESCRIPTION, fileDescriptions.getOrDefault(entry.key, "Others"));
            builder.endObject();
        }
        builder.endObject();
        builder.endObject();
        return builder;
    }

    // JSON field names used by toXContent.
    static final class Fields {
        static final String SEGMENTS = "segments";
        static final String COUNT = "count";
        static final String MEMORY = "memory";
        static final String MEMORY_IN_BYTES = "memory_in_bytes";
        static final String TERMS_MEMORY = "terms_memory";
        static final String TERMS_MEMORY_IN_BYTES = "terms_memory_in_bytes";
        static final String STORED_FIELDS_MEMORY = "stored_fields_memory";
        static final String STORED_FIELDS_MEMORY_IN_BYTES = "stored_fields_memory_in_bytes";
        static final String TERM_VECTORS_MEMORY = "term_vectors_memory";
        static final String TERM_VECTORS_MEMORY_IN_BYTES = "term_vectors_memory_in_bytes";
        static final String NORMS_MEMORY = "norms_memory";
        static final String NORMS_MEMORY_IN_BYTES = "norms_memory_in_bytes";
        static final String POINTS_MEMORY = "points_memory";
        static final String POINTS_MEMORY_IN_BYTES = "points_memory_in_bytes";
        static final String DOC_VALUES_MEMORY = "doc_values_memory";
        static final String DOC_VALUES_MEMORY_IN_BYTES = "doc_values_memory_in_bytes";
        static final String INDEX_WRITER_MEMORY = "index_writer_memory";
        static final String INDEX_WRITER_MEMORY_IN_BYTES = "index_writer_memory_in_bytes";
        static final String VERSION_MAP_MEMORY = "version_map_memory";
        static final String VERSION_MAP_MEMORY_IN_BYTES = "version_map_memory_in_bytes";
        static final String FIXED_BIT_SET = "fixed_bit_set";
        static final String FIXED_BIT_SET_MEMORY_IN_BYTES = "fixed_bit_set_memory_in_bytes";
        static final String FILE_SIZES = "file_sizes";
        static final String SIZE = "size";
        static final String SIZE_IN_BYTES = "size_in_bytes";
        static final String DESCRIPTION = "description";
    }

    // Wire format: count is vlong; every memory counter is a plain long, in
    // declaration order; then a vint-counted list of (string, long) file sizes.
    @Override
    public void readFrom(StreamInput in) throws IOException {
        count = in.readVLong();
        memoryInBytes = in.readLong();
        termsMemoryInBytes = in.readLong();
        storedFieldsMemoryInBytes = in.readLong();
        termVectorsMemoryInBytes = in.readLong();
        normsMemoryInBytes = in.readLong();
        pointsMemoryInBytes = in.readLong();
        docValuesMemoryInBytes = in.readLong();
        indexWriterMemoryInBytes = in.readLong();
        versionMapMemoryInBytes = in.readLong();
        bitsetMemoryInBytes = in.readLong();
        int size = in.readVInt();
        ImmutableOpenMap.Builder<String, Long> map = ImmutableOpenMap.builder(size);
        for (int i = 0; i < size; i++) {
            String key = in.readString();
            Long value = in.readLong();
            map.put(key, value);
        }
        fileSizes = map.build();
    }

    // Must mirror readFrom exactly (same fields, same order, same encodings).
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeVLong(count);
        out.writeLong(memoryInBytes);
        out.writeLong(termsMemoryInBytes);
        out.writeLong(storedFieldsMemoryInBytes);
        out.writeLong(termVectorsMemoryInBytes);
        out.writeLong(normsMemoryInBytes);
        out.writeLong(pointsMemoryInBytes);
        out.writeLong(docValuesMemoryInBytes);
        out.writeLong(indexWriterMemoryInBytes);
        out.writeLong(versionMapMemoryInBytes);
        out.writeLong(bitsetMemoryInBytes);
        out.writeVInt(fileSizes.size());
        for (Iterator<ObjectObjectCursor<String, Long>> it = fileSizes.iterator(); it.hasNext();) {
            ObjectObjectCursor<String, Long> entry = it.next();
            out.writeString(entry.key);
            out.writeLong(entry.value);
        }
    }
}
danielmitterdorfer/elasticsearch
core/src/main/java/org/elasticsearch/index/engine/SegmentsStats.java
Java
apache-2.0
14,688
package client

// Constants naming the ConfigMapKeySelector type and its field keys.
// NOTE(review): the original path names this file "zz_generated_..." — it
// appears to be machine-generated; prefer regenerating over hand-editing.
const (
	ConfigMapKeySelectorType          = "configMapKeySelector"
	ConfigMapKeySelectorFieldKey      = "key"
	ConfigMapKeySelectorFieldName     = "name"
	ConfigMapKeySelectorFieldOptional = "optional"
)

// ConfigMapKeySelector selects a single key from a named ConfigMap.
// All fields are optional in the serialized form (omitempty); Optional is a
// pointer so that "unset" can be distinguished from an explicit false.
type ConfigMapKeySelector struct {
	Key      string `json:"key,omitempty" yaml:"key,omitempty"`
	Name     string `json:"name,omitempty" yaml:"name,omitempty"`
	Optional *bool  `json:"optional,omitempty" yaml:"optional,omitempty"`
}
cjellick/rancher
vendor/github.com/rancher/types/client/project/v3/zz_generated_config_map_key_selector.go
GO
apache-2.0
454
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.pdfbox.pdmodel.common.filespecification; import java.io.IOException; import org.apache.pdfbox.cos.COSBase; import org.apache.pdfbox.cos.COSDictionary; import org.apache.pdfbox.cos.COSString; import org.apache.pdfbox.pdmodel.common.COSObjectable; /** * This represents a file specification. * * @author Ben Litchfield */ public abstract class PDFileSpecification implements COSObjectable { /** * A file specfication can either be a COSString or a COSDictionary. This * will create the file specification either way. * * @param base The cos object that describes the fs. * * @return The file specification for the COSBase object. * * @throws IOException If there is an error creating the file spec. 
*/ public static PDFileSpecification createFS( COSBase base ) throws IOException { PDFileSpecification retval = null; if( base == null ) { //then simply return null } else if( base instanceof COSString ) { retval = new PDSimpleFileSpecification( (COSString)base ); } else if( base instanceof COSDictionary ) { retval = new PDComplexFileSpecification( (COSDictionary)base ); } else { throw new IOException( "Error: Unknown file specification " + base ); } return retval; } /** * This will get the file name. * * @return The file name. */ public abstract String getFile(); /** * This will set the file name. * * @param file The name of the file. */ public abstract void setFile( String file ); }
ZhenyaM/veraPDF-pdfbox
pdfbox/src/main/java/org/apache/pdfbox/pdmodel/common/filespecification/PDFileSpecification.java
Java
apache-2.0
2,502
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jackrabbit.core.security.user;

import org.apache.jackrabbit.core.security.SecurityConstants;
import org.apache.jackrabbit.test.JUnitTest;

import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * <code>PasswordUtilityTest</code>: unit tests for hashing, plain-text
 * detection, hash comparison and algorithm extraction in
 * {@code PasswordUtility}.
 */
public class PasswordUtilityTest extends JUnitTest {

    // Plain-text fixtures, deliberately including edge cases: empty string,
    // and strings that contain "{algo}"-style prefixes without being hashes.
    private static List<String> PLAIN_PWDS = new ArrayList<String>();
    static {
        PLAIN_PWDS.add("pw");
        PLAIN_PWDS.add("PassWord123");
        PLAIN_PWDS.add("_");
        PLAIN_PWDS.add("{invalidAlgo}");
        PLAIN_PWDS.add("{invalidAlgo}Password");
        PLAIN_PWDS.add("{SHA-256}");
        PLAIN_PWDS.add("pw{SHA-256}");
        PLAIN_PWDS.add("p{SHA-256}w");
        PLAIN_PWDS.add("");
    }

    // plain password -> hash built with the default algorithm/parameters.
    private static Map<String, String> HASHED_PWDS = new HashMap<String, String>();
    static {
        for (String pw : PLAIN_PWDS) {
            try {
                HASHED_PWDS.put(pw, PasswordUtility.buildPasswordHash(pw));
            } catch (Exception e) {
                // should not get here
            }
        }
    }

    // Hashing must never return the input itself, for the default parameters
    // and for a range of salt-size/iteration combinations (including 0/-1).
    public void testBuildPasswordHash() throws Exception {
        for (String pw : PLAIN_PWDS) {
            String pwHash = PasswordUtility.buildPasswordHash(pw);
            assertFalse(pw.equals(pwHash));
        }

        // {saltSize, iterations} parameter combinations to exercise.
        List<Integer[]> l = new ArrayList<Integer[]>();
        l.add(new Integer[] {0, 1000});
        l.add(new Integer[] {1, 10});
        l.add(new Integer[] {8, 50});
        l.add(new Integer[] {10, 5});
        l.add(new Integer[] {-1, -1});

        for (Integer[] params : l) {
            for (String pw : PLAIN_PWDS) {
                int saltsize = params[0];
                int iterations = params[1];

                String pwHash = PasswordUtility.buildPasswordHash(pw, PasswordUtility.DEFAULT_ALGORITHM, saltsize, iterations);
                assertFalse(pw.equals(pwHash));
            }
        }
    }

    // Unknown algorithm names must raise NoSuchAlgorithmException.
    public void testBuildPasswordHashInvalidAlgorithm() throws Exception {
        List<String> invalidAlgorithms = new ArrayList<String>();
        invalidAlgorithms.add("");
        invalidAlgorithms.add("+");
        invalidAlgorithms.add("invalid");

        for (String invalid : invalidAlgorithms) {
            try {
                String pwHash = PasswordUtility.buildPasswordHash("pw", invalid, PasswordUtility.DEFAULT_SALT_SIZE, PasswordUtility.DEFAULT_ITERATIONS);
                fail("Invalid algorithm " + invalid);
            } catch (NoSuchAlgorithmException e) {
                // success
            }
        }
    }

    // All plain fixtures must be classified as plain text.
    public void testIsPlainTextPassword() throws Exception {
        for (String pw : PLAIN_PWDS) {
            assertTrue(pw + " should be plain text.", PasswordUtility.isPlainTextPassword(pw));
        }
    }

    // null counts as plain text (no hash prefix present).
    public void testIsPlainTextForNull() throws Exception {
        assertTrue(PasswordUtility.isPlainTextPassword(null));
    }

    // Generated hashes must never be classified as plain text.
    public void testIsPlainTextForPwHash() throws Exception {
        for (String pwHash : HASHED_PWDS.values()) {
            assertFalse(pwHash + " should not be plain text.", PasswordUtility.isPlainTextPassword(pwHash));
        }
    }

    // isSame must match a hash against its source password for the default
    // algorithm and also for explicit algorithm/salt/iteration choices.
    public void testIsSame() throws Exception {
        for (String pw : HASHED_PWDS.keySet()) {
            String pwHash = HASHED_PWDS.get(pw);
            assertTrue("Not the same " + pw + ", " + pwHash, PasswordUtility.isSame(pwHash, pw));
        }

        String pw = "password";
        String pwHash = PasswordUtility.buildPasswordHash(pw, SecurityConstants.DEFAULT_DIGEST, 4, 50);
        assertTrue("Not the same '" + pw + "', " + pwHash, PasswordUtility.isSame(pwHash, pw));

        pwHash = PasswordUtility.buildPasswordHash(pw, "md5", 0, 5);
        assertTrue("Not the same '" + pw + "', " + pwHash, PasswordUtility.isSame(pwHash, pw));

        pwHash = PasswordUtility.buildPasswordHash(pw, "md5", -1, -1);
        assertTrue("Not the same '" + pw + "', " + pwHash, PasswordUtility.isSame(pwHash, pw));
    }

    // isSame must reject plain-vs-plain, hash-vs-hash and mismatched pairs.
    public void testIsNotSame() throws Exception {
        String previous = null;
        for (String pw : HASHED_PWDS.keySet()) {
            String pwHash = HASHED_PWDS.get(pw);
            assertFalse(pw, PasswordUtility.isSame(pw, pw));
            assertFalse(pwHash, PasswordUtility.isSame(pwHash, pwHash));
            if (previous != null) {
                assertFalse(previous, PasswordUtility.isSame(pwHash, previous));
            }
            previous = pw;
        }
    }

    // Plain-text passwords carry no algorithm prefix to extract.
    public void testExtractAlgorithmFromPlainPw() throws Exception {
        for (String pw : PLAIN_PWDS) {
            assertNull(pw + " is no pw-hash -> no algorithm expected.", PasswordUtility.extractAlgorithm(pw));
        }
    }

    // null input yields no algorithm.
    public void testExtractAlgorithmFromNull() throws Exception {
        assertNull("null pw -> no algorithm expected.", PasswordUtility.extractAlgorithm(null));
    }

    // The algorithm encoded in a hash must round-trip back out of it.
    public void testExtractAlgorithmFromPwHash() throws Exception {
        for (String pwHash : HASHED_PWDS.values()) {
            String algorithm = PasswordUtility.extractAlgorithm(pwHash);
            assertNotNull(pwHash + " is pw-hash -> algorithm expected.", algorithm);
            assertEquals("Wrong algorithm extracted from " + pwHash, PasswordUtility.DEFAULT_ALGORITHM, algorithm);
        }

        String pwHash = PasswordUtility.buildPasswordHash("pw", SecurityConstants.DEFAULT_DIGEST, 4, 50);
        assertEquals(SecurityConstants.DEFAULT_DIGEST, PasswordUtility.extractAlgorithm(pwHash));

        pwHash = PasswordUtility.buildPasswordHash("pw", "md5", 0, 5);
        assertEquals("md5", PasswordUtility.extractAlgorithm(pwHash));

        pwHash = PasswordUtility.buildPasswordHash("pw", "md5", -1, -1);
        assertEquals("md5", PasswordUtility.extractAlgorithm(pwHash));
    }
}
Overseas-Student-Living/jackrabbit
jackrabbit-core/src/test/java/org/apache/jackrabbit/core/security/user/PasswordUtilityTest.java
Java
apache-2.0
6,666
/*
 * Copyright 2000-2017 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.intellij.plugins.relaxNG.compact;

import com.intellij.codeInsight.TailType;
import com.intellij.codeInsight.completion.*;
import com.intellij.codeInsight.lookup.LookupElementBuilder;
import com.intellij.codeInsight.lookup.TailTypeDecorator;
import com.intellij.patterns.ElementPattern;
import com.intellij.patterns.PsiElementPattern;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiWhiteSpace;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.ArrayUtil;
import com.intellij.util.ProcessingContext;
import org.intellij.plugins.relaxNG.compact.psi.RncDecl;
import org.intellij.plugins.relaxNG.compact.psi.RncDefine;
import org.intellij.plugins.relaxNG.compact.psi.RncGrammar;
import org.intellij.plugins.relaxNG.compact.psi.util.EscapeUtil;
import org.jetbrains.annotations.NotNull;

import static com.intellij.patterns.PlatformPatterns.psiElement;
import static com.intellij.patterns.StandardPatterns.and;
import static com.intellij.patterns.StandardPatterns.not;

/**
 * Keyword completion for RELAX NG Compact syntax files: offers declaration,
 * grammar-content, or pattern keywords depending on where the caret sits in
 * the PSI tree.
 *
 * @author Dennis.Ushakov
 */
public class RncCompletionContributor extends CompletionContributor {
  // Matches positions that are NOT inside a nested grammar (i.e. file top level).
  private static final ElementPattern TOP_LEVEL =
      not(psiElement().inside(psiElement(RncGrammar.class)
          .inside(true, psiElement(RncGrammar.class))));

  // Inside any declaration (default/namespace/datatypes).
  private static final PsiElementPattern DECL_PATTERN =
      psiElement().inside(psiElement(RncDecl.class));

  // Right after the "default" keyword of a declaration.
  private static final PsiElementPattern DEFAULT_PATTERN =
      DECL_PATTERN.afterLeaf(psiElement().withText("default"));

  // On the right-hand side of a define, i.e. after "=" (whitespace skipped).
  private static final ElementPattern DEFINE_PATTERN =
      and(psiElement().withParent(RncDefine.class),
          psiElement().afterLeafSkipping(psiElement(PsiWhiteSpace.class), psiElement().withText("=")));

  // Keywords valid at the start of declarations, in grammar content, and in patterns.
  private static final String[] DECL_KEYWORDS = new String[]{ "default", "namespace", "datatypes" };
  private static final String[] GRAMMAR_CONTENT_KEYWORDS = new String[]{ "include", "div", "start" };
  private static final String[] PATTERN_KEYWORDS = new String[]{
      "attribute", "element", "grammar", "notAllowed", "text", "empty",
      "external", "parent", "list", "mixed" };

  public RncCompletionContributor() {
    // Shared provider: resolves the applicable keyword set for the caret
    // position and emits each keyword bold with a trailing space on insert.
    CompletionProvider<CompletionParameters> provider = new CompletionProvider<CompletionParameters>() {
      @Override
      protected void addCompletions(@NotNull CompletionParameters parameters,
                                    ProcessingContext context,
                                    @NotNull CompletionResultSet result) {
        String[] keywords = getKeywords(parameters.getPosition());
        for (String keyword : keywords) {
          result.addElement(TailTypeDecorator.withTail(LookupElementBuilder.create(keyword).bold(), TailType.SPACE));
        }
      }
    };
    // After the "default" keyword.
    extend(null, psiElement().afterLeaf(psiElement(RncTokenTypes.KEYWORD_DEFAULT)), provider);
    // Anywhere else except inside literals and directly after another keyword.
    extend(null, psiElement().andNot(psiElement().inside(psiElement(RncTokenTypes.LITERAL))).
      andNot(psiElement().afterLeaf(psiElement().withElementType(RncTokenTypes.KEYWORDS))), provider);
  }

  /**
   * Picks the keyword set for the given caret element. Order of checks
   * matters: "name =" completion, then declaration contexts, then
   * define-start contexts (top level vs. grammar content), else pattern
   * keywords.
   */
  private static String[] getKeywords(PsiElement context) {
    final PsiElement next = PsiTreeUtil.skipWhitespacesForward(context);
    if (next != null && EscapeUtil.unescapeText(next).equals("=")) {
      // Caret is just before "=" -> only "start" may precede it here.
      return new String[]{ "start" };
    }
    if (DEFAULT_PATTERN.accepts(context)) {
      // "default" must be followed by "namespace".
      return new String[]{ "namespace" };
    } else if (DECL_PATTERN.accepts(context)) {
      // Elsewhere inside a declaration: nothing to suggest.
      return ArrayUtil.EMPTY_STRING_ARRAY;
    } else if (context.getParent() instanceof RncDefine && context.getParent().getFirstChild() == context) {
      if (DEFINE_PATTERN.accepts(context)) {
        // Right-hand side of "=": no keyword suggestions.
        return ArrayUtil.EMPTY_STRING_ARRAY;
      }
      if (TOP_LEVEL.accepts(context)) {
        if (!afterPattern(context)) {
          // Before the first pattern at top level: all keyword sets apply.
          return ArrayUtil.mergeArrays(DECL_KEYWORDS,
                                       ArrayUtil.mergeArrays(GRAMMAR_CONTENT_KEYWORDS, PATTERN_KEYWORDS));
        }
      }
      return GRAMMAR_CONTENT_KEYWORDS;
    }
    return PATTERN_KEYWORDS;
  }

  // True when some define already precedes this one (so decl keywords no longer apply).
  private static boolean afterPattern(PsiElement context) {
    // TODO: recognize all patterns
    return PsiTreeUtil.getPrevSiblingOfType(context.getParent(), RncDefine.class) != null;
  }
}
asedunov/intellij-community
xml/relaxng/src/org/intellij/plugins/relaxNG/compact/RncCompletionContributor.java
Java
apache-2.0
4,782
/*! ****************************************************************************** * * Pentaho Data Integration * * Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.ui.trans.steps.zipfile; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.CCombo; import org.eclipse.swt.events.FocusListener; import org.eclipse.swt.events.ModifyEvent; import org.eclipse.swt.events.ModifyListener; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.events.ShellAdapter; import org.eclipse.swt.events.ShellEvent; import org.eclipse.swt.layout.FormAttachment; import org.eclipse.swt.layout.FormData; import org.eclipse.swt.layout.FormLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Event; import org.eclipse.swt.widgets.Group; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.MessageBox; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.Text; import org.pentaho.di.core.Const; import org.pentaho.di.core.util.Utils; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.row.RowMetaInterface; import 
org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStepMeta; import org.pentaho.di.trans.step.StepDialogInterface; import org.pentaho.di.trans.steps.zipfile.ZipFileMeta; import org.pentaho.di.ui.core.dialog.ErrorDialog; import org.pentaho.di.ui.trans.step.BaseStepDialog; public class ZipFileDialog extends BaseStepDialog implements StepDialogInterface { private static Class<?> PKG = ZipFileMeta.class; // for i18n purposes, needed by Translator2!! private Label wlSourceFileNameField; private CCombo wSourceFileNameField; private FormData fdlSourceFileNameField, fdSourceFileNameField; private Label wlTargetFileNameField; private CCombo wTargetFileNameField; private FormData fdlTargetFileNameField, fdTargetFileNameField; private Button wAddResult; private FormData fdAddResult, fdlAddResult; private Label wlAddResult; private Button wOverwriteZipEntry; private FormData fdOverwriteTarget, fdlOverwriteTarget; private Label wlOverwriteTarget; private Button wCreateParentFolder; private FormData fdCreateParentFolder, fdlCreateParentFolder; private Label wlCreateParentFolder; private Button wKeepFolders; private FormData fdKeepFolders, fdlKeepFolders; private Label wlKeepFolders; private Group wSettingsGroup; private FormData fdSettingsGroup; private ZipFileMeta input; private Label wlBaseFolderField; private CCombo wBaseFolderField; private FormData fdlBaseFolderField, fdBaseFolderField; private Label wlOperation; private CCombo wOperation; private FormData fdlOperation; private FormData fdOperation; private Label wlMoveToFolderField; private CCombo wMoveToFolderField; private FormData fdlMoveToFolderField, fdMoveToFolderField; private boolean gotPreviousFields = false; public ZipFileDialog( Shell parent, Object in, TransMeta transMeta, String sname ) { super( parent, (BaseStepMeta) in, transMeta, sname ); input = (ZipFileMeta) in; } public String open() { Shell parent = getParent(); Display display = 
parent.getDisplay(); shell = new Shell( parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MAX | SWT.MIN ); props.setLook( shell ); setShellImage( shell, input ); ModifyListener lsMod = new ModifyListener() { public void modifyText( ModifyEvent e ) { input.setChanged(); } }; SelectionAdapter lsSel = new SelectionAdapter() { public void widgetSelected( SelectionEvent arg0 ) { input.setChanged(); } }; changed = input.hasChanged(); FormLayout formLayout = new FormLayout(); formLayout.marginWidth = Const.FORM_MARGIN; formLayout.marginHeight = Const.FORM_MARGIN; shell.setLayout( formLayout ); shell.setText( BaseMessages.getString( PKG, "ZipFileDialog.Shell.Title" ) ); int middle = props.getMiddlePct(); int margin = Const.MARGIN; // Stepname line wlStepname = new Label( shell, SWT.RIGHT ); wlStepname.setText( BaseMessages.getString( PKG, "ZipFileDialog.Stepname.Label" ) ); props.setLook( wlStepname ); fdlStepname = new FormData(); fdlStepname.left = new FormAttachment( 0, 0 ); fdlStepname.right = new FormAttachment( middle, -margin ); fdlStepname.top = new FormAttachment( 0, margin ); wlStepname.setLayoutData( fdlStepname ); wStepname = new Text( shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER ); wStepname.setText( stepname ); props.setLook( wStepname ); wStepname.addModifyListener( lsMod ); fdStepname = new FormData(); fdStepname.left = new FormAttachment( middle, 0 ); fdStepname.top = new FormAttachment( 0, margin ); fdStepname.right = new FormAttachment( 100, 0 ); wStepname.setLayoutData( fdStepname ); // /////////////////////////////// // START OF Settings GROUP // // /////////////////////////////// wSettingsGroup = new Group( shell, SWT.SHADOW_NONE ); props.setLook( wSettingsGroup ); wSettingsGroup.setText( BaseMessages.getString( PKG, "ZipFileDialog.wSettingsGroup.Label" ) ); FormLayout settingGroupLayout = new FormLayout(); settingGroupLayout.marginWidth = 10; settingGroupLayout.marginHeight = 10; wSettingsGroup.setLayout( settingGroupLayout ); // Create target parent folder? 
wlCreateParentFolder = new Label( wSettingsGroup, SWT.RIGHT ); wlCreateParentFolder.setText( BaseMessages.getString( PKG, "ZipFileDialog.CreateParentFolder.Label" ) ); props.setLook( wlCreateParentFolder ); fdlCreateParentFolder = new FormData(); fdlCreateParentFolder.left = new FormAttachment( 0, 0 ); fdlCreateParentFolder.top = new FormAttachment( wStepname, margin ); fdlCreateParentFolder.right = new FormAttachment( middle, -margin ); wlCreateParentFolder.setLayoutData( fdlCreateParentFolder ); wCreateParentFolder = new Button( wSettingsGroup, SWT.CHECK ); props.setLook( wCreateParentFolder ); wCreateParentFolder.setToolTipText( BaseMessages.getString( PKG, "ZipFileDialog.CreateParentFolder.Tooltip" ) ); fdCreateParentFolder = new FormData(); fdCreateParentFolder.left = new FormAttachment( middle, 0 ); fdCreateParentFolder.top = new FormAttachment( wStepname, margin ); wCreateParentFolder.setLayoutData( fdCreateParentFolder ); wCreateParentFolder.addSelectionListener( lsSel ); // Overwrite target file? 
wlOverwriteTarget = new Label( wSettingsGroup, SWT.RIGHT ); wlOverwriteTarget.setText( BaseMessages.getString( PKG, "ZipFileDialog.OverwriteTarget.Label" ) ); props.setLook( wlOverwriteTarget ); fdlOverwriteTarget = new FormData(); fdlOverwriteTarget.left = new FormAttachment( 0, 0 ); fdlOverwriteTarget.top = new FormAttachment( wCreateParentFolder, margin ); fdlOverwriteTarget.right = new FormAttachment( middle, -margin ); wlOverwriteTarget.setLayoutData( fdlOverwriteTarget ); wOverwriteZipEntry = new Button( wSettingsGroup, SWT.CHECK ); props.setLook( wOverwriteZipEntry ); wOverwriteZipEntry.setToolTipText( BaseMessages.getString( PKG, "ZipFileDialog.OverwriteTarget.Tooltip" ) ); fdOverwriteTarget = new FormData(); fdOverwriteTarget.left = new FormAttachment( middle, 0 ); fdOverwriteTarget.top = new FormAttachment( wCreateParentFolder, margin ); wOverwriteZipEntry.setLayoutData( fdOverwriteTarget ); wOverwriteZipEntry.addSelectionListener( lsSel ); // Add Target filename to result filenames? 
wlAddResult = new Label( wSettingsGroup, SWT.RIGHT ); wlAddResult.setText( BaseMessages.getString( PKG, "ZipFileDialog.AddResult.Label" ) ); props.setLook( wlAddResult ); fdlAddResult = new FormData(); fdlAddResult.left = new FormAttachment( 0, 0 ); fdlAddResult.top = new FormAttachment( wOverwriteZipEntry, margin ); fdlAddResult.right = new FormAttachment( middle, -margin ); wlAddResult.setLayoutData( fdlAddResult ); wAddResult = new Button( wSettingsGroup, SWT.CHECK ); props.setLook( wAddResult ); wAddResult.setToolTipText( BaseMessages.getString( PKG, "ZipFileDialog.AddResult.Tooltip" ) ); fdAddResult = new FormData(); fdAddResult.left = new FormAttachment( middle, 0 ); fdAddResult.top = new FormAttachment( wOverwriteZipEntry, margin ); wAddResult.setLayoutData( fdAddResult ); wAddResult.addSelectionListener( lsSel ); fdSettingsGroup = new FormData(); fdSettingsGroup.left = new FormAttachment( 0, margin ); fdSettingsGroup.top = new FormAttachment( wStepname, margin ); fdSettingsGroup.right = new FormAttachment( 100, -margin ); wSettingsGroup.setLayoutData( fdSettingsGroup ); // /////////////////////////////// // END OF Settings Fields GROUP // // /////////////////////////////// // SourceFileNameField field wlSourceFileNameField = new Label( shell, SWT.RIGHT ); wlSourceFileNameField.setText( BaseMessages.getString( PKG, "ZipFileDialog.SourceFileNameField.Label" ) ); props.setLook( wlSourceFileNameField ); fdlSourceFileNameField = new FormData(); fdlSourceFileNameField.left = new FormAttachment( 0, 0 ); fdlSourceFileNameField.right = new FormAttachment( middle, -margin ); fdlSourceFileNameField.top = new FormAttachment( wSettingsGroup, 2 * margin ); wlSourceFileNameField.setLayoutData( fdlSourceFileNameField ); wSourceFileNameField = new CCombo( shell, SWT.BORDER | SWT.READ_ONLY ); props.setLook( wSourceFileNameField ); wSourceFileNameField.setEditable( true ); wSourceFileNameField.addModifyListener( lsMod ); fdSourceFileNameField = new FormData(); 
fdSourceFileNameField.left = new FormAttachment( middle, 0 ); fdSourceFileNameField.top = new FormAttachment( wSettingsGroup, 2 * margin ); fdSourceFileNameField.right = new FormAttachment( 100, -margin ); wSourceFileNameField.setLayoutData( fdSourceFileNameField ); wSourceFileNameField.addFocusListener( new FocusListener() { public void focusLost( org.eclipse.swt.events.FocusEvent e ) { } public void focusGained( org.eclipse.swt.events.FocusEvent e ) { get(); } } ); // TargetFileNameField field wlTargetFileNameField = new Label( shell, SWT.RIGHT ); wlTargetFileNameField.setText( BaseMessages.getString( PKG, "ZipFileDialog.TargetFileNameField.Label" ) ); props.setLook( wlTargetFileNameField ); fdlTargetFileNameField = new FormData(); fdlTargetFileNameField.left = new FormAttachment( 0, 0 ); fdlTargetFileNameField.right = new FormAttachment( middle, -margin ); fdlTargetFileNameField.top = new FormAttachment( wSourceFileNameField, margin ); wlTargetFileNameField.setLayoutData( fdlTargetFileNameField ); wTargetFileNameField = new CCombo( shell, SWT.BORDER | SWT.READ_ONLY ); wTargetFileNameField.setEditable( true ); props.setLook( wTargetFileNameField ); wTargetFileNameField.addModifyListener( lsMod ); fdTargetFileNameField = new FormData(); fdTargetFileNameField.left = new FormAttachment( middle, 0 ); fdTargetFileNameField.top = new FormAttachment( wSourceFileNameField, margin ); fdTargetFileNameField.right = new FormAttachment( 100, -margin ); wTargetFileNameField.setLayoutData( fdTargetFileNameField ); wTargetFileNameField.addFocusListener( new FocusListener() { public void focusLost( org.eclipse.swt.events.FocusEvent e ) { } public void focusGained( org.eclipse.swt.events.FocusEvent e ) { get(); } } ); wlKeepFolders = new Label( shell, SWT.RIGHT ); wlKeepFolders.setText( BaseMessages.getString( PKG, "ZipFileDialog.KeepFolders.Label" ) ); props.setLook( wlKeepFolders ); fdlKeepFolders = new FormData(); fdlKeepFolders.left = new FormAttachment( 0, 0 ); 
fdlKeepFolders.top = new FormAttachment( wTargetFileNameField, margin ); fdlKeepFolders.right = new FormAttachment( middle, -margin ); wlKeepFolders.setLayoutData( fdlKeepFolders ); wKeepFolders = new Button( shell, SWT.CHECK ); props.setLook( wKeepFolders ); wKeepFolders.setToolTipText( BaseMessages.getString( PKG, "ZipFileDialog.KeepFolders.Tooltip" ) ); fdKeepFolders = new FormData(); fdKeepFolders.left = new FormAttachment( middle, 0 ); fdKeepFolders.top = new FormAttachment( wTargetFileNameField, margin ); wKeepFolders.setLayoutData( fdKeepFolders ); wKeepFolders.addSelectionListener( lsSel ); wKeepFolders.addSelectionListener( new SelectionAdapter() { @Override public void widgetSelected( SelectionEvent arg0 ) { keepFolder(); } } ); // BaseFolderField field wlBaseFolderField = new Label( shell, SWT.RIGHT ); wlBaseFolderField.setText( BaseMessages.getString( PKG, "ZipFileDialog.BaseFolderField.Label" ) ); props.setLook( wlBaseFolderField ); fdlBaseFolderField = new FormData(); fdlBaseFolderField.left = new FormAttachment( 0, 0 ); fdlBaseFolderField.right = new FormAttachment( middle, -margin ); fdlBaseFolderField.top = new FormAttachment( wKeepFolders, margin ); wlBaseFolderField.setLayoutData( fdlBaseFolderField ); wBaseFolderField = new CCombo( shell, SWT.BORDER | SWT.READ_ONLY ); wBaseFolderField.setEditable( true ); props.setLook( wBaseFolderField ); wBaseFolderField.addModifyListener( lsMod ); fdBaseFolderField = new FormData(); fdBaseFolderField.left = new FormAttachment( middle, 0 ); fdBaseFolderField.top = new FormAttachment( wKeepFolders, margin ); fdBaseFolderField.right = new FormAttachment( 100, -margin ); wBaseFolderField.setLayoutData( fdBaseFolderField ); wBaseFolderField.addFocusListener( new FocusListener() { public void focusLost( org.eclipse.swt.events.FocusEvent e ) { } public void focusGained( org.eclipse.swt.events.FocusEvent e ) { get(); } } ); // Operation wlOperation = new Label( shell, SWT.RIGHT ); wlOperation.setText( 
BaseMessages.getString( PKG, "ZipFileDialog.Operation.Label" ) ); props.setLook( wlOperation ); fdlOperation = new FormData(); fdlOperation.left = new FormAttachment( 0, 0 ); fdlOperation.right = new FormAttachment( middle, -margin ); fdlOperation.top = new FormAttachment( wBaseFolderField, margin ); wlOperation.setLayoutData( fdlOperation ); wOperation = new CCombo( shell, SWT.BORDER | SWT.READ_ONLY ); props.setLook( wOperation ); wOperation.addModifyListener( lsMod ); fdOperation = new FormData(); fdOperation.left = new FormAttachment( middle, 0 ); fdOperation.top = new FormAttachment( wBaseFolderField, margin ); fdOperation.right = new FormAttachment( 100, -margin ); wOperation.setLayoutData( fdOperation ); wOperation.setItems( ZipFileMeta.operationTypeDesc ); wOperation.addSelectionListener( new SelectionAdapter() { public void widgetSelected( SelectionEvent e ) { updateOperation(); } } ); // MoveToFolderField field wlMoveToFolderField = new Label( shell, SWT.RIGHT ); wlMoveToFolderField.setText( BaseMessages.getString( PKG, "ZipFileDialog.MoveToFolderField.Label" ) ); props.setLook( wlMoveToFolderField ); fdlMoveToFolderField = new FormData(); fdlMoveToFolderField.left = new FormAttachment( 0, 0 ); fdlMoveToFolderField.right = new FormAttachment( middle, -margin ); fdlMoveToFolderField.top = new FormAttachment( wOperation, margin ); wlMoveToFolderField.setLayoutData( fdlMoveToFolderField ); wMoveToFolderField = new CCombo( shell, SWT.BORDER | SWT.READ_ONLY ); wMoveToFolderField.setEditable( true ); props.setLook( wMoveToFolderField ); wMoveToFolderField.addModifyListener( lsMod ); fdMoveToFolderField = new FormData(); fdMoveToFolderField.left = new FormAttachment( middle, 0 ); fdMoveToFolderField.top = new FormAttachment( wOperation, margin ); fdMoveToFolderField.right = new FormAttachment( 100, -margin ); wMoveToFolderField.setLayoutData( fdMoveToFolderField ); wMoveToFolderField.addFocusListener( new FocusListener() { public void focusLost( 
org.eclipse.swt.events.FocusEvent e ) { } public void focusGained( org.eclipse.swt.events.FocusEvent e ) { get(); } } ); // THE BUTTONS wOK = new Button( shell, SWT.PUSH ); wOK.setText( BaseMessages.getString( PKG, "System.Button.OK" ) ); wCancel = new Button( shell, SWT.PUSH ); wCancel.setText( BaseMessages.getString( PKG, "System.Button.Cancel" ) ); setButtonPositions( new Button[] { wOK, wCancel }, margin, wMoveToFolderField ); // Add listeners lsOK = new Listener() { public void handleEvent( Event e ) { ok(); } }; lsCancel = new Listener() { public void handleEvent( Event e ) { cancel(); } }; wOK.addListener( SWT.Selection, lsOK ); wCancel.addListener( SWT.Selection, lsCancel ); lsDef = new SelectionAdapter() { public void widgetDefaultSelected( SelectionEvent e ) { ok(); } }; wStepname.addSelectionListener( lsDef ); // Detect X or ALT-F4 or something that kills this window... shell.addShellListener( new ShellAdapter() { public void shellClosed( ShellEvent e ) { cancel(); } } ); // Set the shell size, based upon previous time... setSize(); getData(); keepFolder(); updateOperation(); input.setChanged( changed ); shell.open(); while ( !shell.isDisposed() ) { if ( !display.readAndDispatch() ) { display.sleep(); } } return stepname; } /** * Copy information from the meta-data input to the dialog fields. 
*/
public void getData() {
  if ( log.isDebug() ) {
    log.logDebug( toString(), BaseMessages.getString( PKG, "ZipFileDialog.Log.GettingKeyInfo" ) );
  }
  // Copy each value from the step metadata into its widget.
  // Null checks guard against a freshly created step with unset fields.
  if ( input.getBaseFolderField() != null ) {
    wBaseFolderField.setText( input.getBaseFolderField() );
  }
  if ( input.getDynamicSourceFileNameField() != null ) {
    wSourceFileNameField.setText( input.getDynamicSourceFileNameField() );
  }
  if ( input.getDynamicTargetFileNameField() != null ) {
    wTargetFileNameField.setText( input.getDynamicTargetFileNameField() );
  }
  // Operation is stored as a type code; show its localized description.
  wOperation.setText( ZipFileMeta.getOperationTypeDesc( input.getOperationType() ) );
  if ( input.getMoveToFolderField() != null ) {
    wMoveToFolderField.setText( input.getMoveToFolderField() );
  }
  wAddResult.setSelection( input.isaddTargetFileNametoResult() );
  wOverwriteZipEntry.setSelection( input.isOverwriteZipEntry() );
  wCreateParentFolder.setSelection( input.isCreateParentFolder() );
  wKeepFolders.setSelection( input.isKeepSouceFolder() );
  wStepname.selectAll();
  wStepname.setFocus();
}

/**
 * Discard edits: clear the returned step name, restore the original
 * changed-flag on the metadata and close the dialog.
 */
private void cancel() {
  stepname = null;
  input.setChanged( changed );
  dispose();
}

/**
 * Validate and commit the dialog widgets back into the step metadata,
 * then close the dialog. A missing step name aborts with an error box.
 */
private void ok() {
  if ( Utils.isEmpty( wStepname.getText() ) ) {
    MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR );
    mb.setMessage( BaseMessages.getString( PKG, "System.Error.StepNameMissing.Message" ) );
    mb.setText( BaseMessages.getString( PKG, "System.Error.StepNameMissing.Title" ) );
    mb.open();
    return;
  }
  input.setBaseFolderField( wBaseFolderField.getText() );
  input.setDynamicSourceFileNameField( wSourceFileNameField.getText() );
  input.setDynamicTargetFileNameField( wTargetFileNameField.getText() );
  input.setaddTargetFileNametoResult( wAddResult.getSelection() );
  input.setOverwriteZipEntry( wOverwriteZipEntry.getSelection() );
  input.setCreateParentFolder( wCreateParentFolder.getSelection() );
  input.setKeepSouceFolder( wKeepFolders.getSelection() );
  // Map the localized description shown in the combo back to the type code.
  input.setOperationType( ZipFileMeta.getOperationTypeByDesc( wOperation.getText() ) );
  input.setMoveToFolderField( wMoveToFolderField.getText() );
  stepname = wStepname.getText(); // return value
  dispose();
}

/**
 * The base-folder field is only meaningful when source folders are kept,
 * so enable/disable it together with the "keep folders" checkbox.
 */
private void keepFolder() {
  wlBaseFolderField.setEnabled( wKeepFolders.getSelection() );
  wBaseFolderField.setEnabled( wKeepFolders.getSelection() );
}

/**
 * Lazily populate the three field combos with the names of the fields
 * coming from the previous step. Runs only once (gotPreviousFields guard);
 * the current selections are saved and restored around the repopulation.
 */
private void get() {
  if ( !gotPreviousFields ) {
    gotPreviousFields = true;
    String source = wSourceFileNameField.getText();
    String target = wTargetFileNameField.getText();
    String base = wBaseFolderField.getText();
    try {
      wSourceFileNameField.removeAll();
      wTargetFileNameField.removeAll();
      wBaseFolderField.removeAll();
      RowMetaInterface r = transMeta.getPrevStepFields( stepname );
      if ( r != null ) {
        String[] fields = r.getFieldNames();
        wSourceFileNameField.setItems( fields );
        wTargetFileNameField.setItems( fields );
        wBaseFolderField.setItems( fields );
      }
    } catch ( KettleException ke ) {
      new ErrorDialog( shell, BaseMessages.getString( PKG, "ZipFileDialog.FailedToGetFields.DialogTitle" ), BaseMessages
        .getString( PKG, "ZipFileDialog.FailedToGetFields.DialogMessage" ), ke );
    } finally {
      // Restore whatever the user had selected before the refresh.
      if ( source != null ) {
        wSourceFileNameField.setText( source );
      }
      if ( target != null ) {
        wTargetFileNameField.setText( target );
      }
      if ( base != null ) {
        wBaseFolderField.setText( base );
      }
    }
  }
}

/**
 * The move-to-folder field only applies to the MOVE operation; toggle
 * its label and combo whenever the selected operation changes.
 */
private void updateOperation() {
  wlMoveToFolderField
    .setEnabled( ZipFileMeta.getOperationTypeByDesc( wOperation.getText() ) == ZipFileMeta.OPERATION_TYPE_MOVE );
  wMoveToFolderField
    .setEnabled( ZipFileMeta.getOperationTypeByDesc( wOperation.getText() ) == ZipFileMeta.OPERATION_TYPE_MOVE );
}
}
nicoben/pentaho-kettle
ui/src/org/pentaho/di/ui/trans/steps/zipfile/ZipFileDialog.java
Java
apache-2.0
22,737
# Copyright (c) - 2014, Clinton Knight All rights reserved.
# Copyright (c) - 2015, Alex Meade.  All Rights Reserved.
# Copyright (c) - 2015, Rushil Chugh.  All Rights Reserved.
# Copyright (c) - 2015, Tom Barron.  All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

from cinder.volume import configuration as conf
import cinder.volume.drivers.netapp.options as na_opts

# Fake iSCSI target/connection fixtures shared by the NetApp driver tests.
ISCSI_FAKE_LUN_ID = 1

ISCSI_FAKE_IQN = 'iqn.1993-08.org.debian:01:10'

ISCSI_FAKE_ADDRESS = '10.63.165.216'

ISCSI_FAKE_PORT = '2232'

# NOTE: the original module assigned ISCSI_FAKE_VOLUME twice; the first
# assignment ({'id': 'fake_id'}) was dead code, immediately shadowed below.
ISCSI_FAKE_TARGET = {
    'address': ISCSI_FAKE_ADDRESS,
    'port': ISCSI_FAKE_PORT,
}

# 'provider_auth' encodes "<auth_method> <username> <password>".
ISCSI_FAKE_VOLUME = {'id': 'fake_id',
                     'provider_auth': 'None stack password'}

# Expected connection-info dictionary for FC/iSCSI initialize_connection.
FC_ISCSI_TARGET_INFO_DICT = {'target_discovered': False,
                             'target_portal': '%s:%s' % (ISCSI_FAKE_ADDRESS,
                                                         ISCSI_FAKE_PORT),
                             'target_iqn': ISCSI_FAKE_IQN,
                             'target_lun': ISCSI_FAKE_LUN_ID,
                             'volume_id': ISCSI_FAKE_VOLUME['id'],
                             'auth_method': 'None', 'auth_username': 'stack',
                             'auth_password': 'password'}

VOLUME_NAME = 'fake_volume_name'
VOLUME_ID = 'fake_volume_id'
VOLUME_TYPE_ID = 'fake_volume_type_id'

# Minimal volume dict; 'host' uses the host@backend#pool convention.
VOLUME = {
    'name': VOLUME_NAME,
    'size': 42,
    'id': VOLUME_ID,
    'host': 'fake_host@fake_backend#fake_pool',
    'volume_type_id': VOLUME_TYPE_ID,
}

SNAPSHOT_NAME = 'fake_snapshot_name'
SNAPSHOT_ID = 'fake_snapshot_id'

SNAPSHOT = {
    'name': SNAPSHOT_NAME,
    'id': SNAPSHOT_ID,
    'volume_id': VOLUME_ID,
    'volume_name': VOLUME_NAME,
    'volume_size': 42,
}

QOS_SPECS = {}

EXTRA_SPECS = {}
# QoS fixtures: throughput is expressed as bytes/second with a 'B/s' suffix.
MAX_THROUGHPUT = '21734278B/s'
QOS_POLICY_GROUP_NAME = 'fake_qos_policy_group_name'

# Legacy style: the policy group is named directly in the extra specs.
LEGACY_EXTRA_SPECS = {'netapp:qos_policy_group': QOS_POLICY_GROUP_NAME}

LEGACY_QOS = {
    'policy_name': QOS_POLICY_GROUP_NAME,
}

# Spec style: the driver derives a per-volume policy group name.
QOS_POLICY_GROUP_SPEC = {
    'max_throughput': MAX_THROUGHPUT,
    'policy_name': 'openstack-%s' % VOLUME_ID,
}

QOS_POLICY_GROUP_INFO_NONE = {'legacy': None, 'spec': None}

QOS_POLICY_GROUP_INFO = {'legacy': None, 'spec': QOS_POLICY_GROUP_SPEC}

LEGACY_QOS_POLICY_GROUP_INFO = {
    'legacy': LEGACY_QOS,
    'spec': None,
}

# Invalid because legacy and spec styles are mutually exclusive.
INVALID_QOS_POLICY_GROUP_INFO = {
    'legacy': LEGACY_QOS,
    'spec': QOS_POLICY_GROUP_SPEC,
}

QOS_SPECS_ID = 'fake_qos_specs_id'
QOS_SPEC = {'maxBPS': 21734278}

# Same spec applied at the three possible consumer scopes.
OUTER_BACKEND_QOS_SPEC = {
    'id': QOS_SPECS_ID,
    'specs': QOS_SPEC,
    'consumer': 'back-end',
}

OUTER_FRONTEND_QOS_SPEC = {
    'id': QOS_SPECS_ID,
    'specs': QOS_SPEC,
    'consumer': 'front-end',
}

OUTER_BOTH_QOS_SPEC = {
    'id': QOS_SPECS_ID,
    'specs': QOS_SPEC,
    'consumer': 'both',
}

VOLUME_TYPE = {'id': VOLUME_TYPE_ID, 'qos_specs_id': QOS_SPECS_ID}


def create_configuration():
    """Build a base NetApp driver Configuration with the common option groups."""
    config = conf.Configuration(None)
    config.append_config_values(na_opts.netapp_connection_opts)
    config.append_config_values(na_opts.netapp_transport_opts)
    config.append_config_values(na_opts.netapp_basicauth_opts)
    config.append_config_values(na_opts.netapp_provisioning_opts)
    return config


def create_configuration_7mode():
    """Base configuration plus the 7-mode-specific options."""
    config = create_configuration()
    config.append_config_values(na_opts.netapp_7mode_opts)
    return config


def create_configuration_cmode():
    """Base configuration plus the clustered-Data-ONTAP options."""
    config = create_configuration()
    config.append_config_values(na_opts.netapp_cluster_opts)
    return config
Hybrid-Cloud/cinder
cinder/tests/unit/volume/drivers/netapp/fakes.py
Python
apache-2.0
3,984
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.model; /** * Various constants. * * @version */ public final class Constants { public static final String JAXB_CONTEXT_PACKAGES = "" + "org.apache.camel:" + "org.apache.camel.model:" + "org.apache.camel.model.config:" + "org.apache.camel.model.dataformat:" + "org.apache.camel.model.language:" + "org.apache.camel.model.loadbalancer:" + "org.apache.camel.model.remote:" + "org.apache.camel.model.rest"; public static final String PLACEHOLDER_QNAME = "http://camel.apache.org/schema/placeholder"; private Constants() { } }
jmandawg/camel
camel-core/src/main/java/org/apache/camel/model/Constants.java
Java
apache-2.0
1,441
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.gobblin.metrics.broker;

import org.apache.gobblin.broker.EmptyKey;
import org.apache.gobblin.broker.ResourceInstance;
import org.apache.gobblin.broker.gobblin_scopes.GobblinScopeTypes;
import org.apache.gobblin.broker.iface.ConfigView;
import org.apache.gobblin.broker.iface.NotConfiguredException;
import org.apache.gobblin.broker.iface.ScopedConfigView;
import org.apache.gobblin.broker.iface.SharedResourceFactory;
import org.apache.gobblin.broker.iface.SharedResourceFactoryResponse;
import org.apache.gobblin.broker.iface.SharedResourcesBroker;
import org.apache.gobblin.metrics.event.lineage.LineageInfo;


/**
 * A {@link SharedResourceFactory} to share a job level {@link LineageInfo} instance
 */
public class LineageInfoFactory implements SharedResourceFactory<LineageInfo, EmptyKey, GobblinScopeTypes> {

  /** Name under which this factory is registered with the broker. */
  public static final String FACTORY_NAME = "lineageInfo";

  @Override
  public String getName() {
    return FACTORY_NAME;
  }

  /**
   * Builds a new {@link LineageInfo} from the scoped configuration and wraps
   * it in a {@link ResourceInstance} so the broker can share it.
   */
  @Override
  public SharedResourceFactoryResponse<LineageInfo> createResource(SharedResourcesBroker<GobblinScopeTypes> broker,
      ScopedConfigView<GobblinScopeTypes, EmptyKey> config) throws NotConfiguredException {
    return new ResourceInstance<>(new LineageInfo(config.getConfig()));
  }

  /**
   * The shared instance is scoped to the job, i.e. one {@link LineageInfo}
   * per Gobblin job.
   */
  @Override
  public GobblinScopeTypes getAutoScope(SharedResourcesBroker<GobblinScopeTypes> broker,
      ConfigView<GobblinScopeTypes, EmptyKey> config) {
    return GobblinScopeTypes.JOB;
  }
}
jinhyukchang/gobblin
gobblin-metrics-libs/gobblin-metrics-base/src/main/java/org/apache/gobblin/metrics/broker/LineageInfoFactory.java
Java
apache-2.0
2,261
// Code generated by protoc-gen-go. DO NOT EDIT. // source: google/ads/googleads/v1/services/account_budget_service.proto package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" import proto "github.com/golang/protobuf/proto" import fmt "fmt" import math "math" import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" import _ "google.golang.org/genproto/googleapis/api/annotations" import ( context "golang.org/x/net/context" grpc "google.golang.org/grpc" ) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package // Request message for // [AccountBudgetService.GetAccountBudget][google.ads.googleads.v1.services.AccountBudgetService.GetAccountBudget]. type GetAccountBudgetRequest struct { // The resource name of the account-level budget to fetch. 
ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *GetAccountBudgetRequest) Reset() { *m = GetAccountBudgetRequest{} } func (m *GetAccountBudgetRequest) String() string { return proto.CompactTextString(m) } func (*GetAccountBudgetRequest) ProtoMessage() {} func (*GetAccountBudgetRequest) Descriptor() ([]byte, []int) { return fileDescriptor_account_budget_service_94295ad5ca373008, []int{0} } func (m *GetAccountBudgetRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_GetAccountBudgetRequest.Unmarshal(m, b) } func (m *GetAccountBudgetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_GetAccountBudgetRequest.Marshal(b, m, deterministic) } func (dst *GetAccountBudgetRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_GetAccountBudgetRequest.Merge(dst, src) } func (m *GetAccountBudgetRequest) XXX_Size() int { return xxx_messageInfo_GetAccountBudgetRequest.Size(m) } func (m *GetAccountBudgetRequest) XXX_DiscardUnknown() { xxx_messageInfo_GetAccountBudgetRequest.DiscardUnknown(m) } var xxx_messageInfo_GetAccountBudgetRequest proto.InternalMessageInfo func (m *GetAccountBudgetRequest) GetResourceName() string { if m != nil { return m.ResourceName } return "" } func init() { proto.RegisterType((*GetAccountBudgetRequest)(nil), "google.ads.googleads.v1.services.GetAccountBudgetRequest") } // Reference imports to suppress errors if they are not otherwise used. var _ context.Context var _ grpc.ClientConn // This is a compile-time assertion to ensure that this generated file // is compatible with the grpc package it is being compiled against. const _ = grpc.SupportPackageIsVersion4 // AccountBudgetServiceClient is the client API for AccountBudgetService service. 
// // For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. type AccountBudgetServiceClient interface { // Returns an account-level budget in full detail. GetAccountBudget(ctx context.Context, in *GetAccountBudgetRequest, opts ...grpc.CallOption) (*resources.AccountBudget, error) } type accountBudgetServiceClient struct { cc *grpc.ClientConn } func NewAccountBudgetServiceClient(cc *grpc.ClientConn) AccountBudgetServiceClient { return &accountBudgetServiceClient{cc} } func (c *accountBudgetServiceClient) GetAccountBudget(ctx context.Context, in *GetAccountBudgetRequest, opts ...grpc.CallOption) (*resources.AccountBudget, error) { out := new(resources.AccountBudget) err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.AccountBudgetService/GetAccountBudget", in, out, opts...) if err != nil { return nil, err } return out, nil } // AccountBudgetServiceServer is the server API for AccountBudgetService service. type AccountBudgetServiceServer interface { // Returns an account-level budget in full detail. 
GetAccountBudget(context.Context, *GetAccountBudgetRequest) (*resources.AccountBudget, error) } func RegisterAccountBudgetServiceServer(s *grpc.Server, srv AccountBudgetServiceServer) { s.RegisterService(&_AccountBudgetService_serviceDesc, srv) } func _AccountBudgetService_GetAccountBudget_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(GetAccountBudgetRequest) if err := dec(in); err != nil { return nil, err } if interceptor == nil { return srv.(AccountBudgetServiceServer).GetAccountBudget(ctx, in) } info := &grpc.UnaryServerInfo{ Server: srv, FullMethod: "/google.ads.googleads.v1.services.AccountBudgetService/GetAccountBudget", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { return srv.(AccountBudgetServiceServer).GetAccountBudget(ctx, req.(*GetAccountBudgetRequest)) } return interceptor(ctx, in, info, handler) } var _AccountBudgetService_serviceDesc = grpc.ServiceDesc{ ServiceName: "google.ads.googleads.v1.services.AccountBudgetService", HandlerType: (*AccountBudgetServiceServer)(nil), Methods: []grpc.MethodDesc{ { MethodName: "GetAccountBudget", Handler: _AccountBudgetService_GetAccountBudget_Handler, }, }, Streams: []grpc.StreamDesc{}, Metadata: "google/ads/googleads/v1/services/account_budget_service.proto", } func init() { proto.RegisterFile("google/ads/googleads/v1/services/account_budget_service.proto", fileDescriptor_account_budget_service_94295ad5ca373008) } var fileDescriptor_account_budget_service_94295ad5ca373008 = []byte{ // 364 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x92, 0x4f, 0x4a, 0xf3, 0x40, 0x18, 0xc6, 0x49, 0x3e, 0xf8, 0xc0, 0xa0, 0x20, 0x41, 0x50, 0x8b, 0x8b, 0x52, 0xbb, 0x90, 0x2e, 0x66, 0x9a, 0x0a, 0xa2, 0x23, 0x15, 0xd2, 0x4d, 0x5d, 0x49, 0xa9, 0xd0, 0x85, 0x04, 0xca, 0x34, 0x19, 0x86, 0x40, 0x33, 0x53, 0xe7, 0x9d, 0x74, 0x23, 0x82, 0x78, 0x05, 
0x6f, 0xe0, 0xd2, 0x1b, 0x78, 0x05, 0x97, 0x7a, 0x05, 0x57, 0x9e, 0x42, 0xd2, 0xe9, 0x04, 0xaa, 0x86, 0xee, 0x1e, 0xde, 0x3c, 0xbf, 0xf7, 0xcf, 0x93, 0xf1, 0xba, 0x5c, 0x4a, 0x3e, 0x65, 0x98, 0x26, 0x80, 0x8d, 0x2c, 0xd4, 0x3c, 0xc0, 0xc0, 0xd4, 0x3c, 0x8d, 0x19, 0x60, 0x1a, 0xc7, 0x32, 0x17, 0x7a, 0x3c, 0xc9, 0x13, 0xce, 0xf4, 0x78, 0x59, 0x47, 0x33, 0x25, 0xb5, 0xf4, 0xeb, 0x86, 0x41, 0x34, 0x01, 0x54, 0xe2, 0x68, 0x1e, 0x20, 0x8b, 0xd7, 0x4e, 0xaa, 0x06, 0x28, 0x06, 0x32, 0x57, 0xbf, 0x27, 0x98, 0xce, 0xb5, 0x03, 0xcb, 0xcd, 0x52, 0x4c, 0x85, 0x90, 0x9a, 0xea, 0x54, 0x0a, 0x30, 0x5f, 0x1b, 0x17, 0xde, 0x6e, 0x9f, 0xe9, 0xd0, 0x80, 0xbd, 0x05, 0x37, 0x64, 0xb7, 0x39, 0x03, 0xed, 0x1f, 0x7a, 0x5b, 0xb6, 0xf5, 0x58, 0xd0, 0x8c, 0xed, 0x39, 0x75, 0xe7, 0x68, 0x63, 0xb8, 0x69, 0x8b, 0x57, 0x34, 0x63, 0x9d, 0x77, 0xc7, 0xdb, 0x59, 0xa1, 0xaf, 0xcd, 0xbe, 0xfe, 0xab, 0xe3, 0x6d, 0xff, 0xec, 0xec, 0x9f, 0xa1, 0x75, 0x67, 0xa2, 0x8a, 0x6d, 0x6a, 0xed, 0x4a, 0xb4, 0xbc, 0x1f, 0xad, 0x80, 0x8d, 0xd3, 0xc7, 0x8f, 0xcf, 0x27, 0xb7, 0xe3, 0xb7, 0x8b, 0x90, 0xee, 0x56, 0x4e, 0xe9, 0xc6, 0x39, 0x68, 0x99, 0x31, 0x05, 0xb8, 0x65, 0x53, 0x33, 0x14, 0xe0, 0xd6, 0x7d, 0xef, 0xc1, 0xf5, 0x9a, 0xb1, 0xcc, 0xd6, 0x2e, 0xdb, 0xdb, 0xff, 0xeb, 0xf4, 0x41, 0x11, 0xec, 0xc0, 0xb9, 0xb9, 0x5c, 0xe2, 0x5c, 0x4e, 0xa9, 0xe0, 0x48, 0x2a, 0x8e, 0x39, 0x13, 0x8b, 0xd8, 0xed, 0x0f, 0x9c, 0xa5, 0x50, 0xfd, 0x60, 0xce, 0xad, 0x78, 0x76, 0xff, 0xf5, 0xc3, 0xf0, 0xc5, 0xad, 0xf7, 0x4d, 0xc3, 0x30, 0x01, 0x64, 0x64, 0xa1, 0x46, 0x01, 0x5a, 0x0e, 0x86, 0x37, 0x6b, 0x89, 0xc2, 0x04, 0xa2, 0xd2, 0x12, 0x8d, 0x82, 0xc8, 0x5a, 0xbe, 0xdc, 0xa6, 0xa9, 0x13, 0x12, 0x26, 0x40, 0x48, 0x69, 0x22, 0x64, 0x14, 0x10, 0x62, 0x6d, 0x93, 0xff, 0x8b, 0x3d, 0x8f, 0xbf, 0x03, 0x00, 0x00, 0xff, 0xff, 0x29, 0x16, 0xed, 0x5b, 0xd7, 0x02, 0x00, 0x00, }
pweil-/origin
vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/account_budget_service.pb.go
GO
apache-2.0
8,224
/*
 * Copyright 2016 Red Hat, Inc. and/or its affiliates
 * and other contributors as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.keycloak.adapters.authentication;

import java.util.Map;

import org.keycloak.adapters.KeycloakDeployment;

/**
 * The simple SPI for authenticating clients/applications. It is used by the adapter during all OIDC backchannel requests
 * to the Keycloak server (code-to-token exchange, refresh token, or backchannel logout). You can also use it in your
 * application during direct access grants or service account requests
 * (see the service-account example from the Keycloak demo for more info).
 *
 * When you implement this SPI on the adapter (application) side, you also need to implement
 * org.keycloak.authentication.ClientAuthenticator on the server side, so your server is able to authenticate the client.
 *
 * You must specify a file
 * META-INF/services/org.keycloak.adapters.authentication.ClientCredentialsProvider in the WAR that this class is contained in (or in the JAR that is attached to the WEB-INF/lib or as jboss module
 * if you want to share the implementation among more WARs).
 *
 * NOTE: The SPI is not finished and method signatures are still subject to change in future versions (for example to support
 * authentication with client certificate)
 *
 * @see ClientIdAndSecretCredentialsProvider
 * @see JWTClientCredentialsProvider
 *
 * @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
 */
public interface ClientCredentialsProvider {

    /**
     * Return the ID of the provider. Use this ID in the keycloak.json configuration as the subelement of the "credentials" element.
     *
     * For example if your provider has ID "kerberos-keytab", use a configuration like this in keycloak.json:
     *
     * "credentials": {
     *
     *     "kerberos-keytab": {
     *         "keytab": "/tmp/foo"
     *     }
     * }
     *
     * @return the unique provider ID referenced from the adapter configuration
     */
    String getId();

    /**
     * Called by the adapter during deployment of your application. You can for example read configuration and init your authenticator here.
     *
     * @param deployment the adapter configuration
     * @param config the configuration of your provider read from keycloak.json. For the kerberos-keytab example above, it will return a map with the single key "keytab" with value "/tmp/foo"
     */
    void init(KeycloakDeployment deployment, Object config);

    /**
     * Called every time the adapter needs to perform a backchannel request.
     *
     * @param deployment Fully resolved deployment
     * @param requestHeaders You should put any HTTP request headers you want to use for authentication of the client. These headers will be attached to the HTTP request sent to the Keycloak server
     * @param formParams You should put any request parameters you want to use for authentication of the client. These parameters will be attached to the HTTP request sent to the Keycloak server
     */
    void setClientCredentials(KeycloakDeployment deployment, Map<String, String> requestHeaders, Map<String, String> formParams);
}
ppolavar/keycloak
adapters/oidc/adapter-core/src/main/java/org/keycloak/adapters/authentication/ClientCredentialsProvider.java
Java
apache-2.0
3,598
#include <Eigen/Sparse>
#include <vector>
#include <iostream>

typedef Eigen::SparseMatrix<double> SpMat; // declares a column-major sparse matrix type of double
typedef Eigen::Triplet<double> T;

// Defined elsewhere: assemble the linear system and write the solution image.
void buildProblem(std::vector<T>& coefficients, Eigen::VectorXd& b, int n);
void saveAsBitmap(const Eigen::VectorXd& x, int n, const char* filename);

int main(int argc, char** argv)
{
  // Exactly one command-line argument (the output bitmap path) is required.
  if (argc != 2) {
    std::cerr << "Error: expected one and only one argument.\n";
    return -1;
  }

  const int gridSize = 300;                  // size of the image
  const int unknowns = gridSize * gridSize;  // number of unknowns (= number of pixels)

  // Assembly: collect the non-zero coefficients as triplets, plus the
  // right-hand side vector resulting from the constraints.
  std::vector<T> triplets;
  Eigen::VectorXd rhs(unknowns);
  buildProblem(triplets, rhs, gridSize);

  SpMat systemMatrix(unknowns, unknowns);
  systemMatrix.setFromTriplets(triplets.begin(), triplets.end());

  // Solving: factor the matrix once (Cholesky), then back-substitute
  // against the given right-hand side.
  Eigen::SimplicialCholesky<SpMat> solver(systemMatrix);
  Eigen::VectorXd solution = solver.solve(rhs);

  // Export the result to a file.
  saveAsBitmap(solution, gridSize, argv[1]);

  return 0;
}
OSVR/OSVR-Core
vendor/eigen/doc/special_examples/Tutorial_sparse_example.cpp
C++
apache-2.0
1,183
#include "drape/pointers.hpp" #include "base/logging.hpp" DpPointerTracker & DpPointerTracker::Instance() { static DpPointerTracker pointersTracker; return pointersTracker; } DpPointerTracker::~DpPointerTracker() { ASSERT(m_alivePointers.empty(), ()); } void DpPointerTracker::RefPtrNamed(void * refPtr, std::string const & name) { std::lock_guard<std::mutex> lock(m_mutex); if (refPtr != nullptr) { auto it = m_alivePointers.find(refPtr); if (it != m_alivePointers.end()) it->second.first++; else m_alivePointers.insert(make_pair(refPtr, make_pair(1, name))); } } void DpPointerTracker::DestroyPtr(void * p) { std::lock_guard<std::mutex> lock(m_mutex); ASSERT(p != nullptr, ()); auto it = m_alivePointers.find(p); if (it != m_alivePointers.end()) { if (it->second.first != 0) { LOG(LWARNING, ("Drape pointer [", it->second.second, p, "] was destroyed, but had references, ref count = ", it->second.first)); } m_alivePointers.erase(it); } } void DpPointerTracker::DerefPtr(void * p) { std::lock_guard<std::mutex> lock(m_mutex); if (p != nullptr) { auto it = m_alivePointers.find(p); if (it != m_alivePointers.end()) { ASSERT(it->second.first > 0, ()); it->second.first--; } } } DpPointerTracker::TAlivePointers const & DpPointerTracker::GetAlivePointers() const { return m_alivePointers; }
alexzatsepin/omim
drape/pointers.cpp
C++
apache-2.0
1,448
package snowballstem // to regenerate these commands, run // go run gengen.go /path/to/snowball/algorithms/directory //go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/arabic/stem_Unicode.sbl -go -o arabic/arabic_stemmer -gop arabic -gor github.com/blevesearch/snowballstem //go:generate gofmt -s -w arabic/arabic_stemmer.go //go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/danish/stem_ISO_8859_1.sbl -go -o danish/danish_stemmer -gop danish -gor github.com/blevesearch/snowballstem //go:generate gofmt -s -w danish/danish_stemmer.go //go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/dutch/stem_ISO_8859_1.sbl -go -o dutch/dutch_stemmer -gop dutch -gor github.com/blevesearch/snowballstem //go:generate gofmt -s -w dutch/dutch_stemmer.go //go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/english/stem_ISO_8859_1.sbl -go -o english/english_stemmer -gop english -gor github.com/blevesearch/snowballstem //go:generate gofmt -s -w english/english_stemmer.go //go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/finnish/stem_ISO_8859_1.sbl -go -o finnish/finnish_stemmer -gop finnish -gor github.com/blevesearch/snowballstem //go:generate gofmt -s -w finnish/finnish_stemmer.go //go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/french/stem_ISO_8859_1.sbl -go -o french/french_stemmer -gop french -gor github.com/blevesearch/snowballstem //go:generate gofmt -s -w french/french_stemmer.go //go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/german/stem_ISO_8859_1.sbl -go -o german/german_stemmer -gop german -gor github.com/blevesearch/snowballstem //go:generate gofmt -s -w german/german_stemmer.go //go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/hungarian/stem_Unicode.sbl -go -o hungarian/hungarian_stemmer -gop hungarian -gor github.com/blevesearch/snowballstem //go:generate gofmt -s -w hungarian/hungarian_stemmer.go //go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/irish/stem_ISO_8859_1.sbl -go -o irish/irish_stemmer -gop irish -gor 
github.com/blevesearch/snowballstem //go:generate gofmt -s -w irish/irish_stemmer.go //go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/italian/stem_ISO_8859_1.sbl -go -o italian/italian_stemmer -gop italian -gor github.com/blevesearch/snowballstem //go:generate gofmt -s -w italian/italian_stemmer.go //go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/norwegian/stem_ISO_8859_1.sbl -go -o norwegian/norwegian_stemmer -gop norwegian -gor github.com/blevesearch/snowballstem //go:generate gofmt -s -w norwegian/norwegian_stemmer.go //go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/porter/stem_ISO_8859_1.sbl -go -o porter/porter_stemmer -gop porter -gor github.com/blevesearch/snowballstem //go:generate gofmt -s -w porter/porter_stemmer.go //go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/portuguese/stem_ISO_8859_1.sbl -go -o portuguese/portuguese_stemmer -gop portuguese -gor github.com/blevesearch/snowballstem //go:generate gofmt -s -w portuguese/portuguese_stemmer.go //go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/romanian/stem_Unicode.sbl -go -o romanian/romanian_stemmer -gop romanian -gor github.com/blevesearch/snowballstem //go:generate gofmt -s -w romanian/romanian_stemmer.go //go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/russian/stem_Unicode.sbl -go -o russian/russian_stemmer -gop russian -gor github.com/blevesearch/snowballstem //go:generate gofmt -s -w russian/russian_stemmer.go //go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/spanish/stem_ISO_8859_1.sbl -go -o spanish/spanish_stemmer -gop spanish -gor github.com/blevesearch/snowballstem //go:generate gofmt -s -w spanish/spanish_stemmer.go //go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/swedish/stem_ISO_8859_1.sbl -go -o swedish/swedish_stemmer -gop swedish -gor github.com/blevesearch/snowballstem //go:generate gofmt -s -w swedish/swedish_stemmer.go //go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/tamil/stem_Unicode.sbl -go -o tamil/tamil_stemmer -gop tamil -gor 
github.com/blevesearch/snowballstem //go:generate gofmt -s -w tamil/tamil_stemmer.go //go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/turkish/stem_Unicode.sbl -go -o turkish/turkish_stemmer -gop turkish -gor github.com/blevesearch/snowballstem //go:generate gofmt -s -w turkish/turkish_stemmer.go
heroiclabs/nakama
vendor/github.com/blevesearch/snowballstem/gen.go
GO
apache-2.0
4,281
/* * Copyright 2012 MyBatis.org. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ibatis.reflection; import org.apache.ibatis.reflection.factory.DefaultObjectFactory; import org.apache.ibatis.reflection.factory.ObjectFactory; import org.apache.ibatis.reflection.wrapper.DefaultObjectWrapperFactory; import org.apache.ibatis.reflection.wrapper.ObjectWrapperFactory; /** * @author Clinton Begin */ /** * 一些系统级别的元对象 * */ public final class SystemMetaObject { public static final ObjectFactory DEFAULT_OBJECT_FACTORY = new DefaultObjectFactory(); public static final ObjectWrapperFactory DEFAULT_OBJECT_WRAPPER_FACTORY = new DefaultObjectWrapperFactory(); public static final MetaObject NULL_META_OBJECT = MetaObject.forObject(NullObject.class, DEFAULT_OBJECT_FACTORY, DEFAULT_OBJECT_WRAPPER_FACTORY); private SystemMetaObject() { // Prevent Instantiation of Static Class } //空对象 private static class NullObject { } public static MetaObject forObject(Object object) { return MetaObject.forObject(object, DEFAULT_OBJECT_FACTORY, DEFAULT_OBJECT_WRAPPER_FACTORY); } }
shurun19851206/mybaties
src/main/java/org/apache/ibatis/reflection/SystemMetaObject.java
Java
apache-2.0
1,662
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.common.geo; import com.spatial4j.core.exception.InvalidShapeException; import com.spatial4j.core.shape.Circle; import com.spatial4j.core.shape.Rectangle; import com.spatial4j.core.shape.Shape; import com.spatial4j.core.shape.ShapeCollection; import com.spatial4j.core.shape.jts.JtsGeometry; import com.spatial4j.core.shape.jts.JtsPoint; import com.vividsolutions.jts.geom.*; import org.elasticsearch.ElasticsearchIllegalArgumentException; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.test.ElasticsearchTestCase; import org.elasticsearch.test.hamcrest.ElasticsearchGeoAssertions; import org.junit.Test; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import static org.elasticsearch.common.geo.builders.ShapeBuilder.SPATIAL_CONTEXT; /** * Tests for {@link GeoJSONShapeParser} */ public class GeoJSONShapeParserTests extends ElasticsearchTestCase { private final static 
GeometryFactory GEOMETRY_FACTORY = SPATIAL_CONTEXT.getGeometryFactory(); public void testParse_simplePoint() throws IOException { String pointGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Point") .startArray("coordinates").value(100.0).value(0.0).endArray() .endObject().string(); Point expected = GEOMETRY_FACTORY.createPoint(new Coordinate(100.0, 0.0)); assertGeometryEquals(new JtsPoint(expected, SPATIAL_CONTEXT), pointGeoJson); } public void testParse_lineString() throws IOException { String lineGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "LineString") .startArray("coordinates") .startArray().value(100.0).value(0.0).endArray() .startArray().value(101.0).value(1.0).endArray() .endArray() .endObject().string(); List<Coordinate> lineCoordinates = new ArrayList<>(); lineCoordinates.add(new Coordinate(100, 0)); lineCoordinates.add(new Coordinate(101, 1)); LineString expected = GEOMETRY_FACTORY.createLineString( lineCoordinates.toArray(new Coordinate[lineCoordinates.size()])); assertGeometryEquals(jtsGeom(expected), lineGeoJson); } public void testParse_multiLineString() throws IOException { String multilinesGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "MultiLineString") .startArray("coordinates") .startArray() .startArray().value(100.0).value(0.0).endArray() .startArray().value(101.0).value(1.0).endArray() .endArray() .startArray() .startArray().value(102.0).value(2.0).endArray() .startArray().value(103.0).value(3.0).endArray() .endArray() .endArray() .endObject().string(); MultiLineString expected = GEOMETRY_FACTORY.createMultiLineString(new LineString[]{ GEOMETRY_FACTORY.createLineString(new Coordinate[]{ new Coordinate(100, 0), new Coordinate(101, 1), }), GEOMETRY_FACTORY.createLineString(new Coordinate[]{ new Coordinate(102, 2), new Coordinate(103, 3), }), }); assertGeometryEquals(jtsGeom(expected), multilinesGeoJson); } public void testParse_circle() throws IOException { String multilinesGeoJson 
= XContentFactory.jsonBuilder().startObject().field("type", "circle") .startArray("coordinates").value(100.0).value(0.0).endArray() .field("radius", "100m") .endObject().string(); Circle expected = SPATIAL_CONTEXT.makeCircle(100.0, 0.0, 360 * 100 / GeoUtils.EARTH_EQUATOR); assertGeometryEquals(expected, multilinesGeoJson); } public void testParse_envelope() throws IOException { // test #1: envelope with expected coordinate order (TopLeft, BottomRight) String multilinesGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "envelope") .startArray("coordinates") .startArray().value(-50).value(30).endArray() .startArray().value(50).value(-30).endArray() .endArray() .endObject().string(); Rectangle expected = SPATIAL_CONTEXT.makeRectangle(-50, 50, -30, 30); assertGeometryEquals(expected, multilinesGeoJson); // test #2: envelope with agnostic coordinate order (TopRight, BottomLeft) multilinesGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "envelope") .startArray("coordinates") .startArray().value(50).value(30).endArray() .startArray().value(-50).value(-30).endArray() .endArray() .endObject().string(); expected = SPATIAL_CONTEXT.makeRectangle(-50, 50, -30, 30); assertGeometryEquals(expected, multilinesGeoJson); // test #3: "envelope" (actually a triangle) with invalid number of coordinates (TopRight, BottomLeft, BottomRight) multilinesGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "envelope") .startArray("coordinates") .startArray().value(50).value(30).endArray() .startArray().value(-50).value(-30).endArray() .startArray().value(50).value(-39).endArray() .endArray() .endObject().string(); XContentParser parser = JsonXContent.jsonXContent.createParser(multilinesGeoJson); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); // test #4: "envelope" with empty coordinates multilinesGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "envelope") 
.startArray("coordinates") .endArray() .endObject().string(); parser = JsonXContent.jsonXContent.createParser(multilinesGeoJson); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); } public void testParse_polygonNoHoles() throws IOException { String polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon") .startArray("coordinates") .startArray() .startArray().value(100.0).value(1.0).endArray() .startArray().value(101.0).value(1.0).endArray() .startArray().value(101.0).value(0.0).endArray() .startArray().value(100.0).value(0.0).endArray() .startArray().value(100.0).value(1.0).endArray() .endArray() .endArray() .endObject().string(); List<Coordinate> shellCoordinates = new ArrayList<>(); shellCoordinates.add(new Coordinate(100, 0)); shellCoordinates.add(new Coordinate(101, 0)); shellCoordinates.add(new Coordinate(101, 1)); shellCoordinates.add(new Coordinate(100, 1)); shellCoordinates.add(new Coordinate(100, 0)); LinearRing shell = GEOMETRY_FACTORY.createLinearRing(shellCoordinates.toArray(new Coordinate[shellCoordinates.size()])); Polygon expected = GEOMETRY_FACTORY.createPolygon(shell, null); assertGeometryEquals(jtsGeom(expected), polygonGeoJson); } public void testParse_invalidPoint() throws IOException { // test case 1: create an invalid point object with multipoint data format String invalidPoint1 = XContentFactory.jsonBuilder().startObject().field("type", "point") .startArray("coordinates") .startArray().value(-74.011).value(40.753).endArray() .endArray() .endObject().string(); XContentParser parser = JsonXContent.jsonXContent.createParser(invalidPoint1); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); // test case 2: create an invalid point object with an empty number of coordinates String invalidPoint2 = XContentFactory.jsonBuilder().startObject().field("type", "point") .startArray("coordinates") 
.endArray() .endObject().string(); parser = JsonXContent.jsonXContent.createParser(invalidPoint2); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); } public void testParse_invalidMultipoint() throws IOException { // test case 1: create an invalid multipoint object with single coordinate String invalidMultipoint1 = XContentFactory.jsonBuilder().startObject().field("type", "multipoint") .startArray("coordinates").value(-74.011).value(40.753).endArray() .endObject().string(); XContentParser parser = JsonXContent.jsonXContent.createParser(invalidMultipoint1); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); // test case 2: create an invalid multipoint object with null coordinate String invalidMultipoint2 = XContentFactory.jsonBuilder().startObject().field("type", "multipoint") .startArray("coordinates") .endArray() .endObject().string(); parser = JsonXContent.jsonXContent.createParser(invalidMultipoint2); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); // test case 3: create a valid formatted multipoint object with invalid number (0) of coordinates String invalidMultipoint3 = XContentFactory.jsonBuilder().startObject().field("type", "multipoint") .startArray("coordinates") .startArray().endArray() .endArray() .endObject().string(); parser = JsonXContent.jsonXContent.createParser(invalidMultipoint3); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); } public void testParse_invalidMultiPolygon() throws IOException { // test invalid multipolygon (an "accidental" polygon with inner rings outside outer ring) String multiPolygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "MultiPolygon") .startArray("coordinates") .startArray()//one poly (with two holes) .startArray() 
.startArray().value(102.0).value(2.0).endArray() .startArray().value(103.0).value(2.0).endArray() .startArray().value(103.0).value(3.0).endArray() .startArray().value(102.0).value(3.0).endArray() .startArray().value(102.0).value(2.0).endArray() .endArray() .startArray()// first hole .startArray().value(100.0).value(0.0).endArray() .startArray().value(101.0).value(0.0).endArray() .startArray().value(101.0).value(1.0).endArray() .startArray().value(100.0).value(1.0).endArray() .startArray().value(100.0).value(0.0).endArray() .endArray() .startArray()//second hole .startArray().value(100.2).value(0.8).endArray() .startArray().value(100.2).value(0.2).endArray() .startArray().value(100.8).value(0.2).endArray() .startArray().value(100.8).value(0.8).endArray() .startArray().value(100.2).value(0.8).endArray() .endArray() .endArray() .endArray() .endObject().string(); XContentParser parser = JsonXContent.jsonXContent.createParser(multiPolygonGeoJson); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); } public void testParse_OGCPolygonWithoutHoles() throws IOException { // test 1: ccw poly not crossing dateline String polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon") .startArray("coordinates") .startArray() .startArray().value(176.0).value(15.0).endArray() .startArray().value(-177.0).value(10.0).endArray() .startArray().value(-177.0).value(-10.0).endArray() .startArray().value(176.0).value(-15.0).endArray() .startArray().value(172.0).value(0.0).endArray() .startArray().value(176.0).value(15.0).endArray() .endArray() .endArray() .endObject().string(); XContentParser parser = JsonXContent.jsonXContent.createParser(polygonGeoJson); parser.nextToken(); Shape shape = ShapeBuilder.parse(parser).build(); ElasticsearchGeoAssertions.assertPolygon(shape); // test 2: ccw poly crossing dateline polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon") 
.startArray("coordinates") .startArray() .startArray().value(-177.0).value(10.0).endArray() .startArray().value(176.0).value(15.0).endArray() .startArray().value(172.0).value(0.0).endArray() .startArray().value(176.0).value(-15.0).endArray() .startArray().value(-177.0).value(-10.0).endArray() .startArray().value(-177.0).value(10.0).endArray() .endArray() .endArray() .endObject().string(); parser = JsonXContent.jsonXContent.createParser(polygonGeoJson); parser.nextToken(); shape = ShapeBuilder.parse(parser).build(); ElasticsearchGeoAssertions.assertMultiPolygon(shape); // test 3: cw poly not crossing dateline polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon") .startArray("coordinates") .startArray() .startArray().value(176.0).value(15.0).endArray() .startArray().value(180.0).value(10.0).endArray() .startArray().value(180.0).value(-10.0).endArray() .startArray().value(176.0).value(-15.0).endArray() .startArray().value(172.0).value(0.0).endArray() .startArray().value(176.0).value(15.0).endArray() .endArray() .endArray() .endObject().string(); parser = JsonXContent.jsonXContent.createParser(polygonGeoJson); parser.nextToken(); shape = ShapeBuilder.parse(parser).build(); ElasticsearchGeoAssertions.assertPolygon(shape); // test 4: cw poly crossing dateline polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon") .startArray("coordinates") .startArray() .startArray().value(176.0).value(15.0).endArray() .startArray().value(184.0).value(15.0).endArray() .startArray().value(184.0).value(0.0).endArray() .startArray().value(176.0).value(-15.0).endArray() .startArray().value(174.0).value(-10.0).endArray() .startArray().value(176.0).value(15.0).endArray() .endArray() .endArray() .endObject().string(); parser = JsonXContent.jsonXContent.createParser(polygonGeoJson); parser.nextToken(); shape = ShapeBuilder.parse(parser).build(); ElasticsearchGeoAssertions.assertMultiPolygon(shape); } public void 
testParse_OGCPolygonWithHoles() throws IOException { // test 1: ccw poly not crossing dateline String polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon") .startArray("coordinates") .startArray() .startArray().value(176.0).value(15.0).endArray() .startArray().value(-177.0).value(10.0).endArray() .startArray().value(-177.0).value(-10.0).endArray() .startArray().value(176.0).value(-15.0).endArray() .startArray().value(172.0).value(0.0).endArray() .startArray().value(176.0).value(15.0).endArray() .endArray() .startArray() .startArray().value(-172.0).value(8.0).endArray() .startArray().value(174.0).value(10.0).endArray() .startArray().value(-172.0).value(-8.0).endArray() .startArray().value(-172.0).value(8.0).endArray() .endArray() .endArray() .endObject().string(); XContentParser parser = JsonXContent.jsonXContent.createParser(polygonGeoJson); parser.nextToken(); Shape shape = ShapeBuilder.parse(parser).build(); ElasticsearchGeoAssertions.assertPolygon(shape); // test 2: ccw poly crossing dateline polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon") .startArray("coordinates") .startArray() .startArray().value(-177.0).value(10.0).endArray() .startArray().value(176.0).value(15.0).endArray() .startArray().value(172.0).value(0.0).endArray() .startArray().value(176.0).value(-15.0).endArray() .startArray().value(-177.0).value(-10.0).endArray() .startArray().value(-177.0).value(10.0).endArray() .endArray() .startArray() .startArray().value(178.0).value(8.0).endArray() .startArray().value(-178.0).value(8.0).endArray() .startArray().value(-180.0).value(-8.0).endArray() .startArray().value(178.0).value(8.0).endArray() .endArray() .endArray() .endObject().string(); parser = JsonXContent.jsonXContent.createParser(polygonGeoJson); parser.nextToken(); shape = ShapeBuilder.parse(parser).build(); ElasticsearchGeoAssertions.assertMultiPolygon(shape); // test 3: cw poly not crossing dateline polygonGeoJson = 
XContentFactory.jsonBuilder().startObject().field("type", "Polygon") .startArray("coordinates") .startArray() .startArray().value(176.0).value(15.0).endArray() .startArray().value(180.0).value(10.0).endArray() .startArray().value(179.0).value(-10.0).endArray() .startArray().value(176.0).value(-15.0).endArray() .startArray().value(172.0).value(0.0).endArray() .startArray().value(176.0).value(15.0).endArray() .endArray() .startArray() .startArray().value(177.0).value(8.0).endArray() .startArray().value(179.0).value(10.0).endArray() .startArray().value(179.0).value(-8.0).endArray() .startArray().value(177.0).value(8.0).endArray() .endArray() .endArray() .endObject().string(); parser = JsonXContent.jsonXContent.createParser(polygonGeoJson); parser.nextToken(); shape = ShapeBuilder.parse(parser).build(); ElasticsearchGeoAssertions.assertPolygon(shape); // test 4: cw poly crossing dateline polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon") .startArray("coordinates") .startArray() .startArray().value(183.0).value(10.0).endArray() .startArray().value(183.0).value(-10.0).endArray() .startArray().value(176.0).value(-15.0).endArray() .startArray().value(172.0).value(0.0).endArray() .startArray().value(176.0).value(15.0).endArray() .startArray().value(183.0).value(10.0).endArray() .endArray() .startArray() .startArray().value(178.0).value(8.0).endArray() .startArray().value(182.0).value(8.0).endArray() .startArray().value(180.0).value(-8.0).endArray() .startArray().value(178.0).value(8.0).endArray() .endArray() .endArray() .endObject().string(); parser = JsonXContent.jsonXContent.createParser(polygonGeoJson); parser.nextToken(); shape = ShapeBuilder.parse(parser).build(); ElasticsearchGeoAssertions.assertMultiPolygon(shape); } public void testParse_invalidPolygon() throws IOException { /** * The following 3 test cases ensure proper error handling of invalid polygons * per the GeoJSON specification */ // test case 1: create an invalid polygon 
with only 2 points String invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon") .startArray("coordinates") .startArray() .startArray().value(-74.011).value(40.753).endArray() .startArray().value(-75.022).value(41.783).endArray() .endArray() .endArray() .endObject().string(); XContentParser parser = JsonXContent.jsonXContent.createParser(invalidPoly); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); // test case 2: create an invalid polygon with only 1 point invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon") .startArray("coordinates") .startArray() .startArray().value(-74.011).value(40.753).endArray() .endArray() .endArray() .endObject().string(); parser = JsonXContent.jsonXContent.createParser(invalidPoly); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); // test case 3: create an invalid polygon with 0 points invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon") .startArray("coordinates") .startArray() .startArray().endArray() .endArray() .endArray() .endObject().string(); parser = JsonXContent.jsonXContent.createParser(invalidPoly); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); // test case 4: create an invalid polygon with null value points invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon") .startArray("coordinates") .startArray() .startArray().nullValue().nullValue().endArray() .endArray() .endArray() .endObject().string(); parser = JsonXContent.jsonXContent.createParser(invalidPoly); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchIllegalArgumentException.class); // test case 5: create an invalid polygon with 1 invalid LinearRing invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon") 
.startArray("coordinates") .nullValue().nullValue() .endArray() .endObject().string(); parser = JsonXContent.jsonXContent.createParser(invalidPoly); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchIllegalArgumentException.class); // test case 6: create an invalid polygon with 0 LinearRings invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon") .startArray("coordinates").endArray() .endObject().string(); parser = JsonXContent.jsonXContent.createParser(invalidPoly); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); // test case 7: create an invalid polygon with 0 LinearRings invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon") .startArray("coordinates") .startArray().value(-74.011).value(40.753).endArray() .endArray() .endObject().string(); parser = JsonXContent.jsonXContent.createParser(invalidPoly); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); } public void testParse_polygonWithHole() throws IOException { String polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon") .startArray("coordinates") .startArray() .startArray().value(100.0).value(1.0).endArray() .startArray().value(101.0).value(1.0).endArray() .startArray().value(101.0).value(0.0).endArray() .startArray().value(100.0).value(0.0).endArray() .startArray().value(100.0).value(1.0).endArray() .endArray() .startArray() .startArray().value(100.2).value(0.8).endArray() .startArray().value(100.2).value(0.2).endArray() .startArray().value(100.8).value(0.2).endArray() .startArray().value(100.8).value(0.8).endArray() .startArray().value(100.2).value(0.8).endArray() .endArray() .endArray() .endObject().string(); List<Coordinate> shellCoordinates = new ArrayList<>(); shellCoordinates.add(new Coordinate(100, 0)); shellCoordinates.add(new Coordinate(101, 0)); 
shellCoordinates.add(new Coordinate(101, 1)); shellCoordinates.add(new Coordinate(100, 1)); shellCoordinates.add(new Coordinate(100, 0)); List<Coordinate> holeCoordinates = new ArrayList<>(); holeCoordinates.add(new Coordinate(100.2, 0.2)); holeCoordinates.add(new Coordinate(100.8, 0.2)); holeCoordinates.add(new Coordinate(100.8, 0.8)); holeCoordinates.add(new Coordinate(100.2, 0.8)); holeCoordinates.add(new Coordinate(100.2, 0.2)); LinearRing shell = GEOMETRY_FACTORY.createLinearRing( shellCoordinates.toArray(new Coordinate[shellCoordinates.size()])); LinearRing[] holes = new LinearRing[1]; holes[0] = GEOMETRY_FACTORY.createLinearRing( holeCoordinates.toArray(new Coordinate[holeCoordinates.size()])); Polygon expected = GEOMETRY_FACTORY.createPolygon(shell, holes); assertGeometryEquals(jtsGeom(expected), polygonGeoJson); } public void testParse_selfCrossingPolygon() throws IOException { // test self crossing ccw poly not crossing dateline String polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon") .startArray("coordinates") .startArray() .startArray().value(176.0).value(15.0).endArray() .startArray().value(-177.0).value(10.0).endArray() .startArray().value(-177.0).value(-10.0).endArray() .startArray().value(176.0).value(-15.0).endArray() .startArray().value(-177.0).value(15.0).endArray() .startArray().value(172.0).value(0.0).endArray() .startArray().value(176.0).value(15.0).endArray() .endArray() .endArray() .endObject().string(); XContentParser parser = JsonXContent.jsonXContent.createParser(polygonGeoJson); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, InvalidShapeException.class); } public void testParse_multiPoint() throws IOException { String multiPointGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "MultiPoint") .startArray("coordinates") .startArray().value(100.0).value(0.0).endArray() .startArray().value(101.0).value(1.0).endArray() .endArray() .endObject().string(); 
ShapeCollection expected = shapeCollection( SPATIAL_CONTEXT.makePoint(100, 0), SPATIAL_CONTEXT.makePoint(101, 1.0)); assertGeometryEquals(expected, multiPointGeoJson); } public void testParse_multiPolygon() throws IOException { // test #1: two polygons; one without hole, one with hole String multiPolygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "MultiPolygon") .startArray("coordinates") .startArray()//first poly (without holes) .startArray() .startArray().value(102.0).value(2.0).endArray() .startArray().value(103.0).value(2.0).endArray() .startArray().value(103.0).value(3.0).endArray() .startArray().value(102.0).value(3.0).endArray() .startArray().value(102.0).value(2.0).endArray() .endArray() .endArray() .startArray()//second poly (with hole) .startArray() .startArray().value(100.0).value(0.0).endArray() .startArray().value(101.0).value(0.0).endArray() .startArray().value(101.0).value(1.0).endArray() .startArray().value(100.0).value(1.0).endArray() .startArray().value(100.0).value(0.0).endArray() .endArray() .startArray()//hole .startArray().value(100.2).value(0.8).endArray() .startArray().value(100.2).value(0.2).endArray() .startArray().value(100.8).value(0.2).endArray() .startArray().value(100.8).value(0.8).endArray() .startArray().value(100.2).value(0.8).endArray() .endArray() .endArray() .endArray() .endObject().string(); List<Coordinate> shellCoordinates = new ArrayList<>(); shellCoordinates.add(new Coordinate(100, 0)); shellCoordinates.add(new Coordinate(101, 0)); shellCoordinates.add(new Coordinate(101, 1)); shellCoordinates.add(new Coordinate(100, 1)); shellCoordinates.add(new Coordinate(100, 0)); List<Coordinate> holeCoordinates = new ArrayList<>(); holeCoordinates.add(new Coordinate(100.2, 0.2)); holeCoordinates.add(new Coordinate(100.8, 0.2)); holeCoordinates.add(new Coordinate(100.8, 0.8)); holeCoordinates.add(new Coordinate(100.2, 0.8)); holeCoordinates.add(new Coordinate(100.2, 0.2)); LinearRing shell = 
GEOMETRY_FACTORY.createLinearRing(shellCoordinates.toArray(new Coordinate[shellCoordinates.size()])); LinearRing[] holes = new LinearRing[1]; holes[0] = GEOMETRY_FACTORY.createLinearRing(holeCoordinates.toArray(new Coordinate[holeCoordinates.size()])); Polygon withHoles = GEOMETRY_FACTORY.createPolygon(shell, holes); shellCoordinates = new ArrayList<>(); shellCoordinates.add(new Coordinate(102, 3)); shellCoordinates.add(new Coordinate(103, 3)); shellCoordinates.add(new Coordinate(103, 2)); shellCoordinates.add(new Coordinate(102, 2)); shellCoordinates.add(new Coordinate(102, 3)); shell = GEOMETRY_FACTORY.createLinearRing(shellCoordinates.toArray(new Coordinate[shellCoordinates.size()])); Polygon withoutHoles = GEOMETRY_FACTORY.createPolygon(shell, null); Shape expected = shapeCollection(withoutHoles, withHoles); assertGeometryEquals(expected, multiPolygonGeoJson); // test #2: multipolygon; one polygon with one hole // this test converting the multipolygon from a ShapeCollection type // to a simple polygon (jtsGeom) multiPolygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "MultiPolygon") .startArray("coordinates") .startArray() .startArray() .startArray().value(100.0).value(1.0).endArray() .startArray().value(101.0).value(1.0).endArray() .startArray().value(101.0).value(0.0).endArray() .startArray().value(100.0).value(0.0).endArray() .startArray().value(100.0).value(1.0).endArray() .endArray() .startArray()// hole .startArray().value(100.2).value(0.8).endArray() .startArray().value(100.2).value(0.2).endArray() .startArray().value(100.8).value(0.2).endArray() .startArray().value(100.8).value(0.8).endArray() .startArray().value(100.2).value(0.8).endArray() .endArray() .endArray() .endArray() .endObject().string(); shellCoordinates = new ArrayList<>(); shellCoordinates.add(new Coordinate(100, 1)); shellCoordinates.add(new Coordinate(101, 1)); shellCoordinates.add(new Coordinate(101, 0)); shellCoordinates.add(new Coordinate(100, 0)); 
shellCoordinates.add(new Coordinate(100, 1)); holeCoordinates = new ArrayList<>(); holeCoordinates.add(new Coordinate(100.2, 0.8)); holeCoordinates.add(new Coordinate(100.2, 0.2)); holeCoordinates.add(new Coordinate(100.8, 0.2)); holeCoordinates.add(new Coordinate(100.8, 0.8)); holeCoordinates.add(new Coordinate(100.2, 0.8)); shell = GEOMETRY_FACTORY.createLinearRing(shellCoordinates.toArray(new Coordinate[shellCoordinates.size()])); holes = new LinearRing[1]; holes[0] = GEOMETRY_FACTORY.createLinearRing(holeCoordinates.toArray(new Coordinate[holeCoordinates.size()])); withHoles = GEOMETRY_FACTORY.createPolygon(shell, holes); assertGeometryEquals(jtsGeom(withHoles), multiPolygonGeoJson); } public void testParse_geometryCollection() throws IOException { String geometryCollectionGeoJson = XContentFactory.jsonBuilder().startObject() .field("type", "GeometryCollection") .startArray("geometries") .startObject() .field("type", "LineString") .startArray("coordinates") .startArray().value(100.0).value(0.0).endArray() .startArray().value(101.0).value(1.0).endArray() .endArray() .endObject() .startObject() .field("type", "Point") .startArray("coordinates").value(102.0).value(2.0).endArray() .endObject() .endArray() .endObject() .string(); Shape[] expected = new Shape[2]; LineString expectedLineString = GEOMETRY_FACTORY.createLineString(new Coordinate[]{ new Coordinate(100, 0), new Coordinate(101, 1), }); expected[0] = jtsGeom(expectedLineString); Point expectedPoint = GEOMETRY_FACTORY.createPoint(new Coordinate(102.0, 2.0)); expected[1] = new JtsPoint(expectedPoint, SPATIAL_CONTEXT); //equals returns true only if geometries are in the same order assertGeometryEquals(shapeCollection(expected), geometryCollectionGeoJson); } public void testThatParserExtractsCorrectTypeAndCoordinatesFromArbitraryJson() throws IOException { String pointGeoJson = XContentFactory.jsonBuilder().startObject() .startObject("crs") .field("type", "name") .startObject("properties") .field("name", 
"urn:ogc:def:crs:OGC:1.3:CRS84") .endObject() .endObject() .field("bbox", "foobar") .field("type", "point") .field("bubu", "foobar") .startArray("coordinates").value(100.0).value(0.0).endArray() .startObject("nested").startArray("coordinates").value(200.0).value(0.0).endArray().endObject() .startObject("lala").field("type", "NotAPoint").endObject() .endObject().string(); Point expected = GEOMETRY_FACTORY.createPoint(new Coordinate(100.0, 0.0)); assertGeometryEquals(new JtsPoint(expected, SPATIAL_CONTEXT), pointGeoJson); } public void testParse_orientationOption() throws IOException { // test 1: valid ccw (right handed system) poly not crossing dateline (with 'right' field) String polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon") .field("orientation", "right") .startArray("coordinates") .startArray() .startArray().value(176.0).value(15.0).endArray() .startArray().value(-177.0).value(10.0).endArray() .startArray().value(-177.0).value(-10.0).endArray() .startArray().value(176.0).value(-15.0).endArray() .startArray().value(172.0).value(0.0).endArray() .startArray().value(176.0).value(15.0).endArray() .endArray() .startArray() .startArray().value(-172.0).value(8.0).endArray() .startArray().value(174.0).value(10.0).endArray() .startArray().value(-172.0).value(-8.0).endArray() .startArray().value(-172.0).value(8.0).endArray() .endArray() .endArray() .endObject().string(); XContentParser parser = JsonXContent.jsonXContent.createParser(polygonGeoJson); parser.nextToken(); Shape shape = ShapeBuilder.parse(parser).build(); ElasticsearchGeoAssertions.assertPolygon(shape); // test 2: valid ccw (right handed system) poly not crossing dateline (with 'ccw' field) polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon") .field("orientation", "ccw") .startArray("coordinates") .startArray() .startArray().value(176.0).value(15.0).endArray() .startArray().value(-177.0).value(10.0).endArray() 
.startArray().value(-177.0).value(-10.0).endArray() .startArray().value(176.0).value(-15.0).endArray() .startArray().value(172.0).value(0.0).endArray() .startArray().value(176.0).value(15.0).endArray() .endArray() .startArray() .startArray().value(-172.0).value(8.0).endArray() .startArray().value(174.0).value(10.0).endArray() .startArray().value(-172.0).value(-8.0).endArray() .startArray().value(-172.0).value(8.0).endArray() .endArray() .endArray() .endObject().string(); parser = JsonXContent.jsonXContent.createParser(polygonGeoJson); parser.nextToken(); shape = ShapeBuilder.parse(parser).build(); ElasticsearchGeoAssertions.assertPolygon(shape); // test 3: valid ccw (right handed system) poly not crossing dateline (with 'counterclockwise' field) polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon") .field("orientation", "counterclockwise") .startArray("coordinates") .startArray() .startArray().value(176.0).value(15.0).endArray() .startArray().value(-177.0).value(10.0).endArray() .startArray().value(-177.0).value(-10.0).endArray() .startArray().value(176.0).value(-15.0).endArray() .startArray().value(172.0).value(0.0).endArray() .startArray().value(176.0).value(15.0).endArray() .endArray() .startArray() .startArray().value(-172.0).value(8.0).endArray() .startArray().value(174.0).value(10.0).endArray() .startArray().value(-172.0).value(-8.0).endArray() .startArray().value(-172.0).value(8.0).endArray() .endArray() .endArray() .endObject().string(); parser = JsonXContent.jsonXContent.createParser(polygonGeoJson); parser.nextToken(); shape = ShapeBuilder.parse(parser).build(); ElasticsearchGeoAssertions.assertPolygon(shape); // test 4: valid cw (left handed system) poly crossing dateline (with 'left' field) polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon") .field("orientation", "left") .startArray("coordinates") .startArray() .startArray().value(176.0).value(15.0).endArray() 
.startArray().value(-177.0).value(10.0).endArray() .startArray().value(-177.0).value(-10.0).endArray() .startArray().value(176.0).value(-15.0).endArray() .startArray().value(172.0).value(0.0).endArray() .startArray().value(176.0).value(15.0).endArray() .endArray() .startArray() .startArray().value(-178.0).value(8.0).endArray() .startArray().value(178.0).value(8.0).endArray() .startArray().value(180.0).value(-8.0).endArray() .startArray().value(-178.0).value(8.0).endArray() .endArray() .endArray() .endObject().string(); parser = JsonXContent.jsonXContent.createParser(polygonGeoJson); parser.nextToken(); shape = ShapeBuilder.parse(parser).build(); ElasticsearchGeoAssertions.assertMultiPolygon(shape); // test 5: valid cw multipoly (left handed system) poly crossing dateline (with 'cw' field) polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon") .field("orientation", "cw") .startArray("coordinates") .startArray() .startArray().value(176.0).value(15.0).endArray() .startArray().value(-177.0).value(10.0).endArray() .startArray().value(-177.0).value(-10.0).endArray() .startArray().value(176.0).value(-15.0).endArray() .startArray().value(172.0).value(0.0).endArray() .startArray().value(176.0).value(15.0).endArray() .endArray() .startArray() .startArray().value(-178.0).value(8.0).endArray() .startArray().value(178.0).value(8.0).endArray() .startArray().value(180.0).value(-8.0).endArray() .startArray().value(-178.0).value(8.0).endArray() .endArray() .endArray() .endObject().string(); parser = JsonXContent.jsonXContent.createParser(polygonGeoJson); parser.nextToken(); shape = ShapeBuilder.parse(parser).build(); ElasticsearchGeoAssertions.assertMultiPolygon(shape); // test 6: valid cw multipoly (left handed system) poly crossing dateline (with 'clockwise' field) polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon") .field("orientation", "clockwise") .startArray("coordinates") .startArray() 
.startArray().value(176.0).value(15.0).endArray() .startArray().value(-177.0).value(10.0).endArray() .startArray().value(-177.0).value(-10.0).endArray() .startArray().value(176.0).value(-15.0).endArray() .startArray().value(172.0).value(0.0).endArray() .startArray().value(176.0).value(15.0).endArray() .endArray() .startArray() .startArray().value(-178.0).value(8.0).endArray() .startArray().value(178.0).value(8.0).endArray() .startArray().value(180.0).value(-8.0).endArray() .startArray().value(-178.0).value(8.0).endArray() .endArray() .endArray() .endObject().string(); parser = JsonXContent.jsonXContent.createParser(polygonGeoJson); parser.nextToken(); shape = ShapeBuilder.parse(parser).build(); ElasticsearchGeoAssertions.assertMultiPolygon(shape); } private void assertGeometryEquals(Shape expected, String geoJson) throws IOException { XContentParser parser = JsonXContent.jsonXContent.createParser(geoJson); parser.nextToken(); ElasticsearchGeoAssertions.assertEquals(expected, ShapeBuilder.parse(parser).build()); } private ShapeCollection<Shape> shapeCollection(Shape... shapes) { return new ShapeCollection<>(Arrays.asList(shapes), SPATIAL_CONTEXT); } private ShapeCollection<Shape> shapeCollection(Geometry... geoms) { List<Shape> shapes = new ArrayList<>(geoms.length); for (Geometry geom : geoms) { shapes.add(jtsGeom(geom)); } return new ShapeCollection<>(shapes, SPATIAL_CONTEXT); } private JtsGeometry jtsGeom(Geometry geom) { return new JtsGeometry(geom, SPATIAL_CONTEXT, false, false); } }
Asimov4/elasticsearch
src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java
Java
apache-2.0
47,639
class Clfft < Formula desc "FFT functions written in OpenCL" homepage "https://github.com/clMathLibraries/clFFT" url "https://github.com/clMathLibraries/clFFT/archive/v2.12.2.tar.gz" sha256 "e7348c146ad48c6a3e6997b7702202ad3ee3b5df99edf7ef00bbacc21e897b12" bottle do cellar :any sha256 "1e3aca16a694b761c0267c0dfdd9933d43cddd3ed0ea9d20fd4016222a7748f9" => :high_sierra sha256 "009c0a8a81d783393abc8ca6307631c50e50ba72dc09e3f2cda5f2e2d8aa617c" => :sierra sha256 "369c0df6b06b7ea116120e177a44a54760cc4d7132a1fb59a83ef52a99a6b5f4" => :el_capitan sha256 "3c91564548f9b7844de09de3d54b77b43e7855c17def6d3efac5866e357635f0" => :yosemite end depends_on "boost" => :build depends_on "cmake" => :build def install mkdir "build" do system "cmake", "../src", "-DBUILD_EXAMPLES:BOOL=OFF", "-DBUILD_TEST:BOOL=OFF", *std_cmake_args system "make", "install" end pkgshare.install "src/examples" end test do system ENV.cxx, pkgshare/"examples/fft1d.c", "-I#{include}", "-L#{lib}", "-lclFFT", "-framework", "OpenCL", "-o", "fft1d" assert_match "one dimensional array of size N = 16", shell_output("./fft1d") end end
robohack/homebrew-core
Formula/clfft.rb
Ruby
bsd-2-clause
1,197
// Copyright 2015 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include <memory> #include "base/bind.h" #include "base/run_loop.h" #include "chrome/browser/extensions/extension_browsertest.h" #include "chrome/browser/profiles/profile.h" #include "chrome/browser/profiles/profile_io_data.h" #include "chrome/browser/ui/browser.h" #include "chrome/test/base/ui_test_utils.h" #include "content/public/browser/browser_thread.h" #include "extensions/common/extension.h" #include "extensions/test/result_catcher.h" #include "net/base/escape.h" #include "net/base/url_util.h" #include "net/ssl/client_cert_store.h" #include "net/ssl/ssl_server_config.h" #include "net/test/embedded_test_server/embedded_test_server.h" #include "url/gurl.h" namespace { std::unique_ptr<net::ClientCertStore> CreateNullCertStore() { return nullptr; } void InstallNullCertStoreFactoryOnIOThread( content::ResourceContext* resource_context) { ProfileIOData::FromResourceContext(resource_context) ->set_client_cert_store_factory_for_testing( base::Bind(&CreateNullCertStore)); } } // namespace class BackgroundXhrTest : public ExtensionBrowserTest { protected: void RunTest(const std::string& path, const GURL& url) { const extensions::Extension* extension = LoadExtension(test_data_dir_.AppendASCII("background_xhr")); ASSERT_TRUE(extension); extensions::ResultCatcher catcher; GURL test_url = net::AppendQueryParameter(extension->GetResourceURL(path), "url", url.spec()); ui_test_utils::NavigateToURL(browser(), test_url); ASSERT_TRUE(catcher.GetNextResult()); } }; // Test that fetching a URL using TLS client auth doesn't crash, hang, or // prompt. IN_PROC_BROWSER_TEST_F(BackgroundXhrTest, TlsClientAuth) { // Install a null ClientCertStore so the client auth prompt isn't bypassed due // to the system certificate store returning no certificates. 
base::RunLoop loop; content::BrowserThread::PostTaskAndReply( content::BrowserThread::IO, FROM_HERE, base::Bind(&InstallNullCertStoreFactoryOnIOThread, browser()->profile()->GetResourceContext()), loop.QuitClosure()); loop.Run(); // Launch HTTPS server. net::EmbeddedTestServer https_server(net::EmbeddedTestServer::TYPE_HTTPS); net::SSLServerConfig ssl_config; ssl_config.client_cert_type = net::SSLServerConfig::ClientCertType::REQUIRE_CLIENT_CERT; https_server.SetSSLConfig(net::EmbeddedTestServer::CERT_OK, ssl_config); https_server.ServeFilesFromSourceDirectory("content/test/data"); ASSERT_TRUE(https_server.Start()); ASSERT_NO_FATAL_FAILURE( RunTest("test_tls_client_auth.html", https_server.GetURL("/"))); } // Test that fetching a URL using HTTP auth doesn't crash, hang, or prompt. IN_PROC_BROWSER_TEST_F(BackgroundXhrTest, HttpAuth) { ASSERT_TRUE(embedded_test_server()->Start()); ASSERT_NO_FATAL_FAILURE(RunTest( "test_http_auth.html", embedded_test_server()->GetURL("/auth-basic"))); }
axinging/chromium-crosswalk
chrome/browser/extensions/background_xhr_browsertest.cc
C++
bsd-3-clause
3,128
/** * @license * Copyright The Closure Library Authors. * SPDX-License-Identifier: Apache-2.0 */ /** * @fileoverview Contains utility methods to extract text content from HTML. * @supported IE 10+, Chrome 26+, Firefox 22+, Safari 7.1+, Opera 15+ */ goog.provide('goog.html.textExtractor'); goog.require('goog.dom.TagName'); goog.require('goog.html.sanitizer.HtmlSanitizer'); goog.require('goog.object'); goog.require('goog.userAgent'); /** * Safely extracts text from an untrusted HTML string using the HtmlSanitizer. * Compared to goog.html.utils.stripHtmlTags, it tries to be smarter about * printing newlines between blocks and leave out textual content that would not * be displayed to the user (such as SCRIPT and STYLE tags). * @param {string} html The untrusted HTML string. * @return {string} */ // TODO(pelizzi): consider an optional bool parameter to also extract the text // content of alt attributes and such. goog.html.textExtractor.extractTextContent = function(html) { 'use strict'; if (!goog.html.textExtractor.isSupported()) { return ''; } // Disable all attributes except style to protect against DOM clobbering. var sanitizer = new goog.html.sanitizer.HtmlSanitizer.Builder() .onlyAllowAttributes(['style']) .allowCssStyles() .build(); // The default policy of the sanitizer strips the content of tags such as // SCRIPT and STYLE, whose non-textual content would otherwise end up in the // extracted text. var sanitizedNodes = sanitizer.sanitizeToDomNode(html); // textContent and innerText do not handle spacing between block elements // properly. We need to reimplement a similar algorithm ourselves and account // for spacing between block elements. return goog.html.textExtractor.extractTextContentFromNode_(sanitizedNodes) .trim(); }; /** * Recursively extract text from the supplied DOM node and its descendants. 
* @param {!Node} node * @return {string} * @private */ goog.html.textExtractor.extractTextContentFromNode_ = function(node) { 'use strict'; switch (node.nodeType) { case Node.ELEMENT_NODE: var element = /** @type {!Element} */ (node); if (element.tagName == goog.dom.TagName.BR) { return '\n'; } var result = Array.prototype.map .call( node.childNodes, goog.html.textExtractor.extractTextContentFromNode_) .join(''); if (goog.html.textExtractor.isBlockElement_(element)) { result = '\n' + result + '\n'; } return result; case Node.TEXT_NODE: return node.nodeValue.replace(/\s+/g, ' ').trim(); default: return ''; } }; /** * A set of block elements. * @private @const {!Object<!goog.dom.TagName, boolean>} */ goog.html.textExtractor.BLOCK_ELEMENTS_ = goog.object.createSet( goog.dom.TagName.ADDRESS, goog.dom.TagName.BLOCKQUOTE, goog.dom.TagName.CENTER, goog.dom.TagName.DIV, goog.dom.TagName.DL, goog.dom.TagName.FIELDSET, goog.dom.TagName.FORM, goog.dom.TagName.H1, goog.dom.TagName.H2, goog.dom.TagName.H3, goog.dom.TagName.H4, goog.dom.TagName.H5, goog.dom.TagName.H6, goog.dom.TagName.HR, goog.dom.TagName.OL, goog.dom.TagName.P, goog.dom.TagName.PRE, goog.dom.TagName.TABLE, goog.dom.TagName.UL); /** * Returns true whether this is a block element, i.e. the browser would visually * separate the text content from the text content of the previous node. * @param {!Element} element * @return {boolean} * @private */ goog.html.textExtractor.isBlockElement_ = function(element) { 'use strict'; return element.style.display == 'block' || goog.html.textExtractor.BLOCK_ELEMENTS_.hasOwnProperty(element.tagName); }; /** * Whether the browser supports the text extractor. The extractor depends on the * HTML Sanitizer, which only supports IE starting from version 10. * Visible for testing. * @return {boolean} * @package */ goog.html.textExtractor.isSupported = function() { 'use strict'; return !goog.userAgent.IE || goog.userAgent.isVersionOrHigher(10); };
scheib/chromium
third_party/google-closure-library/closure/goog/html/textextractor.js
JavaScript
bsd-3-clause
4,176
<?php /** * Zend Framework * * LICENSE * * This source file is subject to the new BSD license that is bundled * with this package in the file LICENSE.txt. * It is also available through the world-wide-web at this URL: * http://framework.zend.com/license/new-bsd * If you did not receive a copy of the license and are unable to * obtain it through the world-wide-web, please send an email * to license@zend.com so we can send you a copy immediately. * * @category Zend * @package Zend_View * @subpackage UnitTests * @copyright Copyright (c) 2005-2014 Zend Technologies USA Inc. (http://www.zend.com) * @license http://framework.zend.com/license/new-bsd New BSD License * @version $Id$ */ // Call Zend_FormErrorsTest::main() if this source file is executed directly. if (!defined("PHPUnit_MAIN_METHOD")) { define("PHPUnit_MAIN_METHOD", "Zend_View_Helper_FormErrorsTest::main"); } require_once 'Zend/View/Helper/FormErrors.php'; require_once 'Zend/View.php'; /** * Test class for Zend_View_Helper_FormErrors * * @category Zend * @package Zend_View * @subpackage UnitTests * @copyright Copyright (c) 2005-2014 Zend Technologies USA Inc. (http://www.zend.com) * @license http://framework.zend.com/license/new-bsd New BSD License * @group Zend_View * @group Zend_View_Helper */ class Zend_View_Helper_FormErrorsTest extends PHPUnit_Framework_TestCase { /** * Runs the test methods of this class. * * @return void */ public static function main() { $suite = new PHPUnit_Framework_TestSuite("Zend_View_Helper_FormErrorsTest"); $result = PHPUnit_TextUI_TestRunner::run($suite); } /** * Sets up the fixture, for example, open a network connection. * This method is called before a test is executed. * * @return void */ public function setUp() { $this->view = new Zend_View(); $this->helper = new Zend_View_Helper_FormErrors(); $this->helper->setView($this->view); ob_start(); } /** * Tears down the fixture, for example, close a network connection. * This method is called after a test is executed. 
* * @return void */ public function tearDown() { ob_end_clean(); } public function testGetElementEndReturnsDefaultValue() { $this->assertEquals('</li></ul>', $this->helper->getElementEnd()); } public function testGetElementSeparatorReturnsDefaultValue() { $this->assertEquals('</li><li>', $this->helper->getElementSeparator()); } public function testGetElementStartReturnsDefaultValue() { $this->assertEquals('<ul%s><li>', $this->helper->getElementStart()); } public function testCanSetElementEndString() { $this->testGetElementEndReturnsDefaultValue(); $this->helper->setElementEnd('</pre></div>'); $this->assertEquals('</pre></div>', $this->helper->getElementEnd()); } public function testCanSetElementSeparatorString() { $this->testGetElementSeparatorReturnsDefaultValue(); $this->helper->setElementSeparator('<br />'); $this->assertEquals('<br />', $this->helper->getElementSeparator()); } public function testCanSetElementStartString() { $this->testGetElementStartReturnsDefaultValue(); $this->helper->setElementStart('<div><pre>'); $this->assertEquals('<div><pre>', $this->helper->getElementStart()); } public function testFormErrorsRendersUnorderedListByDefault() { $errors = array('foo', 'bar', 'baz'); $html = $this->helper->formErrors($errors); $this->assertContains('<ul', $html); foreach ($errors as $error) { $this->assertContains('<li>' . $error . '</li>', $html); } $this->assertContains('</ul>', $html); } public function testFormErrorsRendersWithSpecifiedStrings() { $this->helper->setElementStart('<dl><dt>') ->setElementSeparator('</dt><dt>') ->setElementEnd('</dt></dl>'); $errors = array('foo', 'bar', 'baz'); $html = $this->helper->formErrors($errors); $this->assertContains('<dl>', $html); foreach ($errors as $error) { $this->assertContains('<dt>' . $error . 
'</dt>', $html); } $this->assertContains('</dl>', $html); } public function testFormErrorsPreventsXssAttacks() { $errors = array( 'bad' => '\"><script>alert("xss");</script>', ); $html = $this->helper->formErrors($errors); $this->assertNotContains($errors['bad'], $html); $this->assertContains('&', $html); } public function testCanDisableEscapingErrorMessages() { $errors = array( 'foo' => '<b>Field is required</b>', 'bar' => '<a href="/help">Please click here for more information</a>' ); $html = $this->helper->formErrors($errors, array('escape' => false)); $this->assertContains($errors['foo'], $html); $this->assertContains($errors['bar'], $html); } /** * @group ZF-3477 * @link http://framework.zend.com/issues/browse/ZF-3477 */ public function testCanSetClassAttribute() { $options = array('class' => 'custom-class'); $actualHtml = $this->helper->formErrors(array(), $options); $this->assertEquals( '<ul class="custom-class"><li></li></ul>', $actualHtml ); } /** * @group ZF-5962 */ public function testCanSetElementStringsPerOptions() { $actual = $this->helper->formErrors( array('foo', 'bar', 'baz'), array( 'elementStart' => '<p>', 'elementEnd' => '</p>', 'elementSeparator' => '<br>', ) ); $this->assertEquals('<p>foo<br>bar<br>baz</p>', $actual); } } // Call Zend_View_Helper_FormErrorsTest::main() if this source file is executed directly. if (PHPUnit_MAIN_METHOD == "Zend_View_Helper_FormErrorsTest::main") { Zend_View_Helper_FormErrorsTest::main(); }
lyft/zf1
tests/Zend/View/Helper/FormErrorsTest.php
PHP
bsd-3-clause
6,251
void AddTaskGammaDeltaPID(Int_t gFilterBit = 768,Double_t fPtMin=0.2,Double_t fPtMax=2.0,Double_t fEtaMin=-0.8, Double_t fEtaMax=0.8,Double_t fChi2max=4.0,Int_t gNclustTPC=70, Int_t fparticle=0,Double_t nSigTPC = 3.0, Double_t nSigTOF = 3.0, Bool_t bSkipPileUp=kFALSE, TString sCentEstimator="V0M", Float_t fVzMin = -10.0, Float_t fVzMax = 10.0, TString sTrigger="kINT7", Int_t vnHarmonic=2, TString sDetForEP="TPC", TString sMCfilePath="alien:///alice/cern.ch/user/m/mhaque/nuanue18/HijingMC_LHC18q_FB768_DeftCut.root", TString sNUAFilePath = "alien:///alice/cern.ch/user/m/mhaque/nuanue18/wgtCharge_NUAFB768NoPUcutRun296244.root", TString sDetWgtsFile = "alien:///alice/cern.ch/user/m/mhaque/nuanue18/wgtCharge_NUAFB768NoPUcutRun296244.root", Bool_t bSkipAnalysis=kFALSE, const char *suffix = "") { printf("===================================================================================\n"); printf(" Initialising Task: AddTaskGammaDeltaPID \n"); printf("===================================================================================\n"); AliAnalysisManager *mgr = AliAnalysisManager::GetAnalysisManager(); TString outfileName = AliAnalysisManager::GetCommonFileName(); AliAnalysisDataContainer *cinput = mgr->GetCommonInputContainer(); // AOD event TString list1OutName = outfileName; // common outfile filename list1OutName += ":Results"; // This directory contains result histograms TString TaskName; TaskName.Form("gTaskGammaDeltaPID%d_%d_%s", gFilterBit, gNclustTPC, suffix); AliAnalysisTaskGammaDeltaPID *taskGammaPID = new AliAnalysisTaskGammaDeltaPID(TaskName); ///-------> Analysis Object Created, now pass the arguments if(sTrigger=="kMB" || sTrigger=="kmb" || sTrigger=="MB"){ // if We want MB Trigger taskGammaPID->SelectCollisionCandidates(AliVEvent::kMB); printf("\n =========> AddTaskCMW::Info() Trigger = kMB \n"); } else if(sTrigger=="kSemiCentral" || sTrigger=="SemiCentral" || sTrigger=="semicentral"){ taskGammaPID->SelectCollisionCandidates(AliVEvent::kSemiCentral); 
printf("\n =========> AddTaskCMW::Info() Trigger = kSemiCentral \n"); } else if(sTrigger=="kCentral" || sTrigger=="Central" || sTrigger=="central"){ taskGammaPID->SelectCollisionCandidates(AliVEvent::kCentral); printf("\n =========> AddTaskCMW::Info() Trigger = kCentral \n"); } else if(sTrigger=="kAny" || sTrigger=="kAll"){ taskGammaPID->SelectCollisionCandidates(AliVEvent::kINT7 | AliVEvent::kSemiCentral | AliVEvent::kCentral); } else{//if trigger==kINT7 or no trigger provided: taskGammaPID->SelectCollisionCandidates(AliVEvent::kINT7); // default is kINT7 printf("\n =========> AddTaskCMW::Info() Trigger = kINT7 \n"); } ///Set Event cuts: taskGammaPID->SetVzRangeMin(fVzMin); taskGammaPID->SetVzRangeMax(fVzMax); taskGammaPID->SetFlagSkipPileUpCuts(bSkipPileUp); taskGammaPID->SetFlagSkipAnalysis(bSkipAnalysis); cout<<"=========> AddTaskCMW::Info() setting Event Plane Det: "<<sDetForEP<<endl; taskGammaPID->SetDetectorforEventPlane(sDetForEP); if(sCentEstimator=="V0" || sCentEstimator=="V0M"){ taskGammaPID->SetCentralityEstimator("V0M"); } else{ taskGammaPID->SetCentralityEstimator(sCentEstimator); // use the Estimator provided in AddTask. 
} //Set Track cuts: taskGammaPID->SetPtRangeMin(fPtMin); taskGammaPID->SetPtRangeMax(fPtMax); taskGammaPID->SetEtaRangeMin(fEtaMin); taskGammaPID->SetEtaRangeMax(fEtaMax); taskGammaPID->SetTrackCutChi2Min(0.1); taskGammaPID->SetTrackCutdEdxMin(10.0); taskGammaPID->SetFilterBit(gFilterBit); taskGammaPID->SetNSigmaCutTPC(nSigTPC); /// For PID only.Does not apply to Inclusive Charged Tracks taskGammaPID->SetNSigmaCutTOF(nSigTOF); taskGammaPID->SetParticlePID(fparticle); taskGammaPID->SetTrackCutChi2Max(fChi2max); taskGammaPID->SetFlagUseKinkTracks(kFALSE); taskGammaPID->SetCumulantHarmonic(vnHarmonic); taskGammaPID->SetTrackCutNclusterMin(gNclustTPC); Bool_t bFillLambda=kFALSE; taskGammaPID->SetFlagAnalyseLambda(bFillLambda); /// -----> Separate AddTask Added For Lambda-X correlation /// AddTaskGammaDeltaPID.C //========================= Setup Correction Files ======================> TFile *fMCFile = TFile::Open(sMCfilePath,"READ"); TList *fListMC=NULL; if(fMCFile) { fListMC = dynamic_cast <TList*> (fMCFile->FindObjectAny("fMcEffiHij")); if(fListMC) { taskGammaPID->SetListForTrkCorr(fListMC); } else{ printf("\n\n *** AddTask::WARNING \n => MC file Exist, But TList Not Found!!! \n AddTask::Info() ===> NO MC Correction!! \n\n"); } } else{ printf("\n\n *** AddTask::WARNING \n => no MC file!!! \n AddTask::Info() ===> NO MC Correction!! \n\n"); } //-------------------------------------------------------------------------- std::cout<<" NUA file Path "<<sNUAFilePath.Data()<<std::endl; TFile* fNUAFile = TFile::Open(sNUAFilePath,"READ"); TList* fListNUA=NULL; //if(fNUAFile->IsOpen()) { if(fNUAFile){ fListNUA = dynamic_cast <TList*> (fNUAFile->FindObjectAny("fNUA_ChPosChNeg")); std::cout<<" \n ==============> TList found for NUA, here is all the histograms : "<<std::endl; //fListNUA->ls(); if(fListNUA) { taskGammaPID->SetListForNUACorr(fListNUA); } else{ printf("\n\n *** AddTask::WARNING => NUA file Exist,But TList Not Found!!\n AddTask::Info() ===> NO NUA Correction!! 
\n\n"); } } else{ printf("\n\n *** AddTask::WARNING => NUA file not Found or Wrong path Set in AddTask Macro!! \n\n"); } //----------------------------------------------------------------------------- TFile* fV0ZDCWgtsFile = TFile::Open(sDetWgtsFile,"READ"); TList* fListDetWgts=NULL; if(fV0ZDCWgtsFile) { fListDetWgts = dynamic_cast <TList*> (fV0ZDCWgtsFile->FindObjectAny("fWgtsV0ZDC")); std::cout<<" \n ==============> TList found for V0/ZDC wgts.. GOOD! "; // fListDetWgts->ls(); if(fListDetWgts) { taskGammaPID->SetListForV0MCorr(fListDetWgts); } else{ printf("\n\n *** AddTask::WARNING => V0/ZDC Weights file Exist, But TList Not Found!!"); printf("\n May be wrong TList name? No Correction for V0/ZDC !! \n\n"); } } else{ printf("\n\n *** AddTask::WARNING => NO File Found for V0/ZDC Wgts!!\n AddTask::Info() ===> No V0/ZDC Correction!! \n\n"); } //================================================================================= ///---> Now Pass data and containers to Analysis Object ---- mgr->AddTask(taskGammaPID); // connect the task to the analysis manager mgr->ConnectInput(taskGammaPID, 0, cinput); // give AOD event to my Task..!! AliAnalysisDataContainer *cOutPut1; TString sMyOutName; sMyOutName.Form("SimpleTask_%s",suffix); cOutPut1 = (AliAnalysisDataContainer *) mgr->CreateContainer(sMyOutName,TList::Class(),AliAnalysisManager::kOutputContainer,list1OutName.Data()); mgr->ConnectOutput(taskGammaPID, 1, cOutPut1); printf("\n\n ================> AddTask was Configured properly... <==================\n\n"); //return taskGammaPID; }//Task Ends
nschmidtALICE/AliPhysics
PWGCF/FLOW/macros/AddTaskGammaDeltaPID.C
C++
bsd-3-clause
7,275
/* Copyright 2017 Google Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreedto in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package mysql import ( "strings" "testing" ) func TestParseGTID(t *testing.T) { flavor := "fake flavor" gtidParsers[flavor] = func(s string) (GTID, error) { return fakeGTID{value: s}, nil } input := "12345" want := fakeGTID{value: "12345"} got, err := ParseGTID(flavor, input) if err != nil { t.Errorf("unexpected error: %v", err) } if got != want { t.Errorf("ParseGTID(%#v, %#v) = %#v, want %#v", flavor, input, got, want) } } func TestMustParseGTID(t *testing.T) { flavor := "fake flavor" gtidParsers[flavor] = func(s string) (GTID, error) { return fakeGTID{value: s}, nil } input := "12345" want := fakeGTID{value: "12345"} got := MustParseGTID(flavor, input) if got != want { t.Errorf("MustParseGTID(%#v, %#v) = %#v, want %#v", flavor, input, got, want) } } func TestMustParseGTIDError(t *testing.T) { defer func() { want := `parse error: unknown GTID flavor "unknown flavor !@$!@"` err := recover() if err == nil { t.Errorf("wrong error, got %#v, want %#v", err, want) } got, ok := err.(error) if !ok || !strings.HasPrefix(got.Error(), want) { t.Errorf("wrong error, got %#v, want %#v", got, want) } }() MustParseGTID("unknown flavor !@$!@", "yowzah") } func TestParseUnknownFlavor(t *testing.T) { want := `parse error: unknown GTID flavor "foobar8675309"` _, err := ParseGTID("foobar8675309", "foo") if !strings.HasPrefix(err.Error(), want) { t.Errorf("wrong error, got '%v', want '%v'", err, want) } } func TestEncodeGTID(t *testing.T) { 
input := fakeGTID{ flavor: "myflav", value: "1:2:3-4-5-6", } want := "myflav/1:2:3-4-5-6" if got := EncodeGTID(input); got != want { t.Errorf("EncodeGTID(%#v) = %#v, want %#v", input, got, want) } } func TestDecodeGTID(t *testing.T) { gtidParsers["flavorflav"] = func(s string) (GTID, error) { return fakeGTID{value: s}, nil } input := "flavorflav/123-456:789" want := fakeGTID{value: "123-456:789"} got, err := DecodeGTID(input) if err != nil { t.Errorf("unexpected error: %v", err) } if got != want { t.Errorf("DecodeGTID(%#v) = %#v, want %#v", input, got, want) } } func TestMustDecodeGTID(t *testing.T) { gtidParsers["flavorflav"] = func(s string) (GTID, error) { return fakeGTID{value: s}, nil } input := "flavorflav/123-456:789" want := fakeGTID{value: "123-456:789"} got := MustDecodeGTID(input) if got != want { t.Errorf("DecodeGTID(%#v) = %#v, want %#v", input, got, want) } } func TestMustDecodeGTIDError(t *testing.T) { defer func() { want := `parse error: unknown GTID flavor "unknown flavor !@$!@"` err := recover() if err == nil { t.Errorf("wrong error, got %#v, want %#v", err, want) } got, ok := err.(error) if !ok || !strings.HasPrefix(got.Error(), want) { t.Errorf("wrong error, got %#v, want %#v", got, want) } }() MustDecodeGTID("unknown flavor !@$!@/yowzah") } func TestEncodeNilGTID(t *testing.T) { input := GTID(nil) want := "" if got := EncodeGTID(input); got != want { t.Errorf("EncodeGTID(%#v) = %#v, want %#v", input, got, want) } } func TestDecodeNilGTID(t *testing.T) { input := "" want := GTID(nil) got, err := DecodeGTID(input) if err != nil { t.Errorf("unexpected error: %v", err) } if got != want { t.Errorf("DecodeGTID(%#v) = %#v, want %#v", input, got, want) } } func TestDecodeNoFlavor(t *testing.T) { gtidParsers[""] = func(s string) (GTID, error) { return fakeGTID{value: s}, nil } input := "12345" want := fakeGTID{value: "12345"} got, err := DecodeGTID(input) if err != nil { t.Errorf("unexpected error: %v", err) } if got != want { t.Errorf("DecodeGTID(%#v) 
= %#v, want %#v", input, got, want) } } func TestDecodeGTIDWithSeparator(t *testing.T) { gtidParsers["moobar"] = func(s string) (GTID, error) { return fakeGTID{value: s}, nil } input := "moobar/GTID containing / a slash" want := fakeGTID{value: "GTID containing / a slash"} got, err := DecodeGTID(input) if err != nil { t.Errorf("unexpected error: %v", err) } if got != want { t.Errorf("DecodeGTID(%#v) = %#v, want %#v", input, got, want) } } type fakeGTID struct { flavor, value string } func (f fakeGTID) String() string { return f.value } func (f fakeGTID) Flavor() string { return f.flavor } func (fakeGTID) SourceServer() interface{} { return int(1) } func (fakeGTID) SequenceNumber() interface{} { return int(1) } func (fakeGTID) SequenceDomain() interface{} { return int(1) } func (f fakeGTID) GTIDSet() GTIDSet { return nil } func (fakeGTID) ContainsGTID(GTID) bool { return false } func (fakeGTID) Contains(GTIDSet) bool { return false } func (f fakeGTID) Equal(other GTIDSet) bool { otherFake, ok := other.(fakeGTID) if !ok { return false } return f == otherFake } func (fakeGTID) AddGTID(GTID) GTIDSet { return nil }
NazarethCollege/heweb2017-devops-presentation
sites/tweetheat/src/backend/vendor/src/github.com/youtube/vitess/go/mysql/gtid_test.go
GO
mit
5,366
<?php class ActiveRecordTest extends DatabaseTest { public function set_up($connection_name=null) { parent::set_up($connection_name); $this->options = array('conditions' => 'blah', 'order' => 'blah'); } public function test_options_is_not() { $this->assert_false(Author::is_options_hash(null)); $this->assert_false(Author::is_options_hash('')); $this->assert_false(Author::is_options_hash('tito')); $this->assert_false(Author::is_options_hash(array())); $this->assert_false(Author::is_options_hash(array(1,2,3))); } /** * @expectedException ActiveRecord\ActiveRecordException */ public function test_options_hash_with_unknown_keys() { $this->assert_false(Author::is_options_hash(array('conditions' => 'blah', 'sharks' => 'laserz', 'dubya' => 'bush'))); } public function test_options_is_hash() { $this->assert_true(Author::is_options_hash($this->options)); } public function test_extract_and_validate_options() { $args = array('first',$this->options); $this->assert_equals($this->options,Author::extract_and_validate_options($args)); $this->assert_equals(array('first'),$args); } public function test_extract_and_validate_options_with_array_in_args() { $args = array('first',array(1,2),$this->options); $this->assert_equals($this->options,Author::extract_and_validate_options($args)); } public function test_extract_and_validate_options_removes_options_hash() { $args = array('first',$this->options); Author::extract_and_validate_options($args); $this->assert_equals(array('first'),$args); } public function test_extract_and_validate_options_nope() { $args = array('first'); $this->assert_equals(array(),Author::extract_and_validate_options($args)); $this->assert_equals(array('first'),$args); } public function test_extract_and_validate_options_nope_because_wasnt_at_end() { $args = array('first',$this->options,array(1,2)); $this->assert_equals(array(),Author::extract_and_validate_options($args)); } /** * @expectedException ActiveRecord\UndefinedPropertyException */ public function 
test_invalid_attribute() { $author = Author::find('first',array('conditions' => 'author_id=1')); $author->some_invalid_field_name; } public function test_invalid_attributes() { $book = Book::find(1); try { $book->update_attributes(array('name' => 'new name', 'invalid_attribute' => true , 'another_invalid_attribute' => 'something')); } catch (ActiveRecord\UndefinedPropertyException $e) { $exceptions = explode("\r\n", $e->getMessage()); } $this->assert_equals(1, substr_count($exceptions[0], 'invalid_attribute')); $this->assert_equals(1, substr_count($exceptions[1], 'another_invalid_attribute')); } public function test_getter_undefined_property_exception_includes_model_name() { $this->assert_exception_message_contains("Author->this_better_not_exist",function() { $author = new Author(); $author->this_better_not_exist; }); } public function test_mass_assignment_undefined_property_exception_includes_model_name() { $this->assert_exception_message_contains("Author->this_better_not_exist",function() { new Author(array("this_better_not_exist" => "hi")); }); } public function test_setter_undefined_property_exception_includes_model_name() { $this->assert_exception_message_contains("Author->this_better_not_exist",function() { $author = new Author(); $author->this_better_not_exist = "hi"; }); } public function test_get_values_for() { $book = Book::find_by_name('Ancient Art of Main Tanking'); $ret = $book->get_values_for(array('book_id','author_id')); $this->assert_equals(array('book_id','author_id'),array_keys($ret)); $this->assert_equals(array(1,1),array_values($ret)); } public function test_hyphenated_column_names_to_underscore() { if ($this->conn instanceof ActiveRecord\OciAdapter) return; $keys = array_keys(RmBldg::first()->attributes()); $this->assert_true(in_array('rm_name',$keys)); } public function test_column_names_with_spaces() { if ($this->conn instanceof ActiveRecord\OciAdapter) return; $keys = array_keys(RmBldg::first()->attributes()); 
$this->assert_true(in_array('space_out',$keys)); } public function test_mixed_case_column_name() { $keys = array_keys(Author::first()->attributes()); $this->assert_true(in_array('mixedcasefield',$keys)); } public function test_mixed_case_primary_key_save() { $venue = Venue::find(1); $venue->name = 'should not throw exception'; $venue->save(); $this->assert_equals($venue->name,Venue::find(1)->name); } public function test_reload() { $venue = Venue::find(1); $this->assert_equals('NY', $venue->state); $venue->state = 'VA'; $this->assert_equals('VA', $venue->state); $venue->reload(); $this->assert_equals('NY', $venue->state); } public function test_reload_protected_attribute() { $book = BookAttrAccessible::find(1); $book->name = "Should not stay"; $book->reload(); $this->assert_not_equals("Should not stay", $book->name); } public function test_active_record_model_home_not_set() { $home = ActiveRecord\Config::instance()->get_model_directory(); ActiveRecord\Config::instance()->set_model_directory(__FILE__); $this->assert_equals(false,class_exists('TestAutoload')); ActiveRecord\Config::instance()->set_model_directory($home); } public function test_auto_load_with_namespaced_model() { $this->assert_true(class_exists('NamespaceTest\Book')); } public function test_namespace_gets_stripped_from_table_name() { $model = new NamespaceTest\Book(); $this->assert_equals('books',$model->table()->table); } public function test_namespace_gets_stripped_from_inferred_foreign_key() { $model = new NamespaceTest\Book(); $table = ActiveRecord\Table::load(get_class($model)); $this->assert_equals($table->get_relationship('parent_book')->foreign_key[0], 'book_id'); $this->assert_equals($table->get_relationship('parent_book_2')->foreign_key[0], 'book_id'); $this->assert_equals($table->get_relationship('parent_book_3')->foreign_key[0], 'book_id'); } public function test_namespaced_relationship_associates_correctly() { $model = new NamespaceTest\Book(); $table = 
ActiveRecord\Table::load(get_class($model)); $this->assert_not_null($table->get_relationship('parent_book')); $this->assert_not_null($table->get_relationship('parent_book_2')); $this->assert_not_null($table->get_relationship('parent_book_3')); $this->assert_not_null($table->get_relationship('pages')); $this->assert_not_null($table->get_relationship('pages_2')); $this->assert_null($table->get_relationship('parent_book_4')); $this->assert_null($table->get_relationship('pages_3')); // Should refer to the same class $this->assert_same( ltrim($table->get_relationship('parent_book')->class_name, '\\'), ltrim($table->get_relationship('parent_book_2')->class_name, '\\') ); // Should refer to different classes $this->assert_not_same( ltrim($table->get_relationship('parent_book_2')->class_name, '\\'), ltrim($table->get_relationship('parent_book_3')->class_name, '\\') ); // Should refer to the same class $this->assert_same( ltrim($table->get_relationship('pages')->class_name, '\\'), ltrim($table->get_relationship('pages_2')->class_name, '\\') ); } public function test_should_have_all_column_attributes_when_initializing_with_array() { $author = new Author(array('name' => 'Tito')); $this->assert_true(count(array_keys($author->attributes())) >= 9); } public function test_defaults() { $author = new Author(); $this->assert_equals('default_name',$author->name); } public function test_alias_attribute_getter() { $venue = Venue::find(1); $this->assert_equals($venue->marquee, $venue->name); $this->assert_equals($venue->mycity, $venue->city); } public function test_alias_attribute_setter() { $venue = Venue::find(1); $venue->marquee = 'new name'; $this->assert_equals($venue->marquee, 'new name'); $this->assert_equals($venue->marquee, $venue->name); $venue->name = 'another name'; $this->assert_equals($venue->name, 'another name'); $this->assert_equals($venue->marquee, $venue->name); } public function test_alias_from_mass_attributes() { $venue = new Venue(array('marquee' => 'meme', 'id' => 
123)); $this->assert_equals('meme',$venue->name); $this->assert_equals($venue->marquee,$venue->name); } public function test_gh18_isset_on_aliased_attribute() { $this->assert_true(isset(Venue::first()->marquee)); } public function test_attr_accessible() { $book = new BookAttrAccessible(array('name' => 'should not be set', 'author_id' => 1)); $this->assert_null($book->name); $this->assert_equals(1,$book->author_id); $book->name = 'test'; $this->assert_equals('test', $book->name); } public function test_attr_protected() { $book = new BookAttrAccessible(array('book_id' => 999)); $this->assert_null($book->book_id); $book->book_id = 999; $this->assert_equals(999, $book->book_id); } public function test_isset() { $book = new Book(); $this->assert_true(isset($book->name)); $this->assert_false(isset($book->sharks)); } public function test_readonly_only_halt_on_write_method() { $book = Book::first(array('readonly' => true)); $this->assert_true($book->is_readonly()); try { $book->save(); $this-fail('expected exception ActiveRecord\ReadonlyException'); } catch (ActiveRecord\ReadonlyException $e) { } $book->name = 'some new name'; $this->assert_equals($book->name, 'some new name'); } public function test_cast_when_using_setter() { $book = new Book(); $book->book_id = '1'; $this->assert_same(1,$book->book_id); } public function test_cast_when_loading() { $book = Book::find(1); $this->assert_same(1,$book->book_id); $this->assert_same('Ancient Art of Main Tanking',$book->name); } public function test_cast_defaults() { $book = new Book(); $this->assert_same(0.0,$book->special); } public function test_transaction_committed() { $original = Author::count(); $ret = Author::transaction(function() { Author::create(array("name" => "blah")); }); $this->assert_equals($original+1,Author::count()); $this->assert_true($ret); } public function test_transaction_committed_when_returning_true() { $original = Author::count(); $ret = Author::transaction(function() { Author::create(array("name" => 
"blah")); return true; }); $this->assert_equals($original+1,Author::count()); $this->assert_true($ret); } public function test_transaction_rolledback_by_returning_false() { $original = Author::count(); $ret = Author::transaction(function() { Author::create(array("name" => "blah")); return false; }); $this->assert_equals($original,Author::count()); $this->assert_false($ret); } public function test_transaction_rolledback_by_throwing_exception() { $original = Author::count(); $exception = null; try { Author::transaction(function() { Author::create(array("name" => "blah")); throw new Exception("blah"); }); } catch (Exception $e) { $exception = $e; } $this->assert_not_null($exception); $this->assert_equals($original,Author::count()); } public function test_delegate() { $event = Event::first(); $this->assert_equals($event->venue->state,$event->state); $this->assert_equals($event->venue->address,$event->address); } public function test_delegate_prefix() { $event = Event::first(); $this->assert_equals($event->host->name,$event->woot_name); } public function test_delegate_returns_null_if_relationship_does_not_exist() { $event = new Event(); $this->assert_null($event->state); } public function test_delegate_set_attribute() { $event = Event::first(); $event->state = 'MEXICO'; $this->assert_equals('MEXICO',$event->venue->state); } public function test_delegate_getter_gh_98() { Venue::$use_custom_get_state_getter = true; $event = Event::first(); $this->assert_equals('ny', $event->venue->state); $this->assert_equals('ny', $event->state); Venue::$use_custom_get_state_getter = false; } public function test_delegate_setter_gh_98() { Venue::$use_custom_set_state_setter = true; $event = Event::first(); $event->state = 'MEXICO'; $this->assert_equals('MEXICO#',$event->venue->state); Venue::$use_custom_set_state_setter = false; } public function test_table_name_with_underscores() { $this->assert_not_null(AwesomePerson::first()); } public function 
test_model_should_default_as_new_record() { $author = new Author(); $this->assert_true($author->is_new_record()); } public function test_setter() { $author = new Author(); $author->password = 'plaintext'; $this->assert_equals(md5('plaintext'),$author->encrypted_password); } public function test_setter_with_same_name_as_an_attribute() { $author = new Author(); $author->name = 'bob'; $this->assert_equals('BOB',$author->name); } public function test_getter() { $book = Book::first(); $this->assert_equals(strtoupper($book->name), $book->upper_name); } public function test_getter_with_same_name_as_an_attribute() { Book::$use_custom_get_name_getter = true; $book = new Book; $book->name = 'bob'; $this->assert_equals('BOB', $book->name); Book::$use_custom_get_name_getter = false; } public function test_setting_invalid_date_should_set_date_to_null() { $author = new Author(); $author->created_at = 'CURRENT_TIMESTAMP'; $this->assertNull($author->created_at); } public function test_table_name() { $this->assert_equals('authors',Author::table_name()); } /** * @expectedException ActiveRecord\ActiveRecordException */ public function test_undefined_instance_method() { Author::first()->find_by_name('sdf'); } public function test_clear_cache_for_specific_class() { $book_table1 = ActiveRecord\Table::load('Book'); $book_table2 = ActiveRecord\Table::load('Book'); ActiveRecord\Table::clear_cache('Book'); $book_table3 = ActiveRecord\Table::load('Book'); $this->assert_true($book_table1 === $book_table2); $this->assert_true($book_table1 !== $book_table3); } public function test_flag_dirty() { $author = new Author(); $author->flag_dirty('some_date'); $this->assert_has_keys('some_date', $author->dirty_attributes()); $this->assert_true($author->attribute_is_dirty('some_date')); $author->save(); $this->assert_false($author->attribute_is_dirty('some_date')); } public function test_flag_dirty_attribute_which_does_not_exit() { $author = new Author(); $author->flag_dirty('some_inexistant_property'); 
$this->assert_null($author->dirty_attributes()); $this->assert_false($author->attribute_is_dirty('some_inexistant_property')); } public function test_gh245_dirty_attribute_should_not_raise_php_notice_if_not_dirty() { $event = new Event(array('title' => "Fun")); $this->assert_false($event->attribute_is_dirty('description')); $this->assert_true($event->attribute_is_dirty('title')); } public function test_assigning_php_datetime_gets_converted_to_ar_datetime() { $author = new Author(); $author->created_at = $now = new \DateTime(); $this->assert_is_a("ActiveRecord\\DateTime",$author->created_at); $this->assert_datetime_equals($now,$author->created_at); } public function test_assigning_from_mass_assignment_php_datetime_gets_converted_to_ar_datetime() { $author = new Author(array('created_at' => new \DateTime())); $this->assert_is_a("ActiveRecord\\DateTime",$author->created_at); } public function test_get_real_attribute_name() { $venue = new Venue(); $this->assert_equals('name', $venue->get_real_attribute_name('name')); $this->assert_equals('name', $venue->get_real_attribute_name('marquee')); $this->assert_equals(null, $venue->get_real_attribute_name('invalid_field')); } public function test_id_setter_works_with_table_without_pk_named_attribute() { $author = new Author(array('id' => 123)); $this->assert_equals(123,$author->author_id); } public function test_query() { $row = Author::query('SELECT COUNT(*) AS n FROM authors',null)->fetch(); $this->assert_true($row['n'] > 1); $row = Author::query('SELECT COUNT(*) AS n FROM authors WHERE name=?',array('Tito'))->fetch(); $this->assert_equals(array('n' => 1), $row); } }; ?>
quochieuhcm/ci-vs-phpactiverecord
vendor/php-activerecord/php-activerecord/test/ActiveRecordTest.php
PHP
mit
16,383
define({ root: ({ _widgetLabel: "Demo", label1: "Ich bin ein Demo-Widget.", label2: "Dies ist konfigurierbar." }), "ar": 0, "cs": 0, "da": 0, "de": 0, "el": 0, "es": 0, "et": 0, "fi": 0, "fr": 0, "he": 0, "it": 0, "ja": 0, "ko": 0, "lt": 0, "lv": 0, "nb": 0, "nl": 0, "pl": 0, "pt-br": 0, "pt-pt": 0, "ro": 0, "ru": 0, "sv": 0, "th": 0, "tr": 0, "vi": 0, "zh-cn": 1 });
cmccullough2/cmv-wab-widgets
wab/2.3/widgets/samplewidgets/Demo/nls/de/strings.js
JavaScript
mit
443
// // Device.cs // // Author: // Alex Launi <alex.launi@gmail.com> // // Copyright (c) 2010 Alex Launi // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. #if ENABLE_GIO_HARDWARE using System; using System.Collections.Generic; using System.Linq; using GLib; using Banshee.Hardware; namespace Banshee.Hardware.Gio { /// <summary> /// A Device is a wrapper around the two metadata source, udev and gio. Banshee needs information /// from both sources, so this Device class is meant to provide a level of abstraction. 
/// </summary> abstract class RawDevice : IEquatable<RawDevice>, IComparable<RawDevice>, IRawDevice { const string UdevDevicePath = "DEVNAME"; RawDevice IRawDevice.Device { get { return this; } } public string DeviceNode { get { return UdevMetadata.GetPropertyString (UdevDevicePath); } } internal GioMetadataSource GioMetadata { get; private set; } internal UdevMetadataSource UdevMetadata { get; private set; } public abstract string Identifier { get; } public abstract string IdMediaPlayer { get; } public abstract bool IsRemovable { get; } public IDeviceMediaCapabilities MediaCapabilities { get; private set; } public abstract string Name { get; } public Manager Manager { get; private set; } public abstract string Model { get; } public abstract string Product { get; } public abstract string Serial { get; } public abstract string Subsystem { get; } public abstract string Uuid { get; } public abstract string Vendor { get; } protected RawDevice (Manager manager, GioMetadataSource gioMetadata, UdevMetadataSource udevMetadata) { Manager = manager; GioMetadata = gioMetadata; UdevMetadata = udevMetadata; if (!string.IsNullOrEmpty (IdMediaPlayer)) MediaCapabilities = new DeviceMediaCapabilities (IdMediaPlayer); } public bool Equals (RawDevice other) { return Identifier == other.Identifier; } public int CompareTo (RawDevice other) { return string.Compare (Identifier, other.Identifier); } public override int GetHashCode () { return Identifier.GetHashCode (); } public abstract string GetPropertyString (string key); public abstract double GetPropertyDouble (string key); public abstract bool GetPropertyBoolean (string key); public abstract int GetPropertyInteger (string key); public abstract ulong GetPropertyUInt64 (string key); public abstract string[] GetPropertyStringList (string key); public abstract bool PropertyExists (string key); } } #endif
mono-soc-2011/banshee
src/Backends/Banshee.Gio/Banshee.Hardware.Gio/LowLevel/RawDevice.cs
C#
mit
4,171
#if NETFX_CORE && !UNITY_EDITOR //using Thread = MarkerMetro.Unity.WinLegacy.Threading.Thread; //using ParameterizedThreadStart = MarkerMetro.Unity.WinLegacy.Threading.ParameterizedThreadStart; #endif using UnityEngine; using System.Collections; using System.Threading; namespace Pathfinding.Threading { }
MartinHartmannJensen/Unity2Dshooter
topdown shooter/Assets/AstarPathfindingProject/Utilities/AstarParallel.cs
C#
mit
307
package org.knowm.xchange.lakebtc.dto; import com.fasterxml.jackson.annotation.JsonProperty; /** User: cristian.lucaci Date: 10/3/2014 Time: 5:31 PM */ public class LakeBTCResponse<V> { private final String id; private final V result; /** * Constructor * * @param id * @param result */ public LakeBTCResponse(@JsonProperty("id") String id, @JsonProperty("result") V result) { this.id = id; this.result = result; } public V getResult() { return result; } public String getId() { return id; } @Override public String toString() { return String.format("LakeBTCResponse{id=%s, result=%s}", id, result); } }
stachon/XChange
xchange-lakebtc/src/main/java/org/knowm/xchange/lakebtc/dto/LakeBTCResponse.java
Java
mit
669
package org.knowm.xchange.bitcoinaverage.dto.marketdata; import com.fasterxml.jackson.annotation.JsonAnySetter; import com.fasterxml.jackson.annotation.JsonProperty; import java.text.SimpleDateFormat; import java.util.Date; import java.util.HashMap; import java.util.Locale; import java.util.Map; /** Data object representing List of Tickers from BitcoinAverage */ public final class BitcoinAverageTickers { private Map<String, BitcoinAverageTicker> tickers = new HashMap<>(); private Date timestamp; // Could alternatively add setters, but since these are mandatory public BitcoinAverageTickers(@JsonProperty("timestamp") String timestamp) { try { // Parse the timestamp into a Date object this.timestamp = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss Z", Locale.getDefault()).parse(timestamp); } catch (Exception e) { this.timestamp = null; } } @JsonAnySetter public void setTickers(String name, BitcoinAverageTicker ticker) { this.tickers.put(name, ticker); } public Map<String, BitcoinAverageTicker> getTickers() { return tickers; } public Date getTimestamp() { return timestamp; } @Override public String toString() { return "BitcoinAverageTicker [tickers=" + tickers + ", timestamp=" + timestamp + "]"; } }
stachon/XChange
xchange-bitcoinaverage/src/main/java/org/knowm/xchange/bitcoinaverage/dto/marketdata/BitcoinAverageTickers.java
Java
mit
1,315
import { SQLValue } from "./prepSQLParams"; export interface SQLInsertParams { insertColumns: string; insertValues: string; insertFields: Record<string, SQLValue>; } declare function prepSQLInsertParams<T extends Record<string, SQLValue>>( params: T, columns: Array<{ key: keyof T }>, ): SQLInsertParams; export default prepSQLInsertParams;
markogresak/DefinitelyTyped
types/lesgo/utils/prepSQLInsertParams.d.ts
TypeScript
mit
364
package parser import ( "regexp" "strings" ) var selfClosingTags = [...]string{ "meta", "img", "link", "input", "source", "area", "base", "col", "br", "hr", } var doctypes = map[string]string{ "5": `<!DOCTYPE html>`, "default": `<!DOCTYPE html>`, "xml": `<?xml version="1.0" encoding="utf-8" ?>`, "transitional": `<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">`, "strict": `<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">`, "frameset": `<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Frameset//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-frameset.dtd">`, "1.1": `<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">`, "basic": `<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML Basic 1.1//EN" "http://www.w3.org/TR/xhtml-basic/xhtml-basic11.dtd">`, "mobile": `<!DOCTYPE html PUBLIC "-//WAPFORUM//DTD XHTML Mobile 1.2//EN" "http://www.openmobilealliance.org/tech/DTD/xhtml-mobile12.dtd">`, } type Node interface { Pos() SourcePosition } type SourcePosition struct { LineNum int ColNum int TokenLength int Filename string } func (s *SourcePosition) Pos() SourcePosition { return *s } type Doctype struct { SourcePosition Value string } func newDoctype(value string) *Doctype { dt := new(Doctype) dt.Value = value return dt } func (d *Doctype) String() string { if defined := doctypes[d.Value]; len(defined) != 0 { return defined } return `<!DOCTYPE ` + d.Value + `>` } type Comment struct { SourcePosition Value string Block *Block Silent bool } func newComment(value string) *Comment { dt := new(Comment) dt.Value = value dt.Block = nil dt.Silent = false return dt } type Text struct { SourcePosition Value string Raw bool } func newText(value string, raw bool) *Text { dt := new(Text) dt.Value = value dt.Raw = raw return dt } type Block struct { SourcePosition Children []Node } func newBlock() *Block { block := new(Block) 
block.Children = make([]Node, 0) return block } func (b *Block) push(node Node) { b.Children = append(b.Children, node) } func (b *Block) pushFront(node Node) { b.Children = append([]Node{node}, b.Children...) } func (b *Block) CanInline() bool { if len(b.Children) == 0 { return true } allText := true for _, child := range b.Children { if txt, ok := child.(*Text); !ok || txt.Raw { allText = false break } } return allText } const ( NamedBlockDefault = iota NamedBlockAppend NamedBlockPrepend ) type NamedBlock struct { Block Name string Modifier int } func newNamedBlock(name string) *NamedBlock { bb := new(NamedBlock) bb.Name = name bb.Block.Children = make([]Node, 0) bb.Modifier = NamedBlockDefault return bb } type Attribute struct { SourcePosition Name string Value string IsRaw bool Condition string } type Tag struct { SourcePosition Block *Block Name string IsInterpolated bool Attributes []Attribute } func newTag(name string) *Tag { tag := new(Tag) tag.Block = nil tag.Name = name tag.Attributes = make([]Attribute, 0) tag.IsInterpolated = false return tag } func (t *Tag) IsSelfClosing() bool { for _, tag := range selfClosingTags { if tag == t.Name { return true } } return false } func (t *Tag) IsRawText() bool { return t.Name == "style" || t.Name == "script" } type Condition struct { SourcePosition Positive *Block Negative *Block Expression string } func newCondition(exp string) *Condition { cond := new(Condition) cond.Expression = exp return cond } type Each struct { SourcePosition X string Y string Expression string Block *Block } func newEach(exp string) *Each { each := new(Each) each.Expression = exp return each } type Assignment struct { SourcePosition X string Expression string } func newAssignment(x, expression string) *Assignment { assgn := new(Assignment) assgn.X = x assgn.Expression = expression return assgn } type Mixin struct { SourcePosition Block *Block Name string Args []string } func newMixin(name, args string) *Mixin { mixin := new(Mixin) mixin.Name 
= name delExp := regexp.MustCompile(`,\s`) mixin.Args = delExp.Split(args, -1) for i := 0; i < len(mixin.Args); i++ { mixin.Args[i] = strings.TrimSpace(mixin.Args[i]) if mixin.Args[i] == "" { mixin.Args = append(mixin.Args[:i], mixin.Args[i+1:]...) i-- } } return mixin } type MixinCall struct { SourcePosition Name string Args []string } func newMixinCall(name, args string) *MixinCall { mixinCall := new(MixinCall) mixinCall.Name = name if args != "" { const t = "%s" quoteExp := regexp.MustCompile(`"(.*?)"`) delExp := regexp.MustCompile(`,\s`) quotes := quoteExp.FindAllString(args, -1) replaced := quoteExp.ReplaceAllString(args, t) mixinCall.Args = delExp.Split(replaced, -1) qi := 0 for i, arg := range mixinCall.Args { if arg == t { mixinCall.Args[i] = quotes[qi] qi++ } } } return mixinCall }
grvcoelho/webhulk
vendor/gopkg.in/kataras/iris.v8/vendor/github.com/eknkc/amber/parser/nodes.go
GO
mit
5,199
// { dg-do assemble } // Copyright (C) 1999, 2000, 2002, 2003 Free Software Foundation, Inc. // Contributed by Nathan Sidwell 22 Apr 1999 <nathan@acm.org> // derived from a bug report by <rch@larissa.sd.bi.ruhr-uni-bochum.de> // http://gcc.gnu.org/ml/gcc-bugs/1999-04n/msg00631.html // the code is wrong, but we fell over badly struct A { int A::fn(); // { dg-error "7:extra qualification" } int A::m; // { dg-error "7:extra qualification" } struct e; struct A::e {int i;}; // { dg-error "10:extra qualification" "qual" } struct A::expand { // { dg-error "qualified name" } int m; }; struct Z; expand me; // { dg-error "'expand' does not name a type" } void foo(struct A::e); void foo(struct A::z); // { dg-error "incomplete" } }; struct Q; struct B { struct A::fink { // { dg-error "does not name a class before" } int m; }; struct A::Z { // { dg-error "does not enclose" } A::Z not a member of B int m; }; int m; int n; struct ::Q { // { dg-error "global qual" } ::Q not a member of B int m; }; int A::fn() { // { dg-error "7:cannot define member" } A::fn not a member of B return 0; } void fn(struct ::Q &); void foo(struct A::y); // { dg-error "does not name a type" } no such member }; struct ::C { // { dg-error "invalid before" } extra qualification int i; }; namespace N { int fn(); struct F; } namespace NMS { void NMS::fn(); // { dg-error "should have been" } int NMS::i; // { dg-error "should have been" } struct NMS::D { // { dg-error "does not name a class" } int i; }; struct N::E { // { dg-error "does not name a class" } no such type int i; }; struct ::F { // { dg-error "global qual" } no such type int i; }; int N::fn() { // { dg-error "namespace" } N::fn not a member of NMS return 0; } struct N::F { // { dg-error "namespace" } N::F not a member of NMS int i; }; } NMS::D thing; // { dg-error "'D' in namespace 'NMS' does not name a type" } void NMS::fn() { i = 3; } // From PR c++/15766 - bad parse error recovery (2 bugs) void confusion1(const UndefinedType& a) // { dg-error 
"does not name a type" } { }
Gurgel100/gcc
gcc/testsuite/g++.old-deja/g++.other/decl5.C
C++
gpl-2.0
2,264
/** * */ package org.eevolution.model; import java.sql.ResultSet; import java.util.Properties; import org.compiere.util.CCache; /** * HR Period * @author Teo Sarca, www.arhipac.ro */ public class MHRPeriod extends X_HR_Period { /** * */ private static final long serialVersionUID = -7787966459848200539L; private static CCache<Integer, MHRPeriod> s_cache = new CCache<Integer, MHRPeriod>(Table_Name, 20); public static MHRPeriod get(Properties ctx, int HR_Period_ID) { if (HR_Period_ID <= 0) { return null; } // MHRPeriod period = s_cache.get(HR_Period_ID); if (period != null) { return period; } // Try Load period = new MHRPeriod(ctx, HR_Period_ID, null); if (period.get_ID() == HR_Period_ID) { s_cache.put(HR_Period_ID, period); } else { period = null; } return period; } public MHRPeriod(Properties ctx, int HR_Period_ID, String trxName) { super(ctx, HR_Period_ID, trxName); } public MHRPeriod(Properties ctx, ResultSet rs, String trxName) { super(ctx, rs, trxName); } }
erpcya/adempierePOS
org.eevolution.hr_and_payroll/src/main/java/base/org/eevolution/model/MHRPeriod.java
Java
gpl-2.0
1,057
<?php namespace GuzzleHttp; use GuzzleHttp\Event\CompleteEvent; use GuzzleHttp\Event\ErrorEvent; use GuzzleHttp\Event\RequestEvents; use GuzzleHttp\Message\ResponseInterface; use GuzzleHttp\UriTemplate; /** * Send a custom request * * @param string $method HTTP request method * @param string $url URL of the request * @param array $options Options to use with the request. * * @return ResponseInterface */ function request($method, $url, array $options = []) { static $client; if (!$client) { $client = new Client(); } return $client->send($client->createRequest($method, $url, $options)); } /** * Send a GET request * * @param string $url URL of the request * @param array $options Array of request options * * @return ResponseInterface */ function get($url, array $options = []) { return request('GET', $url, $options); } /** * Send a HEAD request * * @param string $url URL of the request * @param array $options Array of request options * * @return ResponseInterface */ function head($url, array $options = []) { return request('HEAD', $url, $options); } /** * Send a DELETE request * * @param string $url URL of the request * @param array $options Array of request options * * @return ResponseInterface */ function delete($url, array $options = []) { return request('DELETE', $url, $options); } /** * Send a POST request * * @param string $url URL of the request * @param array $options Array of request options * * @return ResponseInterface */ function post($url, array $options = []) { return request('POST', $url, $options); } /** * Send a PUT request * * @param string $url URL of the request * @param array $options Array of request options * * @return ResponseInterface */ function put($url, array $options = []) { return request('PUT', $url, $options); } /** * Send a PATCH request * * @param string $url URL of the request * @param array $options Array of request options * * @return ResponseInterface */ function patch($url, array $options = []) { return request('PATCH', $url, 
$options); } /** * Send an OPTIONS request * * @param string $url URL of the request * @param array $options Array of request options * * @return ResponseInterface */ function options($url, array $options = []) { return request('OPTIONS', $url, $options); } /** * Convenience method for sending multiple requests in parallel and retrieving * a hash map of requests to response objects or RequestException objects. * * Note: This method keeps every request and response in memory, and as such is * NOT recommended when sending a large number or an indeterminable number of * requests in parallel. * * @param ClientInterface $client Client used to send the requests * @param array|\Iterator $requests Requests to send in parallel * @param array $options Passes through the options available in * {@see GuzzleHttp\ClientInterface::sendAll()} * @return \SplObjectStorage Requests are the key and each value is a * {@see GuzzleHttp\Message\ResponseInterface} if the request succeeded or * a {@see GuzzleHttp\Exception\RequestException} if it failed. * @throws \InvalidArgumentException if the event format is incorrect. */ function batch(ClientInterface $client, $requests, array $options = []) { $hash = new \SplObjectStorage(); foreach ($requests as $request) { $hash->attach($request); } $handler = [ 'priority' => RequestEvents::EARLY, 'once' => true, 'fn' => function ($e) use ($hash) { $hash[$e->getRequest()] = $e; } ]; // Merge the necessary complete and error events to the event listeners so // that as each request succeeds or fails, it is added to the result hash. foreach (['complete', 'error'] as $name) { if (!isset($options[$name])) { $options[$name] = $handler; } elseif (is_callable($options[$name])) { $options[$name] = [['fn' => $options[$name]], $handler]; } elseif (is_array($options[$name])) { $options[$name][] = $handler; } else { throw new \InvalidArgumentException('Invalid event format'); } } // Send the requests in parallel and aggregate the results. 
$client->sendAll($requests, $options); // Update the received value for any of the intercepted requests. foreach ($hash as $request) { if ($hash[$request] instanceof CompleteEvent) { $hash[$request] = $hash[$request]->getResponse(); } elseif ($hash[$request] instanceof ErrorEvent) { $hash[$request] = $hash[$request]->getException(); } } return $hash; } /** * Gets a value from an array using a path syntax to retrieve nested data. * * This method does not allow for keys that contain "/". You must traverse * the array manually or using something more advanced like JMESPath to * work with keys that contain "/". * * // Get the bar key of a set of nested arrays. * // This is equivalent to $collection['foo']['baz']['bar'] but won't * // throw warnings for missing keys. * GuzzleHttp\get_path($data, 'foo/baz/bar'); * * @param array $data Data to retrieve values from * @param string $path Path to traverse and retrieve a value from * * @return mixed|null */ function get_path($data, $path) { $path = explode('/', $path); while (null !== ($part = array_shift($path))) { if (!is_array($data) || !isset($data[$part])) { return null; } $data = $data[$part]; } return $data; } /** * Set a value in a nested array key. Keys will be created as needed to set the * value. * * This function does not support keys that contain "/" or "[]" characters * because these are special tokens used when traversing the data structure. * A value may be prepended to an existing array by using "[]" as the final * key of a path. * * GuzzleHttp\get_path($data, 'foo/baz'); // null * GuzzleHttp\set_path($data, 'foo/baz/[]', 'a'); * GuzzleHttp\set_path($data, 'foo/baz/[]', 'b'); * GuzzleHttp\get_path($data, 'foo/baz'); * // Returns ['a', 'b'] * * @param array $data Data to modify by reference * @param string $path Path to set * @param mixed $value Value to set at the key * @throws \RuntimeException when trying to setPath using a nested path that * travels through a scalar value. 
*/ function set_path(&$data, $path, $value) { $current =& $data; $queue = explode('/', $path); while (null !== ($key = array_shift($queue))) { if (!is_array($current)) { throw new \RuntimeException("Trying to setPath {$path}, but " . "{$key} is set and is not an array"); } elseif (!$queue) { if ($key == '[]') { $current[] = $value; } else { $current[$key] = $value; } } elseif (isset($current[$key])) { $current =& $current[$key]; } else { $current[$key] = []; $current =& $current[$key]; } } } /** * Expands a URI template * * @param string $template URI template * @param array $variables Template variables * * @return string */ function uri_template($template, array $variables) { if (function_exists('\\uri_template')) { return \uri_template($template, $variables); } static $uriTemplate; if (!$uriTemplate) { $uriTemplate = new UriTemplate(); } return $uriTemplate->expand($template, $variables); } /** * @internal */ function deprecation_proxy($object, $name, $arguments, $map) { if (!isset($map[$name])) { throw new \BadMethodCallException('Unknown method, ' . $name); } $message = sprintf('%s is deprecated and will be removed in a future ' . 'version. Update your code to use the equivalent %s method ' . 'instead to avoid breaking changes when this shim is removed.', get_class($object) . '::' . $name . '()', get_class($object) . '::' . $map[$name] . '()' ); trigger_error($message, E_USER_DEPRECATED); return call_user_func_array([$object, $map[$name]], $arguments); }
epoch365/rito-api-challenge
vendor/guzzlehttp/guzzle/src/functions.php
PHP
gpl-2.0
8,331
# # Copyright 2013 Tim O'Shea # # This file is part of PyBOMBS # # PyBOMBS is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 3, or (at your option) # any later version. # # PyBOMBS is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with PyBOMBS; see the file COPYING. If not, write to # the Free Software Foundation, Inc., 51 Franklin Street, # Boston, MA 02110-1301, USA. # from globals import *; import recipe_loader; from recipe import recipe from pybombs_ops import *
scalable-networks/ext
pybombs/mod_pybombs/__init__.py
Python
gpl-2.0
852
<?php /* vim: set expandtab tabstop=4 shiftwidth=4: */ // +----------------------------------------------------------------------+ // | PHP version 4.0 | // +----------------------------------------------------------------------+ // | Copyright (c) 1997, 1998, 1999, 2000, 2001 The PHP Group | // +----------------------------------------------------------------------+ // | This source file is subject to version 2.0 of the PHP license, | // | that is bundled with this package in the file LICENSE, and is | // | available at through the world-wide-web at | // | http://www.php.net/license/2_02.txt. | // | If you did not receive a copy of the PHP license and are unable to | // | obtain it through the world-wide-web, please send a note to | // | license@php.net so we can mail you a copy immediately. | // +----------------------------------------------------------------------+ // | Authors: Adam Daniel <adaniel1@eesus.jnj.com> | // | Bertrand Mansion <bmansion@mamasam.com> | // +----------------------------------------------------------------------+ // // $Id: image.php,v 1.2 2010/12/14 17:35:23 moodlerobot Exp $ require_once("HTML/QuickForm/input.php"); /** * HTML class for a image type element * * @author Adam Daniel <adaniel1@eesus.jnj.com> * @author Bertrand Mansion <bmansion@mamasam.com> * @version 1.0 * @since PHP4.04pl1 * @access public */ class HTML_QuickForm_image extends HTML_QuickForm_input { // {{{ constructor /** * Class constructor * * @param string $elementName (optional)Element name attribute * @param string $src (optional)Image source * @param mixed $attributes (optional)Either a typical HTML attribute string * or an associative array * @since 1.0 * @access public * @return void */ function HTML_QuickForm_image($elementName=null, $src='', $attributes=null) { HTML_QuickForm_input::HTML_QuickForm_input($elementName, null, $attributes); $this->setType('image'); $this->setSource($src); } // end class constructor // }}} // {{{ setSource() /** * Sets source for 
image element * * @param string $src source for image element * @since 1.0 * @access public * @return void */ function setSource($src) { $this->updateAttributes(array('src' => $src)); } // end func setSource // }}} // {{{ setBorder() /** * Sets border size for image element * * @param string $border border for image element * @since 1.0 * @access public * @return void */ function setBorder($border) { $this->updateAttributes(array('border' => $border)); } // end func setBorder // }}} // {{{ setAlign() /** * Sets alignment for image element * * @param string $align alignment for image element * @since 1.0 * @access public * @return void */ function setAlign($align) { $this->updateAttributes(array('align' => $align)); } // end func setAlign // }}} // {{{ freeze() /** * Freeze the element so that only its value is returned * * @access public * @return void */ function freeze() { return false; } //end func freeze // }}} } // end class HTML_QuickForm_image ?>
jenarroyo/moodle-repo
lib/pear/HTML/QuickForm/image.php
PHP
gpl-3.0
3,740
#ifndef ___SUT_TRACE_H___ #define ___SUT_TRACE_H___ /******************************************************************************/ /* */ /* X r d S u t T r a c e . h h */ /* */ /* (C) 2005 by the Board of Trustees of the Leland Stanford, Jr., University */ /* Produced by Gerri Ganis for CERN */ /* */ /* This file is part of the XRootD software suite. */ /* */ /* XRootD is free software: you can redistribute it and/or modify it under */ /* the terms of the GNU Lesser General Public License as published by the */ /* Free Software Foundation, either version 3 of the License, or (at your */ /* option) any later version. */ /* */ /* XRootD is distributed in the hope that it will be useful, but WITHOUT */ /* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or */ /* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public */ /* License for more details. */ /* */ /* You should have received a copy of the GNU Lesser General Public License */ /* along with XRootD in a file called COPYING.LESSER (LGPL license) and file */ /* COPYING (GPL license). If not, see <http://www.gnu.org/licenses/>. */ /* */ /* The copyright holder's institutional names and contributor's names may not */ /* be used to endorse or promote products derived from this software without */ /* specific prior written permission of the institution or contributor. 
*/ /******************************************************************************/ #ifndef ___OUC_TRACE_H___ #include "XrdOuc/XrdOucTrace.hh" #endif #ifndef ___SUT_AUX_H___ #include "XrdSut/XrdSutAux.hh" #endif #ifndef NODEBUG #include "XrdSys/XrdSysHeaders.hh" #define QTRACE(act) (sutTrace && (sutTrace->What & sutTRACE_ ## act)) #define PRINT(y) {if (sutTrace) {sutTrace->Beg(epname); \ cerr <<y; sutTrace->End();}} #define TRACE(act,x) if (QTRACE(act)) PRINT(x) #define DEBUG(y) TRACE(Debug,y) #define EPNAME(x) static const char *epname = x; #else #define QTRACE(x) #define PRINT(x) #define TRACE(x,y) #define DEBUG(x) #define EPNAME(x) #endif // // For error logging and tracing extern XrdOucTrace *sutTrace; #endif
bbockelm/xrootd_old_git
src/XrdSut/XrdSutTrace.hh
C++
gpl-3.0
2,936
#include <AP_HAL/AP_HAL.h> #include <AP_Common/AP_Common.h> #include <AP_Math/AP_Math.h> #include <AP_Notify/AP_Notify.h> #include "AP_BattMonitor.h" #include "AP_BattMonitor_SMBus_SUI.h" extern const AP_HAL::HAL& hal; #define REG_CELL_VOLTAGE 0x28 #define REG_CURRENT 0x2a // maximum number of cells that we can read data for #define SUI_MAX_CELL_READ 4 // Constructor AP_BattMonitor_SMBus_SUI::AP_BattMonitor_SMBus_SUI(AP_BattMonitor &mon, AP_BattMonitor::BattMonitor_State &mon_state, AP_BattMonitor_Params &params, AP_HAL::OwnPtr<AP_HAL::I2CDevice> dev, uint8_t _cell_count) : AP_BattMonitor_SMBus(mon, mon_state, params, std::move(dev)), cell_count(_cell_count) { _pec_supported = false; _dev->set_retries(2); } void AP_BattMonitor_SMBus_SUI::init(void) { AP_BattMonitor_SMBus::init(); if (_dev && timer_handle) { // run twice as fast for two phases _dev->adjust_periodic_callback(timer_handle, 50000); } } void AP_BattMonitor_SMBus_SUI::timer() { uint32_t tnow = AP_HAL::micros(); // we read in two phases as the device can stall if you read // current too rapidly after voltages phase_voltages = !phase_voltages; if (phase_voltages) { read_cell_voltages(); update_health(); return; } // read current int32_t current_ma; if (read_block_bare(REG_CURRENT, (uint8_t *)&current_ma, sizeof(current_ma))) { _state.current_amps = current_ma * -0.001; _state.last_time_micros = tnow; } read_full_charge_capacity(); read_temp(); read_serial_number(); read_remaining_capacity(); update_health(); } // read_block - returns true if successful bool AP_BattMonitor_SMBus_SUI::read_block(uint8_t reg, uint8_t* data, uint8_t len) const { // buffer to hold results (2 extra byte returned holding length and PEC) uint8_t buff[len+2]; // read bytes if (!_dev->read_registers(reg, buff, sizeof(buff))) { return false; } // get length uint8_t bufflen = buff[0]; // sanity check length returned by smbus if (bufflen == 0 || bufflen > len) { return false; } // check PEC uint8_t pec = 
get_PEC(AP_BATTMONITOR_SMBUS_I2C_ADDR, reg, true, buff, bufflen+1); if (pec != buff[bufflen+1]) { return false; } // copy data (excluding PEC) memcpy(data, &buff[1], bufflen); // return success return true; } // read_bare_block - returns true if successful bool AP_BattMonitor_SMBus_SUI::read_block_bare(uint8_t reg, uint8_t* data, uint8_t len) const { // read bytes if (!_dev->read_registers(reg, data, len)) { return false; } // return success return true; } void AP_BattMonitor_SMBus_SUI::read_cell_voltages() { // read cell voltages uint16_t voltbuff[SUI_MAX_CELL_READ]; if (!read_block(REG_CELL_VOLTAGE, (uint8_t *)voltbuff, sizeof(voltbuff))) { return; } float pack_voltage_mv = 0.0f; for (uint8_t i = 0; i < MIN(SUI_MAX_CELL_READ, cell_count); i++) { const uint16_t cell = voltbuff[i]; _state.cell_voltages.cells[i] = cell; pack_voltage_mv += (float)cell; } if (cell_count >= SUI_MAX_CELL_READ) { // we can't read voltage of all cells. get overall pack voltage to work out // an average for remaining cells uint16_t total_mv; if (read_block(BATTMONITOR_SMBUS_VOLTAGE, (uint8_t *)&total_mv, sizeof(total_mv))) { // if total voltage is below pack_voltage_mv then we will // read zero volts for the extra cells. total_mv = MAX(total_mv, pack_voltage_mv); const uint16_t cell_mv = (total_mv - pack_voltage_mv) / (cell_count - SUI_MAX_CELL_READ); for (uint8_t i = SUI_MAX_CELL_READ; i < cell_count; i++) { _state.cell_voltages.cells[i] = cell_mv; } pack_voltage_mv = total_mv; } else { // we can't get total pack voltage. 
Use average of cells we have so far const uint16_t cell_mv = pack_voltage_mv / SUI_MAX_CELL_READ; for (uint8_t i = SUI_MAX_CELL_READ; i < cell_count; i++) { _state.cell_voltages.cells[i] = cell_mv; } pack_voltage_mv += cell_mv * (cell_count - SUI_MAX_CELL_READ); } } _has_cell_voltages = true; // accumulate the pack voltage out of the total of the cells _state.voltage = pack_voltage_mv * 0.001; last_volt_read_us = AP_HAL::micros(); } /* update healthy flag */ void AP_BattMonitor_SMBus_SUI::update_health() { uint32_t now = AP_HAL::micros(); _state.healthy = (now - last_volt_read_us < AP_BATTMONITOR_SMBUS_TIMEOUT_MICROS) && (now - _state.last_time_micros < AP_BATTMONITOR_SMBUS_TIMEOUT_MICROS); }
squilter/ardupilot
libraries/AP_BattMonitor/AP_BattMonitor_SMBus_SUI.cpp
C++
gpl-3.0
4,883
/**********************************************************************\ RageLib - Models Copyright (C) 2009 Arushan/Aru <oneforaru at gmail.com> This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. \**********************************************************************/ using RageLib.Common.ResourceTypes; using RageLib.Models.Resource.Shaders; namespace RageLib.Models.Data { public class MaterialParamVector4 : MaterialParam { public Vector4 Value { get; private set; } internal MaterialParamVector4(uint hash, ShaderParamVector4 vector) : base(hash) { Value = vector.Data; } } }
St0rmDev/gtaivtools
RageLib/Models/Data/MaterialParamVector4.cs
C#
gpl-3.0
1,264
# Copyright (c) 2009 Mitch Garnaat http://garnaat.org/ # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. """ Represents a connection to the EC2 service. """ from boto.ec2.connection import EC2Connection from boto.vpc.vpc import VPC from boto.vpc.customergateway import CustomerGateway from boto.vpc.vpngateway import VpnGateway, Attachment from boto.vpc.dhcpoptions import DhcpOptions from boto.vpc.subnet import Subnet from boto.vpc.vpnconnection import VpnConnection class VPCConnection(EC2Connection): # VPC methods def get_all_vpcs(self, vpc_ids=None, filters=None): """ Retrieve information about your VPCs. You can filter results to return information only about those VPCs that match your search parameters. Otherwise, all VPCs associated with your account are returned. :type vpc_ids: list :param vpc_ids: A list of strings with the desired VPC ID's :type filters: list of tuples :param filters: A list of tuples containing filters. Each tuple consists of a filter key and a filter value. 
Possible filter keys are: - *state*, the state of the VPC (pending or available) - *cidrBlock*, CIDR block of the VPC - *dhcpOptionsId*, the ID of a set of DHCP options :rtype: list :return: A list of :class:`boto.vpc.vpc.VPC` """ params = {} if vpc_ids: self.build_list_params(params, vpc_ids, 'VpcId') if filters: i = 1 for filter in filters: params[('Filter.%d.Key' % i)] = filter[0] params[('Filter.%d.Value.1')] = filter[1] i += 1 return self.get_list('DescribeVpcs', params, [('item', VPC)]) def create_vpc(self, cidr_block): """ Create a new Virtual Private Cloud. :type cidr_block: str :param cidr_block: A valid CIDR block :rtype: The newly created VPC :return: A :class:`boto.vpc.vpc.VPC` object """ params = {'CidrBlock' : cidr_block} return self.get_object('CreateVpc', params, VPC) def delete_vpc(self, vpc_id): """ Delete a Virtual Private Cloud. :type vpc_id: str :param vpc_id: The ID of the vpc to be deleted. :rtype: bool :return: True if successful """ params = {'VpcId': vpc_id} return self.get_status('DeleteVpc', params) # Customer Gateways def get_all_customer_gateways(self, customer_gateway_ids=None, filters=None): """ Retrieve information about your CustomerGateways. You can filter results to return information only about those CustomerGateways that match your search parameters. Otherwise, all CustomerGateways associated with your account are returned. :type customer_gateway_ids: list :param customer_gateway_ids: A list of strings with the desired CustomerGateway ID's :type filters: list of tuples :param filters: A list of tuples containing filters. Each tuple consists of a filter key and a filter value. 
Possible filter keys are: - *state*, the state of the CustomerGateway (pending,available,deleting,deleted) - *type*, the type of customer gateway (ipsec.1) - *ipAddress* the IP address of customer gateway's internet-routable external inteface :rtype: list :return: A list of :class:`boto.vpc.customergateway.CustomerGateway` """ params = {} if customer_gateway_ids: self.build_list_params(params, customer_gateway_ids, 'CustomerGatewayId') if filters: i = 1 for filter in filters: params[('Filter.%d.Key' % i)] = filter[0] params[('Filter.%d.Value.1')] = filter[1] i += 1 return self.get_list('DescribeCustomerGateways', params, [('item', CustomerGateway)]) def create_customer_gateway(self, type, ip_address, bgp_asn): """ Create a new Customer Gateway :type type: str :param type: Type of VPN Connection. Only valid valid currently is 'ipsec.1' :type ip_address: str :param ip_address: Internet-routable IP address for customer's gateway. Must be a static address. :type bgp_asn: str :param bgp_asn: Customer gateway's Border Gateway Protocol (BGP) Autonomous System Number (ASN) :rtype: The newly created CustomerGateway :return: A :class:`boto.vpc.customergateway.CustomerGateway` object """ params = {'Type' : type, 'IpAddress' : ip_address, 'BgpAsn' : bgp_asn} return self.get_object('CreateCustomerGateway', params, CustomerGateway) def delete_customer_gateway(self, customer_gateway_id): """ Delete a Customer Gateway. :type customer_gateway_id: str :param customer_gateway_id: The ID of the customer_gateway to be deleted. :rtype: bool :return: True if successful """ params = {'CustomerGatewayId': customer_gateway_id} return self.get_status('DeleteCustomerGateway', params) # VPN Gateways def get_all_vpn_gateways(self, vpn_gateway_ids=None, filters=None): """ Retrieve information about your VpnGateways. You can filter results to return information only about those VpnGateways that match your search parameters. Otherwise, all VpnGateways associated with your account are returned. 
:type vpn_gateway_ids: list :param vpn_gateway_ids: A list of strings with the desired VpnGateway ID's :type filters: list of tuples :param filters: A list of tuples containing filters. Each tuple consists of a filter key and a filter value. Possible filter keys are: - *state*, the state of the VpnGateway (pending,available,deleting,deleted) - *type*, the type of customer gateway (ipsec.1) - *availabilityZone*, the Availability zone the VPN gateway is in. :rtype: list :return: A list of :class:`boto.vpc.customergateway.VpnGateway` """ params = {} if vpn_gateway_ids: self.build_list_params(params, vpn_gateway_ids, 'VpnGatewayId') if filters: i = 1 for filter in filters: params[('Filter.%d.Key' % i)] = filter[0] params[('Filter.%d.Value.1')] = filter[1] i += 1 return self.get_list('DescribeVpnGateways', params, [('item', VpnGateway)]) def create_vpn_gateway(self, type, availability_zone=None): """ Create a new Vpn Gateway :type type: str :param type: Type of VPN Connection. Only valid valid currently is 'ipsec.1' :type availability_zone: str :param availability_zone: The Availability Zone where you want the VPN gateway. :rtype: The newly created VpnGateway :return: A :class:`boto.vpc.vpngateway.VpnGateway` object """ params = {'Type' : type} if availability_zone: params['AvailabilityZone'] = availability_zone return self.get_object('CreateVpnGateway', params, VpnGateway) def delete_vpn_gateway(self, vpn_gateway_id): """ Delete a Vpn Gateway. :type vpn_gateway_id: str :param vpn_gateway_id: The ID of the vpn_gateway to be deleted. :rtype: bool :return: True if successful """ params = {'VpnGatewayId': vpn_gateway_id} return self.get_status('DeleteVpnGateway', params) def attach_vpn_gateway(self, vpn_gateway_id, vpc_id): """ Attaches a VPN gateway to a VPC. :type vpn_gateway_id: str :param vpn_gateway_id: The ID of the vpn_gateway to attach :type vpc_id: str :param vpc_id: The ID of the VPC you want to attach the gateway to. 
:rtype: An attachment :return: a :class:`boto.vpc.vpngateway.Attachment` """ params = {'VpnGatewayId': vpn_gateway_id, 'VpcId' : vpc_id} return self.get_object('AttachVpnGateway', params, Attachment) # Subnets def get_all_subnets(self, subnet_ids=None, filters=None): """ Retrieve information about your Subnets. You can filter results to return information only about those Subnets that match your search parameters. Otherwise, all Subnets associated with your account are returned. :type subnet_ids: list :param subnet_ids: A list of strings with the desired Subnet ID's :type filters: list of tuples :param filters: A list of tuples containing filters. Each tuple consists of a filter key and a filter value. Possible filter keys are: - *state*, the state of the Subnet (pending,available) - *vpdId*, the ID of teh VPC the subnet is in. - *cidrBlock*, CIDR block of the subnet - *availabilityZone*, the Availability Zone the subnet is in. :rtype: list :return: A list of :class:`boto.vpc.subnet.Subnet` """ params = {} if subnet_ids: self.build_list_params(params, subnet_ids, 'SubnetId') if filters: i = 1 for filter in filters: params[('Filter.%d.Key' % i)] = filter[0] params[('Filter.%d.Value.1' % i)] = filter[1] i += 1 return self.get_list('DescribeSubnets', params, [('item', Subnet)]) def create_subnet(self, vpc_id, cidr_block, availability_zone=None): """ Create a new Subnet :type vpc_id: str :param vpc_id: The ID of the VPC where you want to create the subnet. :type cidr_block: str :param cidr_block: The CIDR block you want the subnet to cover. :type availability_zone: str :param availability_zone: The AZ you want the subnet in :rtype: The newly created Subnet :return: A :class:`boto.vpc.customergateway.Subnet` object """ params = {'VpcId' : vpc_id, 'CidrBlock' : cidr_block} if availability_zone: params['AvailabilityZone'] = availability_zone return self.get_object('CreateSubnet', params, Subnet) def delete_subnet(self, subnet_id): """ Delete a subnet. 
:type subnet_id: str :param subnet_id: The ID of the subnet to be deleted. :rtype: bool :return: True if successful """ params = {'SubnetId': subnet_id} return self.get_status('DeleteSubnet', params) # DHCP Options def get_all_dhcp_options(self, dhcp_options_ids=None): """ Retrieve information about your DhcpOptions. :type dhcp_options_ids: list :param dhcp_options_ids: A list of strings with the desired DhcpOption ID's :rtype: list :return: A list of :class:`boto.vpc.dhcpoptions.DhcpOptions` """ params = {} if dhcp_options_ids: self.build_list_params(params, dhcp_options_ids, 'DhcpOptionsId') return self.get_list('DescribeDhcpOptions', params, [('item', DhcpOptions)]) def create_dhcp_options(self, vpc_id, cidr_block, availability_zone=None): """ Create a new DhcpOption :type vpc_id: str :param vpc_id: The ID of the VPC where you want to create the subnet. :type cidr_block: str :param cidr_block: The CIDR block you want the subnet to cover. :type availability_zone: str :param availability_zone: The AZ you want the subnet in :rtype: The newly created DhcpOption :return: A :class:`boto.vpc.customergateway.DhcpOption` object """ params = {'VpcId' : vpc_id, 'CidrBlock' : cidr_block} if availability_zone: params['AvailabilityZone'] = availability_zone return self.get_object('CreateDhcpOption', params, DhcpOptions) def delete_dhcp_options(self, dhcp_options_id): """ Delete a DHCP Options :type dhcp_options_id: str :param dhcp_options_id: The ID of the DHCP Options to be deleted. :rtype: bool :return: True if successful """ params = {'DhcpOptionsId': dhcp_options_id} return self.get_status('DeleteDhcpOptions', params) def associate_dhcp_options(self, dhcp_options_id, vpc_id): """ Associate a set of Dhcp Options with a VPC. :type dhcp_options_id: str :param dhcp_options_id: The ID of the Dhcp Options :type vpc_id: str :param vpc_id: The ID of the VPC. 
:rtype: bool :return: True if successful """ params = {'DhcpOptionsId': dhcp_options_id, 'VpcId' : vpc_id} return self.get_status('AssociateDhcpOptions', params) # VPN Connection def get_all_vpn_connections(self, vpn_connection_ids=None, filters=None): """ Retrieve information about your VPN_CONNECTIONs. You can filter results to return information only about those VPN_CONNECTIONs that match your search parameters. Otherwise, all VPN_CONNECTIONs associated with your account are returned. :type vpn_connection_ids: list :param vpn_connection_ids: A list of strings with the desired VPN_CONNECTION ID's :type filters: list of tuples :param filters: A list of tuples containing filters. Each tuple consists of a filter key and a filter value. Possible filter keys are: - *state*, the state of the VPN_CONNECTION pending,available,deleting,deleted - *type*, the type of connection, currently 'ipsec.1' - *customerGatewayId*, the ID of the customer gateway associated with the VPN - *vpnGatewayId*, the ID of the VPN gateway associated with the VPN connection :rtype: list :return: A list of :class:`boto.vpn_connection.vpnconnection.VpnConnection` """ params = {} if vpn_connection_ids: self.build_list_params(params, vpn_connection_ids, 'Vpn_ConnectionId') if filters: i = 1 for filter in filters: params[('Filter.%d.Key' % i)] = filter[0] params[('Filter.%d.Value.1')] = filter[1] i += 1 return self.get_list('DescribeVpnConnections', params, [('item', VpnConnection)]) def create_vpn_connection(self, type, customer_gateway_id, vpn_gateway_id): """ Create a new VPN Connection. :type type: str :param type: The type of VPN Connection. Currently only 'ipsec.1' is supported :type customer_gateway_id: str :param customer_gateway_id: The ID of the customer gateway. :type vpn_gateway_id: str :param vpn_gateway_id: The ID of the VPN gateway. 
:rtype: The newly created VpnConnection :return: A :class:`boto.vpc.vpnconnection.VpnConnection` object """ params = {'Type' : type, 'CustomerGatewayId' : customer_gateway_id, 'VpnGatewayId' : vpn_gateway_id} return self.get_object('CreateVpnConnection', params, VpnConnection) def delete_vpn_connection(self, vpn_connection_id): """ Delete a VPN Connection. :type vpn_connection_id: str :param vpn_connection_id: The ID of the vpn_connection to be deleted. :rtype: bool :return: True if successful """ params = {'VpnConnectionId': vpn_connection_id} return self.get_status('DeleteVpnConnection', params)
apavlo/h-store
third_party/python/boto/vpc/__init__.py
Python
gpl-3.0
17,897
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ /* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ /* * Date: 26 November 2000 * * *SUMMARY: Passing a RegExp object to a RegExp() constructor. *This test arose from Bugzilla bug 61266. The ECMA3 section is: * * 15.10.4.1 new RegExp(pattern, flags) * * If pattern is an object R whose [[Class]] property is "RegExp" and * flags is undefined, then let P be the pattern used to construct R * and let F be the flags used to construct R. If pattern is an object R * whose [[Class]] property is "RegExp" and flags is not undefined, * then throw a TypeError exception. Otherwise, let P be the empty string * if pattern is undefined and ToString(pattern) otherwise, and let F be * the empty string if flags is undefined and ToString(flags) otherwise. * * *The current test will check the first scenario outlined above: * * "pattern" is itself a RegExp object R * "flags" is undefined * * We check that a new RegExp object obj2 defined from these parameters * is morally the same as the original RegExp object obj1. Of course, they * can't be equal as objects - so we check their enumerable properties... * * In this test, the initial RegExp object obj1 will not include a * flag. 
This test is identical to test 15.10.4.1-1.js, except that * here we use this syntax: * * obj2 = new RegExp(obj1, undefined); * * instead of: * * obj2 = new RegExp(obj1); */ //----------------------------------------------------------------------------- var gTestfile = '15.10.4.1-2.js'; var BUGNUMBER = '61266'; var summary = 'Passing a RegExp object to a RegExp() constructor'; var statprefix = 'Applying RegExp() twice to pattern '; var statsuffix = '; testing property '; var singlequote = "'"; var i = -1; var s = ''; var obj1 = {}; var obj2 = {}; var status = ''; var actual = ''; var expect = ''; var msg = ''; var patterns = new Array(); // various regular expressions to try - patterns[0] = ''; patterns[1] = 'abc'; patterns[2] = '(.*)(3-1)\s\w'; patterns[3] = '(.*)(...)\\s\\w'; patterns[4] = '[^A-Za-z0-9_]'; patterns[5] = '[^\f\n\r\t\v](123.5)([4 - 8]$)'; //------------------------------------------------------------------------------------------------- test(); //------------------------------------------------------------------------------------------------- function test() { enterFunc ('test'); printBugNumber(BUGNUMBER); printStatus (summary); for (i in patterns) { s = patterns[i]; status =getStatus(s); obj1 = new RegExp(s); obj2 = new RegExp(obj1, undefined); // see introduction to bug reportCompare (obj1 + '', obj2 + '', status); } exitFunc ('test'); } function getStatus(regexp) { return (statprefix + quote(regexp) + statsuffix); } function quote(text) { return (singlequote + text + singlequote); }
sam/htmlunit-rhino-fork
testsrc/tests/ecma_3/RegExp/15.10.4.1-2.js
JavaScript
mpl-2.0
3,091
# # Copyright (C) 2012 Instructure, Inc. # # This file is part of Canvas. # # Canvas is free software: you can redistribute it and/or modify it under # the terms of the GNU Affero General Public License as published by the Free # Software Foundation, version 3 of the License. # # Canvas is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU Affero General Public License for more # details. # # You should have received a copy of the GNU Affero General Public License along # with this program. If not, see <http://www.gnu.org/licenses/>. # require File.expand_path(File.dirname(__FILE__) + '/spec_helper') shared_examples_for "cassandra page views" do before do if Canvas::Cassandra::Database.configured?('page_views') Setting.set('enable_page_views', 'cassandra') else pending "needs cassandra page_views configuration" end end end
nditech/red-innovadores-lms
spec/cassandra_spec_helper.rb
Ruby
agpl-3.0
994
/*! * Piwik - free/libre analytics platform * * @link http://piwik.org * @license http://www.gnu.org/licenses/gpl-3.0.html GPL v3 or later */ (function () { angular.module('piwikApp.filter').filter('htmldecode', htmldecode); htmldecode.$inject = ['piwik']; /** * Be aware that this filter can cause XSS so only use it when you're sure it is safe. * Eg it should be safe when it is afterwards escaped by angular sanitize again. */ function htmldecode(piwik) { return function(text) { if (text && text.length) { return piwik.helper.htmlDecode(text); } return text; }; } })();
befair/soulShape
wp/soulshape.earth/piwik/plugins/CoreHome/angularjs/common/filters/htmldecode.js
JavaScript
agpl-3.0
686
""" Management command to resend all lti scores for the requested course. """ import textwrap from django.core.management import BaseCommand from opaque_keys.edx.keys import CourseKey from lti_provider.models import GradedAssignment from lti_provider import tasks class Command(BaseCommand): """ Send all lti scores for the requested courses to the registered consumers. If no arguments are provided, send all scores for all courses. Examples: ./manage.py lms resend_lti_scores ./manage.py lms resend_lti_scores course-v1:edX+DemoX+Demo_Course course-v1:UBCx+course+2016-01 """ help = textwrap.dedent(__doc__) def add_arguments(self, parser): parser.add_argument(u'course_keys', type=CourseKey.from_string, nargs='*') def handle(self, *args, **options): if options[u'course_keys']: for course_key in options[u'course_keys']: for assignment in self._iter_course_assignments(course_key): self._send_score(assignment) else: for assignment in self._iter_all_assignments(): self._send_score(assignment) def _send_score(self, assignment): """ Send the score to the LTI consumer for a single assignment. """ tasks.send_composite_outcome.delay( assignment.user_id, unicode(assignment.course_key), assignment.id, assignment.version_number, ) def _iter_all_assignments(self): """ Get all the graded assignments in the system. """ return GradedAssignment.objects.all() def _iter_course_assignments(self, course_key): """ Get all the graded assignments for the given course. """ return GradedAssignment.objects.filter(course_key=course_key)
caesar2164/edx-platform
lms/djangoapps/lti_provider/management/commands/resend_lti_scores.py
Python
agpl-3.0
1,857
#!/usr/bin/env python #*************************************************************************** #* * #* Copyright (c) 2009 Yorik van Havre <yorik@uncreated.net> * #* * #* This program is free software; you can redistribute it and/or modify * #* it under the terms of the GNU Lesser General Public License (LGPL) * #* as published by the Free Software Foundation; either version 2 of * #* the License, or (at your option) any later version. * #* for detail see the LICENCE text file. * #* * #* This program is distributed in the hope that it will be useful, * #* but WITHOUT ANY WARRANTY; without even the implied warranty of * #* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * #* GNU Library General Public License for more details. * #* * #* You should have received a copy of the GNU Library General Public * #* License along with this program; if not, write to the Free Software * #* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 * #* USA * #* * #*************************************************************************** __title__="wiki2qhelp" __author__ = "Yorik van Havre <yorik@uncreated.net>" __url__ = "http://www.freecadweb.org" """ This script builds qhrlp files from a local copy of the wiki """ import sys, os, re, tempfile, getopt, shutil from urllib2 import urlopen, HTTPError # CONFIGURATION ################################################# FOLDER = "./localwiki" INDEX = "Online_Help_Toc" # the start page from where to crawl the wiki VERBOSE = True # to display what's going on. Otherwise, runs totally silent. QHELPCOMPILER = 'qhelpgenerator' QCOLLECTIOMGENERATOR = 'qcollectiongenerator' RELEASE = '0.16' # END CONFIGURATION ############################################## fcount = dcount = 0 def crawl(): "downloads an entire wiki site" # tests ############################################### if os.system(QHELPCOMPILER +' -v'): print "Error: QAssistant not fully installed, exiting." 
return 1 if os.system(QCOLLECTIOMGENERATOR +' -v'): print "Error: QAssistant not fully installed, exiting." return 1 # run ######################################################## qhp = buildtoc() qhcp = createCollProjectFile() if generate(qhcp) or compile(qhp): print "Error at compiling" return 1 if VERBOSE: print "All done!" i=raw_input("Copy the files to their correct location in the source tree? y/n (default=no) ") if i.upper() in ["Y","YES"]: shutil.copy("localwiki/freecad.qch","../../Doc/freecad.qch") shutil.copy("localwiki/freecad.qhc","../../Doc/freecad.qhc") else: print 'Files are in localwiki. Test with "assistant -collectionFile localwiki/freecad.qhc"' return 0 def compile(qhpfile): "compiles the whole html doc with qassistant" qchfile = FOLDER + os.sep + "freecad.qch" if not os.system(QHELPCOMPILER + ' '+qhpfile+' -o '+qchfile): if VERBOSE: print "Successfully created",qchfile return 0 def generate(qhcpfile): "generates qassistant-specific settings like icon, title, ..." txt=""" <center>FreeCAD """+RELEASE+""" help files<br/> <a href="http://www.freecadweb.org">http://www.freecadweb.org</a></center> """ about=open(FOLDER + os.sep + "about.txt","w") about.write(txt) about.close() qhcfile = FOLDER + os.sep + "freecad.qhc" if not os.system(QCOLLECTIOMGENERATOR+' '+qhcpfile+' -o '+qhcfile): if VERBOSE: print "Successfully created ",qhcfile return 0 def createCollProjectFile(): qprojectfile = '''<?xml version="1.0" encoding="UTF-8"?> <QHelpCollectionProject version="1.0"> <assistant> <title>FreeCAD User Manual</title> <applicationIcon>64px-FreeCAD05.svg.png</applicationIcon> <cacheDirectory>freecad/freecad</cacheDirectory> <startPage>qthelp://org.freecad.usermanual/doc/Online_Help_Startpage.html</startPage> <aboutMenuText> <text>About FreeCAD</text> </aboutMenuText> <aboutDialog> <file>about.txt</file> <!-- <icon>images/icon.png</icon> --> <icon>64px-FreeCAD05.svg.png</icon> </aboutDialog> <enableDocumentationManager>true</enableDocumentationManager> 
<enableAddressBar>true</enableAddressBar> <enableFilterFunctionality>true</enableFilterFunctionality> </assistant> <docFiles> <generate> <file> <input>freecad.qhp</input> <output>freecad.qch</output> </file> </generate> <register> <file>freecad.qch</file> </register> </docFiles> </QHelpCollectionProject> ''' if VERBOSE: print "Building project file..." qfilename = FOLDER + os.sep + "freecad.qhcp" f = open(qfilename,'w') f.write(qprojectfile) f.close() if VERBOSE: print "Done writing qhcp file",qfilename return qfilename def buildtoc(): ''' gets the table of contents page and parses its contents into a clean lists structure ''' qhelpfile = '''<?xml version="1.0" encoding="UTF-8"?> <QtHelpProject version="1.0"> <namespace>org.freecad.usermanual</namespace> <virtualFolder>doc</virtualFolder> <!-- <customFilter name="FreeCAD '''+RELEASE+'''"> <filterAttribute>FreeCAD</filterAttribute> <filterAttribute>'''+RELEASE+'''</filterAttribute> </customFilter> --> <filterSection> <!-- <filterAttribute>FreeCAD</filterAttribute> <filterAttribute>'''+RELEASE+'''</filterAttribute> --> <toc> <inserttoc> </toc> <keywords> <insertkeywords> </keywords> <insertfiles> </filterSection> </QtHelpProject> ''' def getname(line): line = re.compile('<li>').sub('',line) line = re.compile('</li>').sub('',line) title = line.strip() link = '' if "<a" in line: title = re.findall('<a[^>]*>(.*?)</a>',line)[0].strip() link = re.findall('href="(.*?)"',line)[0].strip() if not link: link = 'default.html' return title,link if VERBOSE: print "Building table of contents..." 
f = open(FOLDER+os.sep+INDEX+'.html') html = '' for line in f: html += line f.close() html = html.replace("\n"," ") html = html.replace("> <","><") html = re.findall("<ul.*/ul>",html)[0] items = re.findall('<li[^>]*>.*?</li>|</ul></li>',html) inserttoc = '<section title="FreeCAD Documentation" ref="Online_Help_Toc.html">\n' insertkeywords = '' for item in items: if not ("<ul>" in item): if ("</ul>" in item): inserttoc += '</section>\n' else: link = '' title,link=getname(item) if link: link='" ref="'+link insertkeywords += ('<keyword name="'+title+link+'"/>\n') inserttoc += ('<section title="'+title+link+'"></section>\n') else: subitems = item.split("<ul>") for i in range(len(subitems)): link = '' title,link=getname(subitems[i]) if link: link='" ref="'+link insertkeywords += ('<keyword name="'+title+link+'"/>\n') trail = '' if i == len(subitems)-1: trail = '</section>' inserttoc += ('<section title="'+title+link+'">'+trail+'\n') inserttoc += '</section>\n' insertfiles = "<files>\n" for fil in os.listdir(FOLDER): insertfiles += ("<file>"+fil+"</file>\n") insertfiles += "</files>\n" qhelpfile = re.compile('<insertkeywords>').sub(insertkeywords,qhelpfile) qhelpfile = re.compile('<inserttoc>').sub(inserttoc,qhelpfile) qhelpfile = re.compile('<insertfiles>').sub(insertfiles,qhelpfile) qfilename = FOLDER + os.sep + "freecad.qhp" f = open(qfilename,'wb') f.write(qhelpfile) f.close() if VERBOSE: print "Done writing qhp file",qfilename return qfilename if __name__ == "__main__": crawl()
kkoksvik/FreeCAD
src/Tools/offlinedoc/buildqhelp.py
Python
lgpl-2.1
8,832
/* * Copyright (C) 2007, 2008 Apple Inc. All rights reserved. * Copyright (C) 2008 Matt Lilek <webkit@mattlilek.com> * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of * its contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ WebInspector.ElementsPanel = function() { WebInspector.Panel.call(this); this.element.addStyleClass("elements"); this.contentElement = document.createElement("div"); this.contentElement.id = "elements-content"; this.contentElement.className = "outline-disclosure"; this.treeOutline = new WebInspector.ElementsTreeOutline(); this.treeOutline.panel = this; this.treeOutline.includeRootDOMNode = false; this.treeOutline.selectEnabled = true; this.treeOutline.focusedNodeChanged = function(forceUpdate) { if (this.panel.visible && WebInspector.currentFocusElement !== document.getElementById("search")) WebInspector.currentFocusElement = document.getElementById("main-panels"); this.panel.updateBreadcrumb(forceUpdate); for (var pane in this.panel.sidebarPanes) this.panel.sidebarPanes[pane].needsUpdate = true; this.panel.updateStyles(true); this.panel.updateMetrics(); this.panel.updateProperties(); if (InspectorController.searchingForNode()) { InspectorController.toggleNodeSearch(); this.panel.nodeSearchButton.removeStyleClass("toggled-on"); } }; this.contentElement.appendChild(this.treeOutline.element); this.crumbsElement = document.createElement("div"); this.crumbsElement.className = "crumbs"; this.crumbsElement.addEventListener("mousemove", this._mouseMovedInCrumbs.bind(this), false); this.crumbsElement.addEventListener("mouseout", this._mouseMovedOutOfCrumbs.bind(this), false); this.sidebarPanes = {}; this.sidebarPanes.styles = new WebInspector.StylesSidebarPane(); this.sidebarPanes.metrics = new WebInspector.MetricsSidebarPane(); this.sidebarPanes.properties = new WebInspector.PropertiesSidebarPane(); this.sidebarPanes.styles.onexpand = this.updateStyles.bind(this); this.sidebarPanes.metrics.onexpand = this.updateMetrics.bind(this); this.sidebarPanes.properties.onexpand = this.updateProperties.bind(this); this.sidebarPanes.styles.expanded = true; this.sidebarPanes.styles.addEventListener("style edited", this._stylesPaneEdited, this); 
this.sidebarPanes.styles.addEventListener("style property toggled", this._stylesPaneEdited, this); this.sidebarPanes.metrics.addEventListener("metrics edited", this._metricsPaneEdited, this); this.sidebarElement = document.createElement("div"); this.sidebarElement.id = "elements-sidebar"; this.sidebarElement.appendChild(this.sidebarPanes.styles.element); this.sidebarElement.appendChild(this.sidebarPanes.metrics.element); this.sidebarElement.appendChild(this.sidebarPanes.properties.element); this.sidebarResizeElement = document.createElement("div"); this.sidebarResizeElement.className = "sidebar-resizer-vertical"; this.sidebarResizeElement.addEventListener("mousedown", this.rightSidebarResizerDragStart.bind(this), false); this.nodeSearchButton = document.createElement("button"); this.nodeSearchButton.title = WebInspector.UIString("Select an element in the page to inspect it."); this.nodeSearchButton.id = "node-search-status-bar-item"; this.nodeSearchButton.className = "status-bar-item"; this.nodeSearchButton.addEventListener("click", this._nodeSearchButtonClicked.bind(this), false); this.searchingForNode = false; this.element.appendChild(this.contentElement); this.element.appendChild(this.sidebarElement); this.element.appendChild(this.sidebarResizeElement); this._mutationMonitoredWindows = []; this._nodeInsertedEventListener = InspectorController.wrapCallback(this._nodeInserted.bind(this)); this._nodeRemovedEventListener = InspectorController.wrapCallback(this._nodeRemoved.bind(this)); this._contentLoadedEventListener = InspectorController.wrapCallback(this._contentLoaded.bind(this)); this.reset(); } WebInspector.ElementsPanel.prototype = { toolbarItemClass: "elements", get toolbarItemLabel() { return WebInspector.UIString("Elements"); }, get statusBarItems() { return [this.nodeSearchButton, this.crumbsElement]; }, updateStatusBarItems: function() { this.updateBreadcrumbSizes(); }, show: function() { WebInspector.Panel.prototype.show.call(this); 
this.sidebarResizeElement.style.right = (this.sidebarElement.offsetWidth - 3) + "px"; this.updateBreadcrumb(); this.treeOutline.updateSelection(); if (this.recentlyModifiedNodes.length) this._updateModifiedNodes(); }, hide: function() { WebInspector.Panel.prototype.hide.call(this); WebInspector.hoveredDOMNode = null; if (InspectorController.searchingForNode()) { InspectorController.toggleNodeSearch(); this.nodeSearchButton.removeStyleClass("toggled-on"); } }, resize: function() { this.treeOutline.updateSelection(); this.updateBreadcrumbSizes(); }, reset: function() { this.rootDOMNode = null; this.focusedDOMNode = null; WebInspector.hoveredDOMNode = null; if (InspectorController.searchingForNode()) { InspectorController.toggleNodeSearch(); this.nodeSearchButton.removeStyleClass("toggled-on"); } this.recentlyModifiedNodes = []; this.unregisterAllMutationEventListeners(); delete this.currentQuery; this.searchCanceled(); var inspectedWindow = InspectorController.inspectedWindow(); if (!inspectedWindow || !inspectedWindow.document) return; if (!inspectedWindow.document.firstChild) { function contentLoaded() { inspectedWindow.document.removeEventListener("DOMContentLoaded", contentLoadedCallback, false); this.reset(); } var contentLoadedCallback = InspectorController.wrapCallback(contentLoaded.bind(this)); inspectedWindow.document.addEventListener("DOMContentLoaded", contentLoadedCallback, false); return; } // If the window isn't visible, return early so the DOM tree isn't built // and mutation event listeners are not added. 
if (!InspectorController.isWindowVisible()) return; this.registerMutationEventListeners(inspectedWindow); var inspectedRootDocument = inspectedWindow.document; this.rootDOMNode = inspectedRootDocument; var canidateFocusNode = inspectedRootDocument.body || inspectedRootDocument.documentElement; if (canidateFocusNode) { this.treeOutline.suppressSelectHighlight = true; this.focusedDOMNode = canidateFocusNode; this.treeOutline.suppressSelectHighlight = false; if (this.treeOutline.selectedTreeElement) this.treeOutline.selectedTreeElement.expand(); } }, includedInSearchResultsPropertyName: "__includedInInspectorSearchResults", searchCanceled: function() { if (this._searchResults) { const searchResultsProperty = this.includedInSearchResultsPropertyName; for (var i = 0; i < this._searchResults.length; ++i) { var node = this._searchResults[i]; // Remove the searchResultsProperty since there might be an unfinished search. delete node[searchResultsProperty]; var treeElement = this.treeOutline.findTreeElement(node); if (treeElement) treeElement.highlighted = false; } } WebInspector.updateSearchMatchesCount(0, this); if (this._currentSearchChunkIntervalIdentifier) { clearInterval(this._currentSearchChunkIntervalIdentifier); delete this._currentSearchChunkIntervalIdentifier; } this._currentSearchResultIndex = 0; this._searchResults = []; }, performSearch: function(query) { // Call searchCanceled since it will reset everything we need before doing a new search. this.searchCanceled(); const whitespaceTrimmedQuery = query.trimWhitespace(); if (!whitespaceTrimmedQuery.length) return; var tagNameQuery = whitespaceTrimmedQuery; var attributeNameQuery = whitespaceTrimmedQuery; var startTagFound = (tagNameQuery.indexOf("<") === 0); var endTagFound = (tagNameQuery.lastIndexOf(">") === (tagNameQuery.length - 1)); if (startTagFound || endTagFound) { var tagNameQueryLength = tagNameQuery.length; tagNameQuery = tagNameQuery.substring((startTagFound ? 1 : 0), (endTagFound ? 
(tagNameQueryLength - 1) : tagNameQueryLength)); } // Check the tagNameQuery is it is a possibly valid tag name. if (!/^[a-zA-Z0-9\-_:]+$/.test(tagNameQuery)) tagNameQuery = null; // Check the attributeNameQuery is it is a possibly valid tag name. if (!/^[a-zA-Z0-9\-_:]+$/.test(attributeNameQuery)) attributeNameQuery = null; const escapedQuery = query.escapeCharacters("'"); const escapedTagNameQuery = (tagNameQuery ? tagNameQuery.escapeCharacters("'") : null); const escapedWhitespaceTrimmedQuery = whitespaceTrimmedQuery.escapeCharacters("'"); const searchResultsProperty = this.includedInSearchResultsPropertyName; var updatedMatchCountOnce = false; var matchesCountUpdateTimeout = null; function updateMatchesCount() { WebInspector.updateSearchMatchesCount(this._searchResults.length, this); matchesCountUpdateTimeout = null; updatedMatchCountOnce = true; } function updateMatchesCountSoon() { if (!updatedMatchCountOnce) return updateMatchesCount.call(this); if (matchesCountUpdateTimeout) return; // Update the matches count every half-second so it doesn't feel twitchy. matchesCountUpdateTimeout = setTimeout(updateMatchesCount.bind(this), 500); } function addNodesToResults(nodes, length, getItem) { if (!length) return; for (var i = 0; i < length; ++i) { var node = getItem.call(nodes, i); // Skip this node if it already has the property. if (searchResultsProperty in node) continue; if (!this._searchResults.length) { this._currentSearchResultIndex = 0; this.focusedDOMNode = node; } node[searchResultsProperty] = true; this._searchResults.push(node); // Highlight the tree element to show it matched the search. // FIXME: highlight the substrings in text nodes and attributes. 
var treeElement = this.treeOutline.findTreeElement(node); if (treeElement) treeElement.highlighted = true; } updateMatchesCountSoon.call(this); } function matchExactItems(doc) { matchExactId.call(this, doc); matchExactClassNames.call(this, doc); matchExactTagNames.call(this, doc); matchExactAttributeNames.call(this, doc); } function matchExactId(doc) { const result = doc.__proto__.getElementById.call(doc, whitespaceTrimmedQuery); addNodesToResults.call(this, result, (result ? 1 : 0), function() { return this }); } function matchExactClassNames(doc) { const result = doc.__proto__.getElementsByClassName.call(doc, whitespaceTrimmedQuery); addNodesToResults.call(this, result, result.length, result.item); } function matchExactTagNames(doc) { if (!tagNameQuery) return; const result = doc.__proto__.getElementsByTagName.call(doc, tagNameQuery); addNodesToResults.call(this, result, result.length, result.item); } function matchExactAttributeNames(doc) { if (!attributeNameQuery) return; const result = doc.__proto__.querySelectorAll.call(doc, "[" + attributeNameQuery + "]"); addNodesToResults.call(this, result, result.length, result.item); } function matchPartialTagNames(doc) { if (!tagNameQuery) return; const result = doc.__proto__.evaluate.call(doc, "//*[contains(name(), '" + escapedTagNameQuery + "')]", doc, null, XPathResult.ORDERED_NODE_SNAPSHOT_TYPE); addNodesToResults.call(this, result, result.snapshotLength, result.snapshotItem); } function matchStartOfTagNames(doc) { if (!tagNameQuery) return; const result = doc.__proto__.evaluate.call(doc, "//*[starts-with(name(), '" + escapedTagNameQuery + "')]", doc, null, XPathResult.ORDERED_NODE_SNAPSHOT_TYPE); addNodesToResults.call(this, result, result.snapshotLength, result.snapshotItem); } function matchPartialTagNamesAndAttributeValues(doc) { if (!tagNameQuery) { matchPartialAttributeValues.call(this, doc); return; } const result = doc.__proto__.evaluate.call(doc, "//*[contains(name(), '" + escapedTagNameQuery + "') or 
contains(@*, '" + escapedQuery + "')]", doc, null, XPathResult.ORDERED_NODE_SNAPSHOT_TYPE); addNodesToResults.call(this, result, result.snapshotLength, result.snapshotItem); } function matchPartialAttributeValues(doc) { const result = doc.__proto__.evaluate.call(doc, "//*[contains(@*, '" + escapedQuery + "')]", doc, null, XPathResult.ORDERED_NODE_SNAPSHOT_TYPE); addNodesToResults.call(this, result, result.snapshotLength, result.snapshotItem); } function matchStyleSelector(doc) { const result = doc.__proto__.querySelectorAll.call(doc, whitespaceTrimmedQuery); addNodesToResults.call(this, result, result.length, result.item); } function matchPlainText(doc) { const result = doc.__proto__.evaluate.call(doc, "//text()[contains(., '" + escapedQuery + "')] | //comment()[contains(., '" + escapedQuery + "')]", doc, null, XPathResult.ORDERED_NODE_SNAPSHOT_TYPE); addNodesToResults.call(this, result, result.snapshotLength, result.snapshotItem); } function matchXPathQuery(doc) { const result = doc.__proto__.evaluate.call(doc, whitespaceTrimmedQuery, doc, null, XPathResult.ORDERED_NODE_SNAPSHOT_TYPE); addNodesToResults.call(this, result, result.snapshotLength, result.snapshotItem); } function finishedSearching() { // Remove the searchResultsProperty now that the search is finished. for (var i = 0; i < this._searchResults.length; ++i) delete this._searchResults[i][searchResultsProperty]; } const mainFrameDocument = InspectorController.inspectedWindow().document; const searchDocuments = [mainFrameDocument]; if (tagNameQuery && startTagFound && endTagFound) const searchFunctions = [matchExactTagNames, matchPlainText]; else if (tagNameQuery && startTagFound) const searchFunctions = [matchStartOfTagNames, matchPlainText]; else if (tagNameQuery && endTagFound) { // FIXME: we should have a matchEndOfTagNames search function if endTagFound is true but not startTagFound. // This requires ends-with() support in XPath, WebKit only supports starts-with() and contains(). 
const searchFunctions = [matchPartialTagNames, matchPlainText]; } else if (whitespaceTrimmedQuery === "//*" || whitespaceTrimmedQuery === "*") { // These queries will match every node. Matching everything isn't useful and can be slow for large pages, // so limit the search functions list to plain text and attribute matching. const searchFunctions = [matchPartialAttributeValues, matchPlainText]; } else const searchFunctions = [matchExactItems, matchStyleSelector, matchPartialTagNamesAndAttributeValues, matchPlainText, matchXPathQuery]; // Find all frames, iframes and object elements to search their documents. const querySelectorAllFunction = InspectorController.inspectedWindow().Document.prototype.querySelectorAll; const subdocumentResult = querySelectorAllFunction.call(mainFrameDocument, "iframe, frame, object"); for (var i = 0; i < subdocumentResult.length; ++i) { var element = subdocumentResult.item(i); if (element.contentDocument) searchDocuments.push(element.contentDocument); } const panel = this; var documentIndex = 0; var searchFunctionIndex = 0; var chunkIntervalIdentifier = null; // Split up the work into chunks so we don't block the UI thread while processing. function processChunk() { var searchDocument = searchDocuments[documentIndex]; var searchFunction = searchFunctions[searchFunctionIndex]; if (++searchFunctionIndex > searchFunctions.length) { searchFunction = searchFunctions[0]; searchFunctionIndex = 0; if (++documentIndex > searchDocuments.length) { if (panel._currentSearchChunkIntervalIdentifier === chunkIntervalIdentifier) delete panel._currentSearchChunkIntervalIdentifier; clearInterval(chunkIntervalIdentifier); finishedSearching.call(panel); return; } searchDocument = searchDocuments[documentIndex]; } if (!searchDocument || !searchFunction) return; try { searchFunction.call(panel, searchDocument); } catch(err) { // ignore any exceptions. the query might be malformed, but we allow that. 
} } processChunk(); chunkIntervalIdentifier = setInterval(processChunk, 25); this._currentSearchChunkIntervalIdentifier = chunkIntervalIdentifier; }, jumpToNextSearchResult: function() { if (!this._searchResults || !this._searchResults.length) return; if (++this._currentSearchResultIndex >= this._searchResults.length) this._currentSearchResultIndex = 0; this.focusedDOMNode = this._searchResults[this._currentSearchResultIndex]; }, jumpToPreviousSearchResult: function() { if (!this._searchResults || !this._searchResults.length) return; if (--this._currentSearchResultIndex < 0) this._currentSearchResultIndex = (this._searchResults.length - 1); this.focusedDOMNode = this._searchResults[this._currentSearchResultIndex]; }, inspectedWindowCleared: function(window) { if (InspectorController.isWindowVisible()) this.updateMutationEventListeners(window); }, _addMutationEventListeners: function(monitoredWindow) { monitoredWindow.document.addEventListener("DOMNodeInserted", this._nodeInsertedEventListener, true); monitoredWindow.document.addEventListener("DOMNodeRemoved", this._nodeRemovedEventListener, true); if (monitoredWindow.frameElement) monitoredWindow.addEventListener("DOMContentLoaded", this._contentLoadedEventListener, true); }, _removeMutationEventListeners: function(monitoredWindow) { if (monitoredWindow.frameElement) monitoredWindow.removeEventListener("DOMContentLoaded", this._contentLoadedEventListener, true); if (!monitoredWindow.document) return; monitoredWindow.document.removeEventListener("DOMNodeInserted", this._nodeInsertedEventListener, true); monitoredWindow.document.removeEventListener("DOMNodeRemoved", this._nodeRemovedEventListener, true); }, updateMutationEventListeners: function(monitoredWindow) { this._addMutationEventListeners(monitoredWindow); }, registerMutationEventListeners: function(monitoredWindow) { if (!monitoredWindow || this._mutationMonitoredWindows.indexOf(monitoredWindow) !== -1) return; 
this._mutationMonitoredWindows.push(monitoredWindow); if (InspectorController.isWindowVisible()) this._addMutationEventListeners(monitoredWindow); }, unregisterMutationEventListeners: function(monitoredWindow) { if (!monitoredWindow || this._mutationMonitoredWindows.indexOf(monitoredWindow) === -1) return; this._mutationMonitoredWindows.remove(monitoredWindow); this._removeMutationEventListeners(monitoredWindow); }, unregisterAllMutationEventListeners: function() { for (var i = 0; i < this._mutationMonitoredWindows.length; ++i) this._removeMutationEventListeners(this._mutationMonitoredWindows[i]); this._mutationMonitoredWindows = []; }, get rootDOMNode() { return this.treeOutline.rootDOMNode; }, set rootDOMNode(x) { this.treeOutline.rootDOMNode = x; }, get focusedDOMNode() { return this.treeOutline.focusedDOMNode; }, set focusedDOMNode(x) { this.treeOutline.focusedDOMNode = x; }, _contentLoaded: function(event) { this.recentlyModifiedNodes.push({node: event.target, parent: event.target.defaultView.frameElement, replaced: true}); if (this.visible) this._updateModifiedNodesSoon(); }, _nodeInserted: function(event) { this.recentlyModifiedNodes.push({node: event.target, parent: event.relatedNode, inserted: true}); if (this.visible) this._updateModifiedNodesSoon(); }, _nodeRemoved: function(event) { this.recentlyModifiedNodes.push({node: event.target, parent: event.relatedNode, removed: true}); if (this.visible) this._updateModifiedNodesSoon(); }, _updateModifiedNodesSoon: function() { if ("_updateModifiedNodesTimeout" in this) return; this._updateModifiedNodesTimeout = setTimeout(this._updateModifiedNodes.bind(this), 0); }, _updateModifiedNodes: function() { if ("_updateModifiedNodesTimeout" in this) { clearTimeout(this._updateModifiedNodesTimeout); delete this._updateModifiedNodesTimeout; } var updatedParentTreeElements = []; var updateBreadcrumbs = false; for (var i = 0; i < this.recentlyModifiedNodes.length; ++i) { var replaced = 
this.recentlyModifiedNodes[i].replaced; var parent = this.recentlyModifiedNodes[i].parent; if (!parent) continue; var parentNodeItem = this.treeOutline.findTreeElement(parent, null, null, objectsAreSame); if (parentNodeItem && !parentNodeItem.alreadyUpdatedChildren) { parentNodeItem.updateChildren(replaced); parentNodeItem.alreadyUpdatedChildren = true; updatedParentTreeElements.push(parentNodeItem); } if (!updateBreadcrumbs && (objectsAreSame(this.focusedDOMNode, parent) || isAncestorIncludingParentFrames(this.focusedDOMNode, parent))) updateBreadcrumbs = true; } for (var i = 0; i < updatedParentTreeElements.length; ++i) delete updatedParentTreeElements[i].alreadyUpdatedChildren; this.recentlyModifiedNodes = []; if (updateBreadcrumbs) this.updateBreadcrumb(true); }, _stylesPaneEdited: function() { this.sidebarPanes.metrics.needsUpdate = true; this.updateMetrics(); }, _metricsPaneEdited: function() { this.sidebarPanes.styles.needsUpdate = true; this.updateStyles(true); }, _mouseMovedInCrumbs: function(event) { var nodeUnderMouse = document.elementFromPoint(event.pageX, event.pageY); var crumbElement = nodeUnderMouse.enclosingNodeOrSelfWithClass("crumb"); WebInspector.hoveredDOMNode = (crumbElement ? 
crumbElement.representedObject : null); if ("_mouseOutOfCrumbsTimeout" in this) { clearTimeout(this._mouseOutOfCrumbsTimeout); delete this._mouseOutOfCrumbsTimeout; } }, _mouseMovedOutOfCrumbs: function(event) { var nodeUnderMouse = document.elementFromPoint(event.pageX, event.pageY); if (nodeUnderMouse.isDescendant(this.crumbsElement)) return; WebInspector.hoveredDOMNode = null; this._mouseOutOfCrumbsTimeout = setTimeout(this.updateBreadcrumbSizes.bind(this), 1000); }, updateBreadcrumb: function(forceUpdate) { if (!this.visible) return; var crumbs = this.crumbsElement; var handled = false; var foundRoot = false; var crumb = crumbs.firstChild; while (crumb) { if (objectsAreSame(crumb.representedObject, this.rootDOMNode)) foundRoot = true; if (foundRoot) crumb.addStyleClass("dimmed"); else crumb.removeStyleClass("dimmed"); if (objectsAreSame(crumb.representedObject, this.focusedDOMNode)) { crumb.addStyleClass("selected"); handled = true; } else { crumb.removeStyleClass("selected"); } crumb = crumb.nextSibling; } if (handled && !forceUpdate) { // We don't need to rebuild the crumbs, but we need to adjust sizes // to reflect the new focused or root node. this.updateBreadcrumbSizes(); return; } crumbs.removeChildren(); var panel = this; function selectCrumbFunction(event) { var crumb = event.currentTarget; if (crumb.hasStyleClass("collapsed")) { // Clicking a collapsed crumb will expose the hidden crumbs. if (crumb === panel.crumbsElement.firstChild) { // If the focused crumb is the first child, pick the farthest crumb // that is still hidden. This allows the user to expose every crumb. 
var currentCrumb = crumb; while (currentCrumb) { var hidden = currentCrumb.hasStyleClass("hidden"); var collapsed = currentCrumb.hasStyleClass("collapsed"); if (!hidden && !collapsed) break; crumb = currentCrumb; currentCrumb = currentCrumb.nextSibling; } } panel.updateBreadcrumbSizes(crumb); } else { // Clicking a dimmed crumb or double clicking (event.detail >= 2) // will change the root node in addition to the focused node. if (event.detail >= 2 || crumb.hasStyleClass("dimmed")) panel.rootDOMNode = crumb.representedObject.parentNode; panel.focusedDOMNode = crumb.representedObject; } event.preventDefault(); } foundRoot = false; for (var current = this.focusedDOMNode; current; current = parentNodeOrFrameElement(current)) { if (current.nodeType === Node.DOCUMENT_NODE) continue; if (objectsAreSame(current, this.rootDOMNode)) foundRoot = true; var crumb = document.createElement("span"); crumb.className = "crumb"; crumb.representedObject = current; crumb.addEventListener("mousedown", selectCrumbFunction, false); var crumbTitle; switch (current.nodeType) { case Node.ELEMENT_NODE: crumbTitle = current.nodeName.toLowerCase(); var nameElement = document.createElement("span"); nameElement.textContent = crumbTitle; crumb.appendChild(nameElement); var idAttribute = current.getAttribute("id"); if (idAttribute) { var idElement = document.createElement("span"); crumb.appendChild(idElement); var part = "#" + idAttribute; crumbTitle += part; idElement.appendChild(document.createTextNode(part)); // Mark the name as extra, since the ID is more important. 
nameElement.className = "extra"; } var classAttribute = current.getAttribute("class"); if (classAttribute) { var classes = classAttribute.split(/\s+/); var foundClasses = {}; if (classes.length) { var classesElement = document.createElement("span"); classesElement.className = "extra"; crumb.appendChild(classesElement); for (var i = 0; i < classes.length; ++i) { var className = classes[i]; if (className && !(className in foundClasses)) { var part = "." + className; crumbTitle += part; classesElement.appendChild(document.createTextNode(part)); foundClasses[className] = true; } } } } break; case Node.TEXT_NODE: if (isNodeWhitespace.call(current)) crumbTitle = WebInspector.UIString("(whitespace)"); else crumbTitle = WebInspector.UIString("(text)"); break case Node.COMMENT_NODE: crumbTitle = "<!-->"; break; case Node.DOCUMENT_TYPE_NODE: crumbTitle = "<!DOCTYPE>"; break; default: crumbTitle = current.nodeName.toLowerCase(); } if (!crumb.childNodes.length) { var nameElement = document.createElement("span"); nameElement.textContent = crumbTitle; crumb.appendChild(nameElement); } crumb.title = crumbTitle; if (foundRoot) crumb.addStyleClass("dimmed"); if (objectsAreSame(current, this.focusedDOMNode)) crumb.addStyleClass("selected"); if (!crumbs.childNodes.length) crumb.addStyleClass("end"); crumbs.appendChild(crumb); } if (crumbs.hasChildNodes()) crumbs.lastChild.addStyleClass("start"); this.updateBreadcrumbSizes(); }, updateBreadcrumbSizes: function(focusedCrumb) { if (!this.visible) return; if (document.body.offsetWidth <= 0) { // The stylesheet hasn't loaded yet or the window is closed, // so we can't calculate what is need. Return early. return; } var crumbs = this.crumbsElement; if (!crumbs.childNodes.length || crumbs.offsetWidth <= 0) return; // No crumbs, do nothing. // A Zero index is the right most child crumb in the breadcrumb. 
var selectedIndex = 0; var focusedIndex = 0; var selectedCrumb; var i = 0; var crumb = crumbs.firstChild; while (crumb) { // Find the selected crumb and index. if (!selectedCrumb && crumb.hasStyleClass("selected")) { selectedCrumb = crumb; selectedIndex = i; } // Find the focused crumb index. if (crumb === focusedCrumb) focusedIndex = i; // Remove any styles that affect size before // deciding to shorten any crumbs. if (crumb !== crumbs.lastChild) crumb.removeStyleClass("start"); if (crumb !== crumbs.firstChild) crumb.removeStyleClass("end"); crumb.removeStyleClass("compact"); crumb.removeStyleClass("collapsed"); crumb.removeStyleClass("hidden"); crumb = crumb.nextSibling; ++i; } // Restore the start and end crumb classes in case they got removed in coalesceCollapsedCrumbs(). // The order of the crumbs in the document is opposite of the visual order. crumbs.firstChild.addStyleClass("end"); crumbs.lastChild.addStyleClass("start"); function crumbsAreSmallerThanContainer() { var rightPadding = 20; var errorWarningElement = document.getElementById("error-warning-count"); if (!WebInspector.console.visible && errorWarningElement) rightPadding += errorWarningElement.offsetWidth; return ((crumbs.totalOffsetLeft + crumbs.offsetWidth + rightPadding) < window.innerWidth); } if (crumbsAreSmallerThanContainer()) return; // No need to compact the crumbs, they all fit at full size. 
var BothSides = 0; var AncestorSide = -1; var ChildSide = 1; function makeCrumbsSmaller(shrinkingFunction, direction, significantCrumb) { if (!significantCrumb) significantCrumb = (focusedCrumb || selectedCrumb); if (significantCrumb === selectedCrumb) var significantIndex = selectedIndex; else if (significantCrumb === focusedCrumb) var significantIndex = focusedIndex; else { var significantIndex = 0; for (var i = 0; i < crumbs.childNodes.length; ++i) { if (crumbs.childNodes[i] === significantCrumb) { significantIndex = i; break; } } } function shrinkCrumbAtIndex(index) { var shrinkCrumb = crumbs.childNodes[index]; if (shrinkCrumb && shrinkCrumb !== significantCrumb) shrinkingFunction(shrinkCrumb); if (crumbsAreSmallerThanContainer()) return true; // No need to compact the crumbs more. return false; } // Shrink crumbs one at a time by applying the shrinkingFunction until the crumbs // fit in the container or we run out of crumbs to shrink. if (direction) { // Crumbs are shrunk on only one side (based on direction) of the signifcant crumb. var index = (direction > 0 ? 0 : crumbs.childNodes.length - 1); while (index !== significantIndex) { if (shrinkCrumbAtIndex(index)) return true; index += (direction > 0 ? 1 : -1); } } else { // Crumbs are shrunk in order of descending distance from the signifcant crumb, // with a tie going to child crumbs. var startIndex = 0; var endIndex = crumbs.childNodes.length - 1; while (startIndex != significantIndex || endIndex != significantIndex) { var startDistance = significantIndex - startIndex; var endDistance = endIndex - significantIndex; if (startDistance >= endDistance) var index = startIndex++; else var index = endIndex--; if (shrinkCrumbAtIndex(index)) return true; } } // We are not small enough yet, return false so the caller knows. 
return false; } function coalesceCollapsedCrumbs() { var crumb = crumbs.firstChild; var collapsedRun = false; var newStartNeeded = false; var newEndNeeded = false; while (crumb) { var hidden = crumb.hasStyleClass("hidden"); if (!hidden) { var collapsed = crumb.hasStyleClass("collapsed"); if (collapsedRun && collapsed) { crumb.addStyleClass("hidden"); crumb.removeStyleClass("compact"); crumb.removeStyleClass("collapsed"); if (crumb.hasStyleClass("start")) { crumb.removeStyleClass("start"); newStartNeeded = true; } if (crumb.hasStyleClass("end")) { crumb.removeStyleClass("end"); newEndNeeded = true; } continue; } collapsedRun = collapsed; if (newEndNeeded) { newEndNeeded = false; crumb.addStyleClass("end"); } } else collapsedRun = true; crumb = crumb.nextSibling; } if (newStartNeeded) { crumb = crumbs.lastChild; while (crumb) { if (!crumb.hasStyleClass("hidden")) { crumb.addStyleClass("start"); break; } crumb = crumb.previousSibling; } } } function compact(crumb) { if (crumb.hasStyleClass("hidden")) return; crumb.addStyleClass("compact"); } function collapse(crumb, dontCoalesce) { if (crumb.hasStyleClass("hidden")) return; crumb.addStyleClass("collapsed"); crumb.removeStyleClass("compact"); if (!dontCoalesce) coalesceCollapsedCrumbs(); } function compactDimmed(crumb) { if (crumb.hasStyleClass("dimmed")) compact(crumb); } function collapseDimmed(crumb) { if (crumb.hasStyleClass("dimmed")) collapse(crumb); } if (!focusedCrumb) { // When not focused on a crumb we can be biased and collapse less important // crumbs that the user might not care much about. // Compact child crumbs. if (makeCrumbsSmaller(compact, ChildSide)) return; // Collapse child crumbs. if (makeCrumbsSmaller(collapse, ChildSide)) return; // Compact dimmed ancestor crumbs. if (makeCrumbsSmaller(compactDimmed, AncestorSide)) return; // Collapse dimmed ancestor crumbs. if (makeCrumbsSmaller(collapseDimmed, AncestorSide)) return; } // Compact ancestor crumbs, or from both sides if focused. 
if (makeCrumbsSmaller(compact, (focusedCrumb ? BothSides : AncestorSide))) return; // Collapse ancestor crumbs, or from both sides if focused. if (makeCrumbsSmaller(collapse, (focusedCrumb ? BothSides : AncestorSide))) return; if (!selectedCrumb) return; // Compact the selected crumb. compact(selectedCrumb); if (crumbsAreSmallerThanContainer()) return; // Collapse the selected crumb as a last resort. Pass true to prevent coalescing. collapse(selectedCrumb, true); }, updateStyles: function(forceUpdate) { var stylesSidebarPane = this.sidebarPanes.styles; if (!stylesSidebarPane.expanded || !stylesSidebarPane.needsUpdate) return; stylesSidebarPane.update(this.focusedDOMNode, null, forceUpdate); stylesSidebarPane.needsUpdate = false; }, updateMetrics: function() { var metricsSidebarPane = this.sidebarPanes.metrics; if (!metricsSidebarPane.expanded || !metricsSidebarPane.needsUpdate) return; metricsSidebarPane.update(this.focusedDOMNode); metricsSidebarPane.needsUpdate = false; }, updateProperties: function() { var propertiesSidebarPane = this.sidebarPanes.properties; if (!propertiesSidebarPane.expanded || !propertiesSidebarPane.needsUpdate) return; propertiesSidebarPane.update(this.focusedDOMNode); propertiesSidebarPane.needsUpdate = false; }, handleKeyEvent: function(event) { this.treeOutline.handleKeyEvent(event); }, handleCopyEvent: function(event) { // Don't prevent the normal copy if the user has a selection. 
if (!window.getSelection().isCollapsed) return; switch (this.focusedDOMNode.nodeType) { case Node.ELEMENT_NODE: var data = this.focusedDOMNode.outerHTML; break; case Node.COMMENT_NODE: var data = "<!--" + this.focusedDOMNode.nodeValue + "-->"; break; default: case Node.TEXT_NODE: var data = this.focusedDOMNode.nodeValue; } event.clipboardData.clearData(); event.preventDefault(); if (data) event.clipboardData.setData("text/plain", data); }, rightSidebarResizerDragStart: function(event) { WebInspector.elementDragStart(this.sidebarElement, this.rightSidebarResizerDrag.bind(this), this.rightSidebarResizerDragEnd.bind(this), event, "col-resize"); }, rightSidebarResizerDragEnd: function(event) { WebInspector.elementDragEnd(event); }, rightSidebarResizerDrag: function(event) { var x = event.pageX; var newWidth = Number.constrain(window.innerWidth - x, Preferences.minElementsSidebarWidth, window.innerWidth * 0.66); this.sidebarElement.style.width = newWidth + "px"; this.contentElement.style.right = newWidth + "px"; this.sidebarResizeElement.style.right = (newWidth - 3) + "px"; this.treeOutline.updateSelection(); event.preventDefault(); }, _nodeSearchButtonClicked: function(event) { InspectorController.toggleNodeSearch(); if (InspectorController.searchingForNode()) this.nodeSearchButton.addStyleClass("toggled-on"); else this.nodeSearchButton.removeStyleClass("toggled-on"); } } WebInspector.ElementsPanel.prototype.__proto__ = WebInspector.Panel.prototype;
RLovelett/qt
src/3rdparty/webkit/WebCore/inspector/front-end/ElementsPanel.js
JavaScript
lgpl-2.1
45,561
//* This file is part of the MOOSE framework //* https://www.mooseframework.org //* //* All rights reserved, see COPYRIGHT for full restrictions //* https://github.com/idaholab/moose/blob/master/COPYRIGHT //* //* Licensed under LGPL 2.1, please see LICENSE for details //* https://www.gnu.org/licenses/lgpl-2.1.html #include "ADComputeFiniteStrainElasticStress.h" registerMooseObject("TensorMechanicsApp", ADComputeFiniteStrainElasticStress); InputParameters ADComputeFiniteStrainElasticStress::validParams() { InputParameters params = ADComputeStressBase::validParams(); params.addClassDescription("Compute stress using elasticity for finite strains"); return params; } ADComputeFiniteStrainElasticStress::ADComputeFiniteStrainElasticStress( const InputParameters & parameters) : ADComputeStressBase(parameters), GuaranteeConsumer(this), _elasticity_tensor_name(_base_name + "elasticity_tensor"), _elasticity_tensor(getADMaterialProperty<RankFourTensor>(_elasticity_tensor_name)), _strain_increment(getADMaterialPropertyByName<RankTwoTensor>(_base_name + "strain_increment")), _rotation_total(declareADProperty<RankTwoTensor>(_base_name + "rotation_total")), _rotation_total_old(getMaterialPropertyOldByName<RankTwoTensor>(_base_name + "rotation_total")), _rotation_increment( getADMaterialPropertyByName<RankTwoTensor>(_base_name + "rotation_increment")), _stress_old(getMaterialPropertyOldByName<RankTwoTensor>(_base_name + "stress")), _elastic_strain_old(getMaterialPropertyOldByName<RankTwoTensor>(_base_name + "elastic_strain")) { } void ADComputeFiniteStrainElasticStress::initialSetup() { } void ADComputeFiniteStrainElasticStress::initQpStatefulProperties() { ADComputeStressBase::initQpStatefulProperties(); RankTwoTensor identity_rotation(RankTwoTensor::initIdentity); _rotation_total[_qp] = identity_rotation; } void ADComputeFiniteStrainElasticStress::computeQpStress() { // Calculate the stress in the intermediate configuration ADRankTwoTensor intermediate_stress; if 
(hasGuaranteedMaterialProperty(_elasticity_tensor_name, Guarantee::ISOTROPIC)) intermediate_stress = _elasticity_tensor[_qp] * (_strain_increment[_qp] + _elastic_strain_old[_qp]); else { // Rotate elasticity tensor to the intermediate configuration // That is, elasticity tensor is defined in the previous time step // This is consistent with the definition of strain increment // The stress is projected onto the current configuration a few lines below ADRankFourTensor elasticity_tensor_rotated = _elasticity_tensor[_qp]; elasticity_tensor_rotated.rotate(_rotation_total_old[_qp]); intermediate_stress = elasticity_tensor_rotated * (_elastic_strain_old[_qp] + _strain_increment[_qp]); // Update current total rotation matrix to be used in next step _rotation_total[_qp] = _rotation_increment[_qp] * _rotation_total_old[_qp]; } // Rotate the stress state to the current configuration _stress[_qp] = _rotation_increment[_qp] * intermediate_stress * _rotation_increment[_qp].transpose(); // Assign value for elastic strain, which is equal to the mechanical strain _elastic_strain[_qp] = _mechanical_strain[_qp]; }
harterj/moose
modules/tensor_mechanics/src/materials/ADComputeFiniteStrainElasticStress.C
C++
lgpl-2.1
3,252
/* ** 2014 December 16 ** ** The author disclaims copyright to this source code. In place of ** a legal notice, here is a blessing: ** May you do good and not evil. ** May you find forgiveness for yourself and forgive others. ** May you share freely, never taking more than you give. */ package info.ata4.disunity.cli.command; import com.beust.jcommander.Parameter; import com.beust.jcommander.Parameters; import info.ata4.disunity.cli.converters.PathConverter; import info.ata4.io.util.PathUtils; import info.ata4.unity.assetbundle.AssetBundleUtils; import java.io.IOException; import java.nio.file.Path; /** * * @author Nico Bergemann <barracuda415 at yahoo.de> */ @Parameters( commandNames = "bundle-build", commandDescription = "Builds an asset bundle from a .json property file." ) public class BundleBuildCommand extends SingleFileCommand { @Parameter( names = {"-o", "--output"}, description = "Asset bundle output file", converter = PathConverter.class ) private Path outFile; @Override public void handleFile(Path file) throws IOException { if (outFile == null) { String fileName = PathUtils.getBaseName(file); outFile = file.getParent().resolve(fileName + ".unity3d"); } AssetBundleUtils.build(file, outFile); } }
catinred2/disunity
disunity-cli/src/main/java/info/ata4/disunity/cli/command/BundleBuildCommand.java
Java
unlicense
1,357
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.client.slm; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.function.Function; import java.util.stream.Collectors; public class SnapshotLifecycleStats implements ToXContentObject { private final long retentionRunCount; private final long retentionFailedCount; private final long retentionTimedOut; private final long retentionTimeMs; private final Map<String, SnapshotPolicyStats> policyStats; public static final ParseField RETENTION_RUNS = new ParseField("retention_runs"); public static final ParseField RETENTION_FAILED = new ParseField("retention_failed"); public static final ParseField RETENTION_TIMED_OUT = new ParseField("retention_timed_out"); public static 
final ParseField RETENTION_TIME = new ParseField("retention_deletion_time"); public static final ParseField RETENTION_TIME_MILLIS = new ParseField("retention_deletion_time_millis"); public static final ParseField POLICY_STATS = new ParseField("policy_stats"); public static final ParseField TOTAL_TAKEN = new ParseField("total_snapshots_taken"); public static final ParseField TOTAL_FAILED = new ParseField("total_snapshots_failed"); public static final ParseField TOTAL_DELETIONS = new ParseField("total_snapshots_deleted"); public static final ParseField TOTAL_DELETION_FAILURES = new ParseField("total_snapshot_deletion_failures"); @SuppressWarnings("unchecked") private static final ConstructingObjectParser<SnapshotLifecycleStats, Void> PARSER = new ConstructingObjectParser<>("snapshot_policy_stats", true, a -> { long runs = (long) a[0]; long failed = (long) a[1]; long timedOut = (long) a[2]; long timeMs = (long) a[3]; Map<String, SnapshotPolicyStats> policyStatsMap = ((List<SnapshotPolicyStats>) a[4]).stream() .collect(Collectors.toMap(m -> m.policyId, Function.identity())); return new SnapshotLifecycleStats(runs, failed, timedOut, timeMs, policyStatsMap); }); static { PARSER.declareLong(ConstructingObjectParser.constructorArg(), RETENTION_RUNS); PARSER.declareLong(ConstructingObjectParser.constructorArg(), RETENTION_FAILED); PARSER.declareLong(ConstructingObjectParser.constructorArg(), RETENTION_TIMED_OUT); PARSER.declareLong(ConstructingObjectParser.constructorArg(), RETENTION_TIME_MILLIS); PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), SnapshotPolicyStats.PARSER, POLICY_STATS); } // Package visible for testing private SnapshotLifecycleStats(long retentionRuns, long retentionFailed, long retentionTimedOut, long retentionTimeMs, Map<String, SnapshotPolicyStats> policyStats) { this.retentionRunCount = retentionRuns; this.retentionFailedCount = retentionFailed; this.retentionTimedOut = retentionTimedOut; this.retentionTimeMs = retentionTimeMs; 
this.policyStats = policyStats; } public static SnapshotLifecycleStats parse(XContentParser parser) { return PARSER.apply(parser, null); } public long getRetentionRunCount() { return retentionRunCount; } public long getRetentionFailedCount() { return retentionFailedCount; } public long getRetentionTimedOut() { return retentionTimedOut; } public long getRetentionTimeMillis() { return retentionTimeMs; } /** * @return a map of per-policy stats for each SLM policy */ public Map<String, SnapshotPolicyStats> getMetrics() { return Collections.unmodifiableMap(this.policyStats); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(RETENTION_RUNS.getPreferredName(), this.retentionRunCount); builder.field(RETENTION_FAILED.getPreferredName(), this.retentionFailedCount); builder.field(RETENTION_TIMED_OUT.getPreferredName(), this.retentionTimedOut); TimeValue retentionTime = TimeValue.timeValueMillis(this.retentionTimeMs); builder.field(RETENTION_TIME.getPreferredName(), retentionTime); builder.field(RETENTION_TIME_MILLIS.getPreferredName(), retentionTime.millis()); Map<String, SnapshotPolicyStats> metrics = getMetrics(); long totalTaken = metrics.values().stream().mapToLong(s -> s.snapshotsTaken).sum(); long totalFailed = metrics.values().stream().mapToLong(s -> s.snapshotsFailed).sum(); long totalDeleted = metrics.values().stream().mapToLong(s -> s.snapshotsDeleted).sum(); long totalDeleteFailures = metrics.values().stream().mapToLong(s -> s.snapshotDeleteFailures).sum(); builder.field(TOTAL_TAKEN.getPreferredName(), totalTaken); builder.field(TOTAL_FAILED.getPreferredName(), totalFailed); builder.field(TOTAL_DELETIONS.getPreferredName(), totalDeleted); builder.field(TOTAL_DELETION_FAILURES.getPreferredName(), totalDeleteFailures); builder.startObject(POLICY_STATS.getPreferredName()); for (Map.Entry<String, SnapshotPolicyStats> policy : metrics.entrySet()) { SnapshotPolicyStats 
perPolicyMetrics = policy.getValue(); builder.startObject(perPolicyMetrics.policyId); perPolicyMetrics.toXContent(builder, params); builder.endObject(); } builder.endObject(); builder.endObject(); return builder; } @Override public int hashCode() { return Objects.hash(retentionRunCount, retentionFailedCount, retentionTimedOut, retentionTimeMs, policyStats); } @Override public boolean equals(Object obj) { if (obj == null) { return false; } if (obj.getClass() != getClass()) { return false; } SnapshotLifecycleStats other = (SnapshotLifecycleStats) obj; return retentionRunCount == other.retentionRunCount && retentionFailedCount == other.retentionFailedCount && retentionTimedOut == other.retentionTimedOut && retentionTimeMs == other.retentionTimeMs && Objects.equals(policyStats, other.policyStats); } @Override public String toString() { return Strings.toString(this); } public static class SnapshotPolicyStats implements ToXContentFragment { private final String policyId; private final long snapshotsTaken; private final long snapshotsFailed; private final long snapshotsDeleted; private final long snapshotDeleteFailures; public static final ParseField POLICY_ID = new ParseField("policy"); static final ParseField SNAPSHOTS_TAKEN = new ParseField("snapshots_taken"); static final ParseField SNAPSHOTS_FAILED = new ParseField("snapshots_failed"); static final ParseField SNAPSHOTS_DELETED = new ParseField("snapshots_deleted"); static final ParseField SNAPSHOT_DELETION_FAILURES = new ParseField("snapshot_deletion_failures"); private static final ConstructingObjectParser<SnapshotPolicyStats, Void> PARSER = new ConstructingObjectParser<>("snapshot_policy_stats", true, a -> { String id = (String) a[0]; long taken = (long) a[1]; long failed = (long) a[2]; long deleted = (long) a[3]; long deleteFailed = (long) a[4]; return new SnapshotPolicyStats(id, taken, failed, deleted, deleteFailed); }); static { PARSER.declareString(ConstructingObjectParser.constructorArg(), POLICY_ID); 
PARSER.declareLong(ConstructingObjectParser.constructorArg(), SNAPSHOTS_TAKEN); PARSER.declareLong(ConstructingObjectParser.constructorArg(), SNAPSHOTS_FAILED); PARSER.declareLong(ConstructingObjectParser.constructorArg(), SNAPSHOTS_DELETED); PARSER.declareLong(ConstructingObjectParser.constructorArg(), SNAPSHOT_DELETION_FAILURES); } public SnapshotPolicyStats(String policyId, long snapshotsTaken, long snapshotsFailed, long deleted, long failedDeletes) { this.policyId = policyId; this.snapshotsTaken = snapshotsTaken; this.snapshotsFailed = snapshotsFailed; this.snapshotsDeleted = deleted; this.snapshotDeleteFailures = failedDeletes; } public static SnapshotPolicyStats parse(XContentParser parser, String policyId) { return PARSER.apply(parser, null); } public String getPolicyId() { return policyId; } public long getSnapshotsTaken() { return snapshotsTaken; } public long getSnapshotsFailed() { return snapshotsFailed; } public long getSnapshotsDeleted() { return snapshotsDeleted; } public long getSnapshotDeleteFailures() { return snapshotDeleteFailures; } @Override public int hashCode() { return Objects.hash(policyId, snapshotsTaken, snapshotsFailed, snapshotsDeleted, snapshotDeleteFailures); } @Override public boolean equals(Object obj) { if (obj == null) { return false; } if (obj.getClass() != getClass()) { return false; } SnapshotPolicyStats other = (SnapshotPolicyStats) obj; return Objects.equals(policyId, other.policyId) && snapshotsTaken == other.snapshotsTaken && snapshotsFailed == other.snapshotsFailed && snapshotsDeleted == other.snapshotsDeleted && snapshotDeleteFailures == other.snapshotDeleteFailures; } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.field(SnapshotPolicyStats.SNAPSHOTS_TAKEN.getPreferredName(), snapshotsTaken); builder.field(SnapshotPolicyStats.SNAPSHOTS_FAILED.getPreferredName(), snapshotsFailed); builder.field(SnapshotPolicyStats.SNAPSHOTS_DELETED.getPreferredName(), 
snapshotsDeleted); builder.field(SnapshotPolicyStats.SNAPSHOT_DELETION_FAILURES.getPreferredName(), snapshotDeleteFailures); return builder; } } }
gingerwizard/elasticsearch
client/rest-high-level/src/main/java/org/elasticsearch/client/slm/SnapshotLifecycleStats.java
Java
apache-2.0
11,928
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright (c) 2011 OpenStack Foundation
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Tests for pbr's packaging helpers: ChangeLog/AUTHORS generation,
sphinx doc building, and requirements/dependency-link parsing."""

from __future__ import print_function

import os
import sys
import tempfile

# Python 2/3 compatibility: prefer cStringIO on py2, fall back to io.BytesIO.
try:
    import cStringIO as io
    BytesIO = io.StringIO
except ImportError:
    import io
    BytesIO = io.BytesIO

import fixtures
import testscenarios

from pbr import packaging
from pbr.tests import base


class SkipFileWrites(base.BaseTestCase):
    """Verify ChangeLog/AUTHORS writing is skipped when requested.

    Skipping can be requested either via a setup.cfg option
    (skip_changelog / skip_authors) or via an environment variable
    (SKIP_WRITE_GIT_CHANGELOG / SKIP_GENERATE_AUTHORS); either one
    being "true" must suppress the file write.
    """

    scenarios = [
        ('changelog_option_true',
         dict(option_key='skip_changelog', option_value='True',
              env_key='SKIP_WRITE_GIT_CHANGELOG', env_value=None,
              pkg_func=packaging.write_git_changelog, filename='ChangeLog')),
        ('changelog_option_false',
         dict(option_key='skip_changelog', option_value='False',
              env_key='SKIP_WRITE_GIT_CHANGELOG', env_value=None,
              pkg_func=packaging.write_git_changelog, filename='ChangeLog')),
        ('changelog_env_true',
         dict(option_key='skip_changelog', option_value='False',
              env_key='SKIP_WRITE_GIT_CHANGELOG', env_value='True',
              pkg_func=packaging.write_git_changelog, filename='ChangeLog')),
        ('changelog_both_true',
         dict(option_key='skip_changelog', option_value='True',
              env_key='SKIP_WRITE_GIT_CHANGELOG', env_value='True',
              pkg_func=packaging.write_git_changelog, filename='ChangeLog')),
        ('authors_option_true',
         dict(option_key='skip_authors', option_value='True',
              env_key='SKIP_GENERATE_AUTHORS', env_value=None,
              pkg_func=packaging.generate_authors, filename='AUTHORS')),
        ('authors_option_false',
         dict(option_key='skip_authors', option_value='False',
              env_key='SKIP_GENERATE_AUTHORS', env_value=None,
              pkg_func=packaging.generate_authors, filename='AUTHORS')),
        ('authors_env_true',
         dict(option_key='skip_authors', option_value='False',
              env_key='SKIP_GENERATE_AUTHORS', env_value='True',
              pkg_func=packaging.generate_authors, filename='AUTHORS')),
        ('authors_both_true',
         dict(option_key='skip_authors', option_value='True',
              env_key='SKIP_GENERATE_AUTHORS', env_value='True',
              pkg_func=packaging.generate_authors, filename='AUTHORS')),
    ]

    def setUp(self):
        super(SkipFileWrites, self).setUp()
        self.temp_path = self.useFixture(fixtures.TempDir()).path
        self.root_dir = os.path.abspath(os.path.curdir)
        self.git_dir = os.path.join(self.root_dir, ".git")
        # These tests only make sense inside a real git checkout.
        if not os.path.exists(self.git_dir):
            self.skipTest("%s is missing; skipping git-related checks"
                          % self.git_dir)
            return
        self.filename = os.path.join(self.temp_path, self.filename)
        self.option_dict = dict()
        if self.option_key is not None:
            # option_dict mirrors distutils' (source, value) option format.
            self.option_dict[self.option_key] = ('setup.cfg',
                                                 self.option_value)
        self.useFixture(
            fixtures.EnvironmentVariable(self.env_key, self.env_value))

    def test_skip(self):
        # The output file must be absent exactly when skipping was requested
        # via either the option or the environment variable.
        self.pkg_func(git_dir=self.git_dir,
                      dest_dir=self.temp_path,
                      option_dict=self.option_dict)
        self.assertEqual(
            not os.path.exists(self.filename),
            (self.option_value.lower() in packaging.TRUE_VALUES
             or self.env_value is not None))

# Canned `git log --decorate` output used to fake the git subprocess below.
_changelog_content = """04316fe (review/monty_taylor/27519) Make python
378261a Add an integration test script.
3c373ac (HEAD, tag: 2013.2.rc2, tag: 2013.2, milestone-proposed) Merge "Lib
182feb3 (tag: 0.5.17) Fix pip invocation for old versions of pip.
fa4f46e (tag: 0.5.16) Remove explicit depend on distribute.
d1c53dd Use pip instead of easy_install for installation.
a793ea1 Merge "Skip git-checkout related tests when .git is missing"
6c27ce7 Skip git-checkout related tests when .git is missing
04984a5 Refactor hooks file.
a65e8ee (tag: 0.5.14, tag: 0.5.13) Remove jinja pin.
"""


class GitLogsTest(base.BaseTestCase):
    """Exercise ChangeLog and AUTHORS generation from (faked) git history."""

    def setUp(self):
        super(GitLogsTest, self).setUp()
        self.temp_path = self.useFixture(fixtures.TempDir()).path
        self.root_dir = os.path.abspath(os.path.curdir)
        self.git_dir = os.path.join(self.root_dir, ".git")
        # Clear the skip variables so generation actually runs.
        self.useFixture(
            fixtures.EnvironmentVariable('SKIP_GENERATE_AUTHORS'))
        self.useFixture(
            fixtures.EnvironmentVariable('SKIP_WRITE_GIT_CHANGELOG'))

    def test_write_git_changelog(self):
        # Fake the `git log` subprocess with the canned content above.
        self.useFixture(fixtures.FakePopen(lambda _: {
            "stdout": BytesIO(_changelog_content.encode('utf-8'))
        }))

        packaging.write_git_changelog(git_dir=self.git_dir,
                                      dest_dir=self.temp_path)

        with open(os.path.join(self.temp_path, "ChangeLog"), "r") as ch_fh:
            changelog_contents = ch_fh.read()
            # Tag names become section headers (underlined with dashes).
            self.assertIn("2013.2", changelog_contents)
            self.assertIn("0.5.17", changelog_contents)
            self.assertIn("------", changelog_contents)
            self.assertIn("Refactor hooks file", changelog_contents)
            # Trailing periods, hashes, local refs, duplicate tags on one
            # commit, and merge commits must all be filtered out.
            self.assertNotIn("Refactor hooks file.", changelog_contents)
            self.assertNotIn("182feb3", changelog_contents)
            self.assertNotIn("review/monty_taylor/27519", changelog_contents)
            self.assertNotIn("0.5.13", changelog_contents)
            self.assertNotIn('Merge "', changelog_contents)

    def test_generate_authors(self):
        author_old = u"Foo Foo <email@foo.com>"
        author_new = u"Bar Bar <email@bar.com>"
        co_author = u"Foo Bar <foo@bar.com>"
        co_author_by = u"Co-authored-by: " + co_author

        git_log_cmd = (
            "git --git-dir=%s log --format=%%aN <%%aE>"
            % self.git_dir)
        git_co_log_cmd = ("git --git-dir=%s log"
                          % self.git_dir)
        git_top_level = "git rev-parse --show-toplevel"
        # Map each expected git invocation to its canned output.
        cmd_map = {
            git_log_cmd: author_new,
            git_co_log_cmd: co_author_by,
            git_top_level: self.root_dir,
        }

        exist_files = [self.git_dir,
                       os.path.join(self.temp_path, "AUTHORS.in")]
        self.useFixture(fixtures.MonkeyPatch(
            "os.path.exists",
            lambda path: os.path.abspath(path) in exist_files))

        def _fake_run_shell_command(cmd, **kwargs):
            return cmd_map[" ".join(cmd)]

        self.useFixture(fixtures.MonkeyPatch(
            "pbr.packaging._run_shell_command",
            _fake_run_shell_command))

        with open(os.path.join(self.temp_path, "AUTHORS.in"), "w") as auth_fh:
            auth_fh.write("%s\n" % author_old)

        packaging.generate_authors(git_dir=self.git_dir,
                                   dest_dir=self.temp_path)

        with open(os.path.join(self.temp_path, "AUTHORS"), "r") as auth_fh:
            authors = auth_fh.read()
            # AUTHORS.in entries are kept, git authors and co-authors added.
            self.assertTrue(author_old in authors)
            self.assertTrue(author_new in authors)
            self.assertTrue(co_author in authors)


class BuildSphinxTest(base.BaseTestCase):
    """Verify LocalBuildDoc's autodoc-index option and builders parsing."""

    scenarios = [
        ('true_autodoc_caps',
         dict(has_opt=True, autodoc='True', has_autodoc=True)),
        ('true_autodoc_lower',
         dict(has_opt=True, autodoc='true', has_autodoc=True)),
        ('false_autodoc',
         dict(has_opt=True, autodoc='False', has_autodoc=False)),
        ('no_autodoc',
         dict(has_opt=False, autodoc='False', has_autodoc=False)),
    ]

    def setUp(self):
        super(BuildSphinxTest, self).setUp()

        # Stub out the actual sphinx build; we only test pbr's wrapper.
        self.useFixture(fixtures.MonkeyPatch(
            "sphinx.setup_command.BuildDoc.run", lambda self: None))

        from distutils import dist
        self.distr = dist.Distribution()
        self.distr.packages = ("fake_package",)
        self.distr.command_options["build_sphinx"] = {
            "source_dir": ["a", "."]}
        pkg_fixture = fixtures.PythonPackage(
            "fake_package", [("fake_module.py", b"")])
        self.useFixture(pkg_fixture)
        self.useFixture(base.DiveDir(pkg_fixture.base))

    def test_build_doc(self):
        if self.has_opt:
            self.distr.command_options["pbr"] = {
                "autodoc_index_modules": ('setup.cfg', self.autodoc)}
        build_doc = packaging.LocalBuildDoc(self.distr)
        build_doc.run()

        # autoindex files must exist iff autodoc indexing was enabled.
        self.assertTrue(
            os.path.exists("api/autoindex.rst") == self.has_autodoc)
        self.assertTrue(
            os.path.exists(
                "api/fake_package.fake_module.rst") == self.has_autodoc)

    def test_builders_config(self):
        if self.has_opt:
            self.distr.command_options["pbr"] = {
                "autodoc_index_modules": ('setup.cfg', self.autodoc)}

        # Default is both html and man builders.
        build_doc = packaging.LocalBuildDoc(self.distr)
        build_doc.finalize_options()

        self.assertEqual(2, len(build_doc.builders))
        self.assertIn('html', build_doc.builders)
        self.assertIn('man', build_doc.builders)

        # An explicit empty string means no builders.
        build_doc = packaging.LocalBuildDoc(self.distr)
        build_doc.builders = ''
        build_doc.finalize_options()

        self.assertEqual('', build_doc.builders)

        # A single builder name is honored as-is.
        build_doc = packaging.LocalBuildDoc(self.distr)
        build_doc.builders = 'man'
        build_doc.finalize_options()

        self.assertEqual(1, len(build_doc.builders))
        self.assertIn('man', build_doc.builders)

        # Comma-separated lists are split into individual builders.
        build_doc = packaging.LocalBuildDoc(self.distr)
        build_doc.builders = 'html,man,doctest'
        build_doc.finalize_options()

        self.assertIn('html', build_doc.builders)
        self.assertIn('man', build_doc.builders)
        self.assertIn('doctest', build_doc.builders)


class ParseRequirementsTest(base.BaseTestCase):
    """Exercise packaging.parse_requirements over requirements-file syntax."""

    def setUp(self):
        super(ParseRequirementsTest, self).setUp()
        (fd, self.tmp_file) = tempfile.mkstemp(prefix='openstack',
                                               suffix='.setup')

    def test_parse_requirements_normal(self):
        with open(self.tmp_file, 'w') as fh:
            fh.write("foo\nbar")
        self.assertEqual(['foo', 'bar'],
                         packaging.parse_requirements([self.tmp_file]))

    def test_parse_requirements_with_git_egg_url(self):
        # Editable VCS URLs collapse to the egg name.
        with open(self.tmp_file, 'w') as fh:
            fh.write("-e git://foo.com/zipball#egg=bar")
        self.assertEqual(['bar'],
                         packaging.parse_requirements([self.tmp_file]))

    def test_parse_requirements_with_versioned_git_egg_url(self):
        # A versioned egg becomes a ">=" constraint.
        with open(self.tmp_file, 'w') as fh:
            fh.write("-e git://foo.com/zipball#egg=bar-1.2.4")
        self.assertEqual(['bar>=1.2.4'],
                         packaging.parse_requirements([self.tmp_file]))

    def test_parse_requirements_with_http_egg_url(self):
        with open(self.tmp_file, 'w') as fh:
            fh.write("https://foo.com/zipball#egg=bar")
        self.assertEqual(['bar'],
                         packaging.parse_requirements([self.tmp_file]))

    def test_parse_requirements_with_versioned_http_egg_url(self):
        with open(self.tmp_file, 'w') as fh:
            fh.write("https://foo.com/zipball#egg=bar-4.2.1")
        self.assertEqual(['bar>=4.2.1'],
                         packaging.parse_requirements([self.tmp_file]))

    def test_parse_requirements_removes_index_lines(self):
        # "-f"/index options are pip directives, not requirements.
        with open(self.tmp_file, 'w') as fh:
            fh.write("-f foobar")
        self.assertEqual([], packaging.parse_requirements([self.tmp_file]))

    def test_parse_requirements_override_with_env(self):
        with open(self.tmp_file, 'w') as fh:
            fh.write("foo\nbar")
        self.useFixture(
            fixtures.EnvironmentVariable('PBR_REQUIREMENTS_FILES',
                                         self.tmp_file))
        self.assertEqual(['foo', 'bar'],
                         packaging.parse_requirements())

    def test_parse_requirements_override_with_env_multiple_files(self):
        # Missing entries in the env list are silently skipped.
        with open(self.tmp_file, 'w') as fh:
            fh.write("foo\nbar")
        self.useFixture(
            fixtures.EnvironmentVariable('PBR_REQUIREMENTS_FILES',
                                         "no-such-file," + self.tmp_file))
        self.assertEqual(['foo', 'bar'],
                         packaging.parse_requirements())

    def test_get_requirement_from_file_empty(self):
        actual = packaging.get_reqs_from_files([])
        self.assertEqual([], actual)

    def test_parse_requirements_with_comments(self):
        with open(self.tmp_file, 'w') as fh:
            fh.write("# this is a comment\nfoobar\n# and another one\nfoobaz")
        self.assertEqual(['foobar', 'foobaz'],
                         packaging.parse_requirements([self.tmp_file]))

    def test_parse_requirements_python_version(self):
        # requirements-pyN.txt for the running major version is picked up.
        with open("requirements-py%d.txt" % sys.version_info[0],
                  "w") as fh:
            fh.write("# this is a comment\nfoobar\n# and another one\nfoobaz")
        self.assertEqual(['foobar', 'foobaz'],
                         packaging.parse_requirements())

    def test_parse_requirements_right_python_version(self):
        # A file for a different python version must be ignored.
        with open("requirements-py1.txt", "w") as fh:
            fh.write("thisisatrap")
        with open("requirements-py%d.txt" % sys.version_info[0],
                  "w") as fh:
            fh.write("# this is a comment\nfoobar\n# and another one\nfoobaz")
        self.assertEqual(['foobar', 'foobaz'],
                         packaging.parse_requirements())


class ParseDependencyLinksTest(base.BaseTestCase):
    """Exercise packaging.parse_dependency_links."""

    def setUp(self):
        super(ParseDependencyLinksTest, self).setUp()
        (fd, self.tmp_file) = tempfile.mkstemp(prefix="openstack",
                                               suffix=".setup")

    def test_parse_dependency_normal(self):
        with open(self.tmp_file, "w") as fh:
            fh.write("http://test.com\n")
        self.assertEqual(
            ["http://test.com"],
            packaging.parse_dependency_links([self.tmp_file]))

    def test_parse_dependency_with_git_egg_url(self):
        # Unlike parse_requirements, the full VCS URL is preserved here.
        with open(self.tmp_file, "w") as fh:
            fh.write("-e git://foo.com/zipball#egg=bar")
        self.assertEqual(
            ["git://foo.com/zipball#egg=bar"],
            packaging.parse_dependency_links([self.tmp_file]))


def load_tests(loader, in_tests, pattern):
    # Expand the scenario lists above into individual test cases.
    return testscenarios.load_tests_apply_scenarios(loader, in_tests, pattern)
muzixing/ryu
pbr-0.10.0-py2.7.egg/pbr/tests/test_setup.py
Python
apache-2.0
15,144
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
package org.elasticsearch.xpack.core.ml.dataframe.evaluation.classification;

import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationFields;
import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationParameters;
import org.elasticsearch.xpack.core.ml.dataframe.evaluation.classification.Accuracy.Result;

import java.io.IOException;
import java.util.List;

import static org.elasticsearch.test.hamcrest.TupleMatchers.isTuple;
import static org.elasticsearch.xpack.core.ml.dataframe.evaluation.MockAggregations.mockCardinality;
import static org.elasticsearch.xpack.core.ml.dataframe.evaluation.MockAggregations.mockFilters;
import static org.elasticsearch.xpack.core.ml.dataframe.evaluation.MockAggregations.mockFiltersBucket;
import static org.elasticsearch.xpack.core.ml.dataframe.evaluation.MockAggregations.mockSingleValue;
import static org.elasticsearch.xpack.core.ml.dataframe.evaluation.MockAggregations.mockTerms;
import static org.elasticsearch.xpack.core.ml.dataframe.evaluation.MockAggregations.mockTermsBucket;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;

/**
 * Serialization and aggregation-processing tests for the {@link Accuracy}
 * classification evaluation metric.
 */
public class AccuracyTests extends AbstractSerializingTestCase<Accuracy> {

    private static final EvaluationParameters EVALUATION_PARAMETERS = new EvaluationParameters(100);
    private static final EvaluationFields EVALUATION_FIELDS = new EvaluationFields("foo", "bar", null, null, null, true);

    @Override
    protected Accuracy doParseInstance(XContentParser parser) throws IOException {
        return Accuracy.fromXContent(parser);
    }

    @Override
    protected Accuracy createTestInstance() {
        return createRandom();
    }

    @Override
    protected Writeable.Reader<Accuracy> instanceReader() {
        return Accuracy::new;
    }

    @Override
    protected boolean supportsUnknownFields() {
        return true;
    }

    public static Accuracy createRandom() {
        // Accuracy is stateless until process() is called, so there is
        // nothing to randomize.
        return new Accuracy();
    }

    public void testProcess() {
        // Mock the two-step aggregation response the metric expects:
        // step 1 lists the actual classes (+ their cardinality),
        // step 2 provides the per-actual-class predicted-class counts.
        Aggregations aggs = new Aggregations(List.of(
            mockTerms(
                "accuracy_" + MulticlassConfusionMatrix.STEP_1_AGGREGATE_BY_ACTUAL_CLASS,
                List.of(
                    mockTermsBucket("dog", new Aggregations(List.of())),
                    mockTermsBucket("cat", new Aggregations(List.of()))),
                100L),
            mockCardinality("accuracy_" + MulticlassConfusionMatrix.STEP_1_CARDINALITY_OF_ACTUAL_CLASS, 1000L),
            mockFilters(
                "accuracy_" + MulticlassConfusionMatrix.STEP_2_AGGREGATE_BY_ACTUAL_CLASS,
                List.of(
                    mockFiltersBucket(
                        "dog",
                        30,
                        new Aggregations(List.of(mockFilters(
                            "accuracy_" + MulticlassConfusionMatrix.STEP_2_AGGREGATE_BY_PREDICTED_CLASS,
                            List.of(mockFiltersBucket("cat", 10L), mockFiltersBucket("dog", 20L), mockFiltersBucket("_other_", 0L)))))),
                    mockFiltersBucket(
                        "cat",
                        70,
                        new Aggregations(List.of(mockFilters(
                            "accuracy_" + MulticlassConfusionMatrix.STEP_2_AGGREGATE_BY_PREDICTED_CLASS,
                            List.of(mockFiltersBucket("cat", 30L), mockFiltersBucket("dog", 40L), mockFiltersBucket("_other_", 0L)))))))),
            mockSingleValue(Accuracy.OVERALL_ACCURACY_AGG_NAME, 0.5)));

        Accuracy accuracy = new Accuracy();
        accuracy.process(aggs);

        // Once processed, no further aggregations should be requested.
        assertThat(accuracy.aggs(EVALUATION_PARAMETERS, EVALUATION_FIELDS), isTuple(empty(), empty()));

        Result result = accuracy.getResult().get();
        assertThat(result.getMetricName(), equalTo(Accuracy.NAME.getPreferredName()));
        assertThat(
            result.getClasses(),
            equalTo(
                List.of(
                    new PerClassSingleValue("dog", 0.5),
                    new PerClassSingleValue("cat", 0.5))));
        assertThat(result.getOverallAccuracy(), equalTo(0.5));
    }

    public void testProcess_GivenCardinalityTooHigh() {
        // Same fixture as testProcess but with an actual-class cardinality of
        // 1001, which apparently exceeds the metric's limit (1000 passes
        // above) and must be rejected.
        Aggregations aggs = new Aggregations(List.of(
            mockTerms(
                "accuracy_" + MulticlassConfusionMatrix.STEP_1_AGGREGATE_BY_ACTUAL_CLASS,
                List.of(
                    mockTermsBucket("dog", new Aggregations(List.of())),
                    mockTermsBucket("cat", new Aggregations(List.of()))),
                100L),
            mockCardinality("accuracy_" + MulticlassConfusionMatrix.STEP_1_CARDINALITY_OF_ACTUAL_CLASS, 1001L),
            mockFilters(
                "accuracy_" + MulticlassConfusionMatrix.STEP_2_AGGREGATE_BY_ACTUAL_CLASS,
                List.of(
                    mockFiltersBucket(
                        "dog",
                        30,
                        new Aggregations(List.of(mockFilters(
                            "accuracy_" + MulticlassConfusionMatrix.STEP_2_AGGREGATE_BY_PREDICTED_CLASS,
                            List.of(mockFiltersBucket("cat", 10L), mockFiltersBucket("dog", 20L), mockFiltersBucket("_other_", 0L)))))),
                    mockFiltersBucket(
                        "cat",
                        70,
                        new Aggregations(List.of(mockFilters(
                            "accuracy_" + MulticlassConfusionMatrix.STEP_2_AGGREGATE_BY_PREDICTED_CLASS,
                            List.of(mockFiltersBucket("cat", 30L), mockFiltersBucket("dog", 40L), mockFiltersBucket("_other_", 0L)))))))),
            mockSingleValue(Accuracy.OVERALL_ACCURACY_AGG_NAME, 0.5)));

        Accuracy accuracy = new Accuracy();
        accuracy.aggs(EVALUATION_PARAMETERS, EVALUATION_FIELDS);

        ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> accuracy.process(aggs));
        assertThat(e.getMessage(), containsString("Cardinality of field [foo] is too high"));
    }

    public void testComputePerClassAccuracy() {
        // Per-class accuracy = (TP + TN) / total docs; totals below are over
        // the 51 documents in the mocked confusion matrix.
        assertThat(
            Accuracy.computePerClassAccuracy(
                new MulticlassConfusionMatrix.Result(
                    List.of(
                        new MulticlassConfusionMatrix.ActualClass("A", 14, List.of(
                            new MulticlassConfusionMatrix.PredictedClass("A", 1),
                            new MulticlassConfusionMatrix.PredictedClass("B", 6),
                            new MulticlassConfusionMatrix.PredictedClass("C", 4)
                        ), 3L),
                        new MulticlassConfusionMatrix.ActualClass("B", 20, List.of(
                            new MulticlassConfusionMatrix.PredictedClass("A", 5),
                            new MulticlassConfusionMatrix.PredictedClass("B", 3),
                            new MulticlassConfusionMatrix.PredictedClass("C", 9)
                        ), 3L),
                        new MulticlassConfusionMatrix.ActualClass("C", 17, List.of(
                            new MulticlassConfusionMatrix.PredictedClass("A", 8),
                            new MulticlassConfusionMatrix.PredictedClass("B", 2),
                            new MulticlassConfusionMatrix.PredictedClass("C", 7)
                        ), 0L)),
                    0)),
            equalTo(
                List.of(
                    new PerClassSingleValue("A", 25.0 / 51),  // 13 false positives, 13 false negatives
                    new PerClassSingleValue("B", 26.0 / 51),  // 8 false positives, 17 false negatives
                    new PerClassSingleValue("C", 28.0 / 51)))  // 13 false positives, 10 false negatives
        );
    }

    public void testComputePerClassAccuracy_OtherActualClassCountIsNonZero() {
        // A non-zero "other" actual-class count violates an internal
        // invariant and trips an assertion.
        expectThrows(AssertionError.class, () -> Accuracy.computePerClassAccuracy(new MulticlassConfusionMatrix.Result(List.of(), 1)));
    }
}
robin13/elasticsearch
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/AccuracyTests.java
Java
apache-2.0
8,475
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
package org.elasticsearch.xpack.watcher.condition;

import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.core.watcher.condition.ExecutableCondition;

import java.time.Clock;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.containsString;

/**
 * Tests for the {@link InternalAlwaysCondition} watcher condition: it must
 * always be met, parse only from an empty object, and reject any body.
 */
public class AlwaysConditionTests extends ESTestCase {

    /** The "always" condition is unconditionally met. */
    public void testExecute() throws Exception {
        ExecutableCondition alwaysTrue = InternalAlwaysCondition.INSTANCE;
        assertTrue(alwaysTrue.execute(null).met());
    }

    /** An empty JSON object is the only valid representation. */
    public void testParserValid() throws Exception {
        XContentBuilder builder = jsonBuilder();
        builder.startObject();
        builder.endObject();
        XContentParser parser = createParser(builder);
        parser.nextToken();
        ExecutableCondition executable = InternalAlwaysCondition.parse("_id", parser);
        assertTrue(executable.execute(null).met());
    }

    /** Any field in the condition body must be rejected at parse time. */
    public void testParserInvalid() throws Exception {
        XContentBuilder builder = jsonBuilder()
                .startObject()
                .field("foo", "bar")
                .endObject();
        XContentParser parser = createParser(builder);
        parser.nextToken();
        // Use expectThrows instead of the legacy try/fail/catch idiom: it
        // fails with a clear message if no exception is thrown and returns
        // the exception for further assertions.
        ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class,
                () -> InternalAlwaysCondition.parse("_id", parser));
        assertThat(e.getMessage(), containsString("expected an empty object but found [foo]"));
    }

    /**
     * Builds a random executable condition of one of the known types.
     * Shared helper used by other watcher test classes.
     *
     * @param scriptService service backing any generated {@link ScriptCondition}
     */
    public static ExecutableCondition randomCondition(ScriptService scriptService) {
        String type = randomFrom(ScriptCondition.TYPE, InternalAlwaysCondition.TYPE, CompareCondition.TYPE,
                ArrayCompareCondition.TYPE);
        switch (type) {
            case ScriptCondition.TYPE:
                Script mockScript = mockScript("_script");
                return new ScriptCondition(mockScript, scriptService);
            case CompareCondition.TYPE:
                return new CompareCondition("_path", randomFrom(CompareCondition.Op.values()), randomFrom(5, "3"),
                        Clock.systemUTC());
            case ArrayCompareCondition.TYPE:
                return new ArrayCompareCondition("_array_path", "_path",
                        randomFrom(ArrayCompareCondition.Op.values()), randomFrom(5, "3"), ArrayCompareCondition.Quantifier.SOME,
                        Clock.systemUTC());
            default:
                return InternalAlwaysCondition.INSTANCE;
        }
    }
}
robin13/elasticsearch
x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/condition/AlwaysConditionTests.java
Java
apache-2.0
3,178
/*
 *  Licensed to the Apache Software Foundation (ASF) under one or more
 *  contributor license agreements.  See the NOTICE file distributed with
 *  this work for additional information regarding copyright ownership.
 *  The ASF licenses this file to You under the Apache License, Version 2.0
 *  (the "License"); you may not use this file except in compliance with
 *  the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */

package org.apache.harmony.security.tests.java.security;

import dalvik.annotation.TestTargetClass;
import dalvik.annotation.TestTargets;
import dalvik.annotation.TestLevel;
import dalvik.annotation.TestTargetNew;

import java.security.AllPermission;
import java.security.Permission;
import java.security.PermissionCollection;
import java.security.SecurityPermission;
import java.util.Enumeration;

/**
 * JUnit3 tests for {@link java.security.AllPermission}: its canonical
 * name/actions strings, equality, hashing, implication semantics (implies
 * everything, is implied by nothing else) and its permission collection.
 */
@TestTargetClass(AllPermission.class)
public class AllPermission2Test extends junit.framework.TestCase {

    /**
     * @tests java.security.AllPermission#AllPermission()
     */
    @TestTargetNew(
        level = TestLevel.COMPLETE,
        notes = "",
        method = "AllPermission",
        args = {}
    )
    public void test_Constructor() {
        // Test for method java.security.AllPermission()
        // The no-arg constructor always yields the canonical name.
        AllPermission ap = new AllPermission();
        assertEquals("Bogus name for AllPermission \"" + ap.getName() + "\".",
                "<all permissions>", ap.getName());
    }

    /**
     * @tests java.security.AllPermission#AllPermission(java.lang.String,
     *        java.lang.String)
     */
    @TestTargetNew(
        level = TestLevel.PARTIAL,
        notes = "Null/empty parameters checking missed",
        method = "AllPermission",
        args = {java.lang.String.class, java.lang.String.class}
    )
    public void test_ConstructorLjava_lang_StringLjava_lang_String() {
        // Test for method java.security.AllPermission(java.lang.String,
        // java.lang.String)
        // The two-arg constructor must ignore both arguments entirely.
        AllPermission ap = new AllPermission("Don't remember this stupid name",
                "or this action");
        assertEquals("Bogus name for AllPermission \"" + ap.getName() + "\".",
                "<all permissions>", ap.getName());
        assertEquals(
                "AllPermission constructed with actions didn't ignore them.",
                "<all actions>", ap.getActions());
    }

    /**
     * @tests java.security.AllPermission#equals(java.lang.Object)
     */
    @TestTargetNew(
        level = TestLevel.COMPLETE,
        notes = "Update comment for first assertTrue method.Because: Two AllPermission objects are always equal",
        method = "equals",
        args = {java.lang.Object.class}
    )
    public void test_equalsLjava_lang_Object() {
        // Test for method boolean
        // java.security.AllPermission.equals(java.lang.Object)
        // Any two AllPermission instances compare equal; other Permission
        // subclasses never do.
        assertTrue("Two AllPermissions not equal to each other.",
                new AllPermission().equals(new AllPermission()));
        assertTrue("AllPermission equals a SecurityPermission.",
                !(new AllPermission().equals(new SecurityPermission("ugh!"))));
    }

    /**
     * @tests java.security.AllPermission#getActions()
     */
    @TestTargetNew(
        level = TestLevel.COMPLETE,
        notes = "",
        method = "getActions",
        args = {}
    )
    public void test_getActions() {
        AllPermission ap = new AllPermission();
        // Test for method java.lang.String
        // java.security.AllPermission.getActions()
        assertTrue("AllPermission has non-empty actions. (" + ap.getActions()
                + ")", ap.getActions().equals("<all actions>"));
    }

    /**
     * @tests java.security.AllPermission#hashCode()
     */
    @TestTargetNew(
        level = TestLevel.COMPLETE,
        notes = "",
        method = "hashCode",
        args = {}
    )
    public void test_hashCode() {
        // NOTE(review): pins hashCode to the literal value 1 — presumably the
        // Harmony/Android implementation constant; confirm it is a documented
        // contract rather than an implementation detail.
        final int ALLPERMISSION_HASH = 1;
        // Test for method int java.security.AllPermission.hashCode()
        AllPermission TestAllPermission = new AllPermission();
        assertTrue("AllPermission hashCode is wrong. Should have been "
                + ALLPERMISSION_HASH + " but was "
                + TestAllPermission.hashCode(),
                TestAllPermission.hashCode() == ALLPERMISSION_HASH);
    }

    /**
     * @tests java.security.AllPermission#implies(java.security.Permission)
     */
    @TestTargetNew(
        level = TestLevel.COMPLETE,
        notes = "",
        method = "implies",
        args = {java.security.Permission.class}
    )
    public void test_impliesLjava_security_Permission() {
        // Test for method boolean
        // java.security.AllPermission.implies(java.security.Permission)
        // AllPermission implies every permission (even null here), but is
        // never implied by a narrower permission.
        assertTrue("AllPermission does not imply a AllPermission.",
                new AllPermission().implies(new AllPermission()));
        assertTrue("AllPermission does not imply a SecurityPermission.",
                new AllPermission().implies(new SecurityPermission("ugh!")));
        assertTrue("SecurityPermission implies AllPermission.",
                !(new SecurityPermission("ugh!").implies(new AllPermission())));
        assertTrue("AllPermission does not imply when parametr NULL",
                new AllPermission().implies(null));
    }

    /**
     * @tests java.security.AllPermission#newPermissionCollection()
     */
    @TestTargetNew(
        level = TestLevel.COMPLETE,
        notes = "",
        method = "newPermissionCollection",
        args = {}
    )
    public void test_newPermissionCollection() {
        AllPermission ap1 = new AllPermission();
        AllPermission ap2 = new AllPermission("Don't remember this stupid name",
                "or this action");
        AllPermission ap3 = new AllPermission("Remember this cool name",
                "and this action");

        // A fresh collection is writable and empty.
        PermissionCollection pc1 = ap1.newPermissionCollection();
        assertFalse(pc1.isReadOnly());
        Enumeration<Permission> perm1 = pc1.elements();
        assertFalse(perm1.hasMoreElements());
        assertNotNull(perm1);

        // Once any AllPermission is added, the collection implies everything.
        pc1.add(ap1);
        pc1.add(ap2);
        assertTrue("Should imply", pc1.implies(ap1));
        assertTrue("Should imply", pc1.implies(ap2));
        assertTrue("Should imply", pc1.implies(ap3));
        perm1 = pc1.elements();
        assertTrue(perm1.hasMoreElements());

        // Same behavior regardless of which instance created the collection.
        PermissionCollection pc2 = ap2.newPermissionCollection();
        assertFalse(pc2.isReadOnly());
        Enumeration<Permission> perm2 = pc2.elements();
        assertFalse(perm2.hasMoreElements());
        assertNotNull(perm2);
        pc2.add(ap1);
        pc2.add(ap2);
        assertTrue("Should imply", pc2.implies(ap1));
        assertTrue("Should imply", pc2.implies(ap2));
        assertTrue("Should imply", pc2.implies(ap3));
        perm2 = pc2.elements();
        assertTrue(perm2.hasMoreElements());
    }
}
openweave/openweave-core
third_party/android/platform-libcore/android-platform-libcore/luni/src/test/java/org/apache/harmony/security/tests/java/security/AllPermission2Test.java
Java
apache-2.0
7,204
package org.osmdroid.samplefragments;

/**
 * Lazily-initialized singleton registry of the demo sample fragments.
 * Obtain the shared instance via {@link #getInstance()}.
 */
public final class SampleFactory {

    // Fixed list of available samples, built once in the constructor.
    private final BaseSampleFragment[] mSamples;

    // Shared instance; created on first use (guarded by getInstance()).
    private static SampleFactory _instance;

    /**
     * Returns the shared factory, creating it on first call.
     * Synchronized so that concurrent first calls cannot race the null
     * check and create two instances or observe a partially-published one.
     */
    public static synchronized SampleFactory getInstance() {
        if (_instance == null) {
            _instance = new SampleFactory();
        }
        return _instance;
    }

    private SampleFactory() {
        mSamples = new BaseSampleFragment[] {
                new SampleWithMinimapItemizedOverlayWithFocus(),
                new SampleLimitedScrollArea(),
                new SampleFragmentXmlLayout(),
                new SampleOsmPath()
        };
    }

    /**
     * Returns the sample at {@code index}.
     *
     * @param index position in the sample list; must be in
     *              {@code [0, count())} — out-of-range values throw
     *              {@link ArrayIndexOutOfBoundsException}.
     */
    public BaseSampleFragment getSample(int index) {
        return mSamples[index];
    }

    /** Number of registered samples. */
    public int count() {
        return mSamples.length;
    }
}
DT9/osmdroid
OpenStreetMapViewer/src/org/osmdroid/samplefragments/SampleFactory.java
Java
apache-2.0
642
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.tests.p2p; import org.apache.ignite.compute.*; import org.apache.ignite.internal.util.typedef.*; import org.jetbrains.annotations.*; import java.util.*; /** * Test task for {@code GridP2PContinuousDeploymentSelfTest}. */ public class GridP2PContinuousDeploymentTask2 extends ComputeTaskSplitAdapter<Object, Object> { /** {@inheritDoc} */ @Override protected Collection<? extends ComputeJob> split(int gridSize, Object arg) { return Collections.singleton(new ComputeJobAdapter() { @Override public Object execute() { X.println(">>> Executing GridP2PContinuousDeploymentTask2 job."); return null; } }); } /** {@inheritDoc} */ @Nullable @Override public Object reduce(List<ComputeJobResult> results) { return null; } }
akuznetsov-gridgain/ignite
modules/extdata/p2p/src/main/java/org/apache/ignite/tests/p2p/GridP2PContinuousDeploymentTask2.java
Java
apache-2.0
1,657
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.tools; import java.io.BufferedReader; import java.io.DataInput; import java.io.DataOutput; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStreamReader; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.EnumSet; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Random; import java.util.Stack; import java.util.StringTokenizer; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileAlreadyExistsException; import org.apache.hadoop.fs.FileChecksum; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Trash; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hdfs.protocol.QuotaExceededException; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.SequenceFile; import 
org.apache.hadoop.io.SequenceFile.Reader; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.WritableComparable; import org.apache.hadoop.io.SequenceFile.Writer; import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.mapred.FileOutputFormat; import org.apache.hadoop.mapred.FileSplit; import org.apache.hadoop.mapred.InputFormat; import org.apache.hadoop.mapred.InputSplit; import org.apache.hadoop.mapred.InvalidInputException; import org.apache.hadoop.mapred.JobClient; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.Mapper; import org.apache.hadoop.mapred.OutputCollector; import org.apache.hadoop.mapred.RecordReader; import org.apache.hadoop.mapred.Reporter; import org.apache.hadoop.mapred.SequenceFileRecordReader; import org.apache.hadoop.mapreduce.JobSubmissionFiles; import org.apache.hadoop.mapreduce.security.TokenCache; import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix; /** * A Map-reduce program to recursively copy directories between * different file-systems. 
*/ @Deprecated public class DistCpV1 implements Tool { public static final Log LOG = LogFactory.getLog(DistCpV1.class); private static final String NAME = "distcp"; private static final String usage = NAME + " [OPTIONS] <srcurl>* <desturl>" + "\n\nOPTIONS:" + "\n-p[rbugpt] Preserve status" + "\n r: replication number" + "\n b: block size" + "\n u: user" + "\n g: group" + "\n p: permission" + "\n t: modification and access times" + "\n -p alone is equivalent to -prbugpt" + "\n-i Ignore failures" + "\n-basedir <basedir> Use <basedir> as the base directory when copying files from <srcurl>" + "\n-log <logdir> Write logs to <logdir>" + "\n-m <num_maps> Maximum number of simultaneous copies" + "\n-overwrite Overwrite destination" + "\n-update Overwrite if src size different from dst size" + "\n-skipcrccheck Do not use CRC check to determine if src is " + "\n different from dest. Relevant only if -update" + "\n is specified" + "\n-f <urilist_uri> Use list at <urilist_uri> as src list" + "\n-filelimit <n> Limit the total number of files to be <= n" + "\n-sizelimit <n> Limit the total size to be <= n bytes" + "\n-delete Delete the files existing in the dst but not in src" + "\n-dryrun Display count of files and total size of files" + "\n in src and then exit. Copy is not done at all." + "\n desturl should not be speicified with out -update." + "\n-mapredSslConf <f> Filename of SSL configuration for mapper task" + "\n\nNOTE 1: if -overwrite or -update are set, each source URI is " + "\n interpreted as an isomorphic update to an existing directory." + "\nFor example:" + "\nhadoop " + NAME + " -p -update \"hdfs://A:8020/user/foo/bar\" " + "\"hdfs://B:8020/user/foo/baz\"\n" + "\n would update all descendants of 'baz' also in 'bar'; it would " + "\n *not* update /user/foo/baz/bar" + "\n\nNOTE 2: The parameter <n> in -filelimit and -sizelimit can be " + "\n specified with symbolic representation. 
For examples," +
    "\n 1230k = 1230 * 1024 = 1259520" +
    "\n 891g = 891 * 1024^3 = 956703965184" +
    "\n";

  // Default number of bytes of source data assigned to one map task.
  private static final long BYTES_PER_MAP = 256 * 1024 * 1024;
  // Upper bound on simultaneous map tasks scheduled per node.
  private static final int MAX_MAPS_PER_NODE = 20;
  private static final int SYNC_FILE_MAX = 10;
  // Default copy attempts per file when copy validation fails.
  private static final int DEFAULT_FILE_RETRIES = 3;

  /** Job counters reported by the copy mappers. */
  static enum Counter { COPY, SKIP, FAIL, BYTESCOPIED, BYTESEXPECTED }

  /** Command-line switches and the job property each one maps to. */
  static enum Options {
    DELETE("-delete", NAME + ".delete"),
    FILE_LIMIT("-filelimit", NAME + ".limit.file"),
    SIZE_LIMIT("-sizelimit", NAME + ".limit.size"),
    IGNORE_READ_FAILURES("-i", NAME + ".ignore.read.failures"),
    PRESERVE_STATUS("-p", NAME + ".preserve.status"),
    OVERWRITE("-overwrite", NAME + ".overwrite.always"),
    UPDATE("-update", NAME + ".overwrite.ifnewer"),
    SKIPCRC("-skipcrccheck", NAME + ".skip.crc.check");

    // Switch text as typed on the command line, and the job property
    // under which its value is recorded.
    final String cmd, propertyname;

    private Options(String cmd, String propertyname) {
      this.cmd = cmd;
      this.propertyname = propertyname;
    }

    /**
     * Parses the numeric argument that follows this switch at args[offset].
     * Accepts traditional binary-prefix suffixes (e.g. 1230k, 891g); the
     * value must be positive, otherwise IllegalArgumentException is thrown.
     */
    private long parseLong(String[] args, int offset) {
      if (offset == args.length) {
        throw new IllegalArgumentException("<n> not specified in " + cmd);
      }
      long n = StringUtils.TraditionalBinaryPrefix.string2long(args[offset]);
      if (n <= 0) {
        throw new IllegalArgumentException("n = " + n + " <= 0 in " + cmd);
      }
      return n;
    }
  }
IllegalArgumentException("'" + c + "' in " + s + " is undefined."); } } return set; } } static final String TMP_DIR_LABEL = NAME + ".tmp.dir"; static final String DST_DIR_LABEL = NAME + ".dest.path"; static final String JOB_DIR_LABEL = NAME + ".job.dir"; static final String MAX_MAPS_LABEL = NAME + ".max.map.tasks"; static final String SRC_LIST_LABEL = NAME + ".src.list"; static final String SRC_COUNT_LABEL = NAME + ".src.count"; static final String TOTAL_SIZE_LABEL = NAME + ".total.size"; static final String DST_DIR_LIST_LABEL = NAME + ".dst.dir.list"; static final String BYTES_PER_MAP_LABEL = NAME + ".bytes.per.map"; static final String PRESERVE_STATUS_LABEL = Options.PRESERVE_STATUS.propertyname + ".value"; static final String FILE_RETRIES_LABEL = NAME + ".file.retries"; private JobConf conf; public void setConf(Configuration conf) { if (conf instanceof JobConf) { this.conf = (JobConf) conf; } else { this.conf = new JobConf(conf); } } public Configuration getConf() { return conf; } public DistCpV1(Configuration conf) { setConf(conf); } /** * An input/output pair of filenames. */ static class FilePair implements Writable { FileStatus input = new FileStatus(); String output; FilePair() { } FilePair(FileStatus input, String output) { this.input = input; this.output = output; } public void readFields(DataInput in) throws IOException { input.readFields(in); output = Text.readString(in); } public void write(DataOutput out) throws IOException { input.write(out); Text.writeString(out, output); } public String toString() { return input + " : " + output; } } /** * InputFormat of a distcp job responsible for generating splits of the src * file list. */ static class CopyInputFormat implements InputFormat<Text, Text> { /** * Produce splits such that each is no greater than the quotient of the * total size and the number of splits requested. 
* @param job The handle to the JobConf object * @param numSplits Number of splits requested */ public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException { int cnfiles = job.getInt(SRC_COUNT_LABEL, -1); long cbsize = job.getLong(TOTAL_SIZE_LABEL, -1); String srcfilelist = job.get(SRC_LIST_LABEL, ""); if (cnfiles < 0 || cbsize < 0 || "".equals(srcfilelist)) { throw new RuntimeException("Invalid metadata: #files(" + cnfiles + ") total_size(" + cbsize + ") listuri(" + srcfilelist + ")"); } Path src = new Path(srcfilelist); FileSystem fs = src.getFileSystem(job); FileStatus srcst = fs.getFileStatus(src); ArrayList<FileSplit> splits = new ArrayList<FileSplit>(numSplits); LongWritable key = new LongWritable(); FilePair value = new FilePair(); final long targetsize = cbsize / numSplits; long pos = 0L; long last = 0L; long acc = 0L; long cbrem = srcst.getLen(); try (SequenceFile.Reader sl = new SequenceFile.Reader(job, Reader.file(src))) { for (; sl.next(key, value); last = sl.getPosition()) { // if adding this split would put this split past the target size, // cut the last split and put this next file in the next split. if (acc + key.get() > targetsize && acc != 0) { long splitsize = last - pos; splits.add(new FileSplit(src, pos, splitsize, (String[])null)); cbrem -= splitsize; pos = last; acc = 0L; } acc += key.get(); } } if (cbrem != 0) { splits.add(new FileSplit(src, pos, cbrem, (String[])null)); } return splits.toArray(new FileSplit[splits.size()]); } /** * Returns a reader for this split of the src file list. */ public RecordReader<Text, Text> getRecordReader(InputSplit split, JobConf job, Reporter reporter) throws IOException { return new SequenceFileRecordReader<Text, Text>(job, (FileSplit)split); } } /** * FSCopyFilesMapper: The mapper for copying files between FileSystems. 
*/ static class CopyFilesMapper implements Mapper<LongWritable, FilePair, WritableComparable<?>, Text> { // config private int sizeBuf = 128 * 1024; private FileSystem destFileSys = null; private boolean ignoreReadFailures; private boolean preserve_status; private EnumSet<FileAttribute> preseved; private boolean overwrite; private boolean update; private Path destPath = null; private byte[] buffer = null; private JobConf job; private boolean skipCRCCheck = false; // stats private int failcount = 0; private int skipcount = 0; private int copycount = 0; private String getCountString() { return "Copied: " + copycount + " Skipped: " + skipcount + " Failed: " + failcount; } private void updateStatus(Reporter reporter) { reporter.setStatus(getCountString()); } /** * Return true if dst should be replaced by src and the update flag is set. * Right now, this merely checks that the src and dst len are not equal. * This should be improved on once modification times, CRCs, etc. can * be meaningful in this context. * @throws IOException */ private boolean needsUpdate(FileStatus srcstatus, FileSystem dstfs, Path dstpath) throws IOException { return update && !sameFile(srcstatus.getPath().getFileSystem(job), srcstatus, dstfs, dstpath, skipCRCCheck); } private FSDataOutputStream create(Path f, Reporter reporter, FileStatus srcstat) throws IOException { if (destFileSys.exists(f)) { destFileSys.delete(f, false); } if (!preserve_status) { return destFileSys.create(f, true, sizeBuf, reporter); } FsPermission permission = preseved.contains(FileAttribute.PERMISSION)? srcstat.getPermission(): null; short replication = preseved.contains(FileAttribute.REPLICATION)? srcstat.getReplication(): destFileSys.getDefaultReplication(f); long blockSize = preseved.contains(FileAttribute.BLOCK_SIZE)? 
srcstat.getBlockSize(): destFileSys.getDefaultBlockSize(f); return destFileSys.create(f, permission, true, sizeBuf, replication, blockSize, reporter); } /** * Validates copy by checking the sizes of files first and then * checksums, if the filesystems support checksums. * @param srcstat src path and metadata * @param absdst dst path * @return true if src & destination files are same */ private boolean validateCopy(FileStatus srcstat, Path absdst) throws IOException { if (destFileSys.exists(absdst)) { if (sameFile(srcstat.getPath().getFileSystem(job), srcstat, destFileSys, absdst, skipCRCCheck)) { return true; } } return false; } /** * Increment number of files copied and bytes copied and then report status */ void updateCopyStatus(FileStatus srcstat, Reporter reporter) { copycount++; reporter.incrCounter(Counter.BYTESCOPIED, srcstat.getLen()); reporter.incrCounter(Counter.COPY, 1); updateStatus(reporter); } /** * Skip copying this file if already exists at the destination. * Updates counters and copy status if skipping this file. * @return true if copy of this file can be skipped */ private boolean skipCopyFile(FileStatus srcstat, Path absdst, OutputCollector<WritableComparable<?>, Text> outc, Reporter reporter) throws IOException { if (destFileSys.exists(absdst) && !overwrite && !needsUpdate(srcstat, destFileSys, absdst)) { outc.collect(null, new Text("SKIP: " + srcstat.getPath())); ++skipcount; reporter.incrCounter(Counter.SKIP, 1); updateStatus(reporter); return true; } return false; } /** * Copies single file to the path specified by tmpfile. 
* @param srcstat src path and metadata * @param tmpfile temporary file to which copy is to be done * @param absdst actual destination path to which copy is to be done * @param reporter * @return Number of bytes copied */ private long doCopyFile(FileStatus srcstat, Path tmpfile, Path absdst, Reporter reporter) throws IOException { long bytesCopied = 0L; Path srcPath = srcstat.getPath(); // open src file try (FSDataInputStream in = srcPath.getFileSystem(job).open(srcPath)) { reporter.incrCounter(Counter.BYTESEXPECTED, srcstat.getLen()); // open tmp file try (FSDataOutputStream out = create(tmpfile, reporter, srcstat)) { LOG.info("Copying file " + srcPath + " of size " + srcstat.getLen() + " bytes..."); // copy file for(int bytesRead; (bytesRead = in.read(buffer)) >= 0; ) { out.write(buffer, 0, bytesRead); bytesCopied += bytesRead; reporter.setStatus( String.format("%.2f ", bytesCopied*100.0/srcstat.getLen()) + absdst + " [ " + TraditionalBinaryPrefix.long2String(bytesCopied, "", 1) + " / " + TraditionalBinaryPrefix.long2String(srcstat.getLen(), "", 1) + " ]"); } } } return bytesCopied; } /** * Copy a file to a destination. 
* @param srcstat src path and metadata * @param relativedst relative dst path * @param outc Log of skipped files * @param reporter * @throws IOException if copy fails(even if the validation of copy fails) */ private void copy(FileStatus srcstat, Path relativedst, OutputCollector<WritableComparable<?>, Text> outc, Reporter reporter) throws IOException { Path absdst = new Path(destPath, relativedst); int totfiles = job.getInt(SRC_COUNT_LABEL, -1); assert totfiles >= 0 : "Invalid file count " + totfiles; if (totfiles == 1) { // Copying a single file; use dst path provided by user as // destination file rather than destination directory Path dstparent = absdst.getParent(); if (!(destFileSys.exists(dstparent) && destFileSys.getFileStatus(dstparent).isDirectory())) { absdst = dstparent; } } // if a directory, ensure created even if empty if (srcstat.isDirectory()) { if (destFileSys.exists(absdst)) { if (destFileSys.getFileStatus(absdst).isFile()) { throw new IOException("Failed to mkdirs: " + absdst+" is a file."); } } else if (!destFileSys.mkdirs(absdst)) { throw new IOException("Failed to mkdirs " + absdst); } // TODO: when modification times can be set, directories should be // emitted to reducers so they might be preserved. Also, mkdirs does // not currently return an error when the directory already exists; // if this changes, all directory work might as well be done in reduce return; } // Can we skip copying this file ? 
if (skipCopyFile(srcstat, absdst, outc, reporter)) { return; } Path tmpfile = new Path(job.get(TMP_DIR_LABEL), relativedst); // do the actual copy to tmpfile long bytesCopied = doCopyFile(srcstat, tmpfile, absdst, reporter); if (bytesCopied != srcstat.getLen()) { throw new IOException("File size not matched: copied " + bytesString(bytesCopied) + " to tmpfile (=" + tmpfile + ") but expected " + bytesString(srcstat.getLen()) + " from " + srcstat.getPath()); } else { if (destFileSys.exists(absdst) && destFileSys.getFileStatus(absdst).isDirectory()) { throw new IOException(absdst + " is a directory"); } if (!destFileSys.mkdirs(absdst.getParent())) { throw new IOException("Failed to create parent dir: " + absdst.getParent()); } rename(tmpfile, absdst); if (!validateCopy(srcstat, absdst)) { destFileSys.delete(absdst, false); throw new IOException("Validation of copy of file " + srcstat.getPath() + " failed."); } updateDestStatus(srcstat, destFileSys.getFileStatus(absdst)); } // report at least once for each file updateCopyStatus(srcstat, reporter); } /** rename tmp to dst, delete dst if already exists */ private void rename(Path tmp, Path dst) throws IOException { try { if (destFileSys.exists(dst)) { destFileSys.delete(dst, true); } if (!destFileSys.rename(tmp, dst)) { throw new IOException(); } } catch(IOException cause) { throw (IOException)new IOException("Fail to rename tmp file (=" + tmp + ") to destination file (=" + dst + ")").initCause(cause); } } private void updateDestStatus(FileStatus src, FileStatus dst ) throws IOException { if (preserve_status) { DistCpV1.updateDestStatus(src, dst, preseved, destFileSys); } } static String bytesString(long b) { return b + " bytes (" + TraditionalBinaryPrefix.long2String(b, "", 1) + ")"; } /** * Copies a file and validates the copy by checking the checksums. * If validation fails, retries (max number of tries is distcp.file.retries) * to copy the file. 
*/ void copyWithRetries(FileStatus srcstat, Path relativedst, OutputCollector<WritableComparable<?>, Text> out, Reporter reporter) throws IOException { // max tries to copy when validation of copy fails final int maxRetries = job.getInt(FILE_RETRIES_LABEL, DEFAULT_FILE_RETRIES); // save update flag for later copies within the same map task final boolean saveUpdate = update; int retryCnt = 1; for (; retryCnt <= maxRetries; retryCnt++) { try { //copy the file and validate copy copy(srcstat, relativedst, out, reporter); break;// copy successful } catch (IOException e) { LOG.warn("Copy of " + srcstat.getPath() + " failed.", e); if (retryCnt < maxRetries) {// copy failed and need to retry LOG.info("Retrying copy of file " + srcstat.getPath()); update = true; // set update flag for retries } else {// no more retries... Give up update = saveUpdate; throw new IOException("Copy of file failed even with " + retryCnt + " tries.", e); } } } } /** Mapper configuration. * Extracts source and destination file system, as well as * top-level paths on source and destination directories. * Gets the named file systems, to be used later in map. */ public void configure(JobConf job) { destPath = new Path(job.get(DST_DIR_LABEL, "/")); try { destFileSys = destPath.getFileSystem(job); } catch (IOException ex) { throw new RuntimeException("Unable to get the named file system.", ex); } sizeBuf = job.getInt("copy.buf.size", 128 * 1024); buffer = new byte[sizeBuf]; ignoreReadFailures = job.getBoolean(Options.IGNORE_READ_FAILURES.propertyname, false); preserve_status = job.getBoolean(Options.PRESERVE_STATUS.propertyname, false); if (preserve_status) { preseved = FileAttribute.parse(job.get(PRESERVE_STATUS_LABEL)); } update = job.getBoolean(Options.UPDATE.propertyname, false); overwrite = !update && job.getBoolean(Options.OVERWRITE.propertyname, false); skipCRCCheck = job.getBoolean(Options.SKIPCRC.propertyname, false); this.job = job; } /** Map method. 
Copies one file from source file system to destination. * @param key src len * @param value FilePair (FileStatus src, Path dst) * @param out Log of failed copies * @param reporter */ public void map(LongWritable key, FilePair value, OutputCollector<WritableComparable<?>, Text> out, Reporter reporter) throws IOException { final FileStatus srcstat = value.input; final Path relativedst = new Path(value.output); try { copyWithRetries(srcstat, relativedst, out, reporter); } catch (IOException e) { ++failcount; reporter.incrCounter(Counter.FAIL, 1); updateStatus(reporter); final String sfailure = "FAIL " + relativedst + " : " + StringUtils.stringifyException(e); out.collect(null, new Text(sfailure)); LOG.info(sfailure); if (e instanceof FileNotFoundException) { final String s = "Possible Cause for failure: Either the filesystem " + srcstat.getPath().getFileSystem(job) + " is not accessible or the file is deleted"; LOG.error(s); out.collect(null, new Text(s)); } try { for (int i = 0; i < 3; ++i) { try { final Path tmp = new Path(job.get(TMP_DIR_LABEL), relativedst); if (destFileSys.delete(tmp, true)) break; } catch (Throwable ex) { // ignore, we are just cleaning up LOG.debug("Ignoring cleanup exception", ex); } // update status, so we don't get timed out updateStatus(reporter); Thread.sleep(3 * 1000); } } catch (InterruptedException inte) { throw (IOException)new IOException().initCause(inte); } } finally { updateStatus(reporter); } } public void close() throws IOException { if (0 == failcount || ignoreReadFailures) { return; } throw new IOException(getCountString()); } } private static List<Path> fetchFileList(Configuration conf, Path srcList) throws IOException { List<Path> result = new ArrayList<Path>(); FileSystem fs = srcList.getFileSystem(conf); try (BufferedReader input = new BufferedReader(new InputStreamReader(fs.open(srcList), Charset.forName("UTF-8")))) { String line = input.readLine(); while (line != null) { result.add(new Path(line)); line = 
input.readLine(); } } return result; } @Deprecated public static void copy(Configuration conf, String srcPath, String destPath, Path logPath, boolean srcAsList, boolean ignoreReadFailures) throws IOException { final Path src = new Path(srcPath); List<Path> tmp = new ArrayList<Path>(); if (srcAsList) { tmp.addAll(fetchFileList(conf, src)); } else { tmp.add(src); } EnumSet<Options> flags = ignoreReadFailures ? EnumSet.of(Options.IGNORE_READ_FAILURES) : EnumSet.noneOf(Options.class); final Path dst = new Path(destPath); copy(conf, new Arguments(tmp, null, dst, logPath, flags, null, Long.MAX_VALUE, Long.MAX_VALUE, null, false)); } /** Sanity check for srcPath */ private static void checkSrcPath(JobConf jobConf, List<Path> srcPaths) throws IOException { List<IOException> rslt = new ArrayList<IOException>(); List<Path> unglobbed = new LinkedList<Path>(); Path[] ps = new Path[srcPaths.size()]; ps = srcPaths.toArray(ps); TokenCache.obtainTokensForNamenodes(jobConf.getCredentials(), ps, jobConf); for (Path p : srcPaths) { FileSystem fs = p.getFileSystem(jobConf); FileStatus[] inputs = fs.globStatus(p); if(inputs != null && inputs.length > 0) { for (FileStatus onePath: inputs) { unglobbed.add(onePath.getPath()); } } else { rslt.add(new IOException("Input source " + p + " does not exist.")); } } if (!rslt.isEmpty()) { throw new InvalidInputException(rslt); } srcPaths.clear(); srcPaths.addAll(unglobbed); } /** * Driver to copy srcPath to destPath depending on required protocol. 
* @param conf configuration * @param args arguments */ static void copy(final Configuration conf, final Arguments args ) throws IOException { LOG.info("srcPaths=" + args.srcs); if (!args.dryrun || args.flags.contains(Options.UPDATE)) { LOG.info("destPath=" + args.dst); } JobConf job = createJobConf(conf); checkSrcPath(job, args.srcs); if (args.preservedAttributes != null) { job.set(PRESERVE_STATUS_LABEL, args.preservedAttributes); } if (args.mapredSslConf != null) { job.set("dfs.https.client.keystore.resource", args.mapredSslConf); } //Initialize the mapper try { if (setup(conf, job, args)) { JobClient.runJob(job); } if(!args.dryrun) { finalize(conf, job, args.dst, args.preservedAttributes); } } finally { if (!args.dryrun) { //delete tmp fullyDelete(job.get(TMP_DIR_LABEL), job); } //delete jobDirectory fullyDelete(job.get(JOB_DIR_LABEL), job); } } private static void updateDestStatus(FileStatus src, FileStatus dst, EnumSet<FileAttribute> preseved, FileSystem destFileSys ) throws IOException { String owner = null; String group = null; if (preseved.contains(FileAttribute.USER) && !src.getOwner().equals(dst.getOwner())) { owner = src.getOwner(); } if (preseved.contains(FileAttribute.GROUP) && !src.getGroup().equals(dst.getGroup())) { group = src.getGroup(); } if (owner != null || group != null) { destFileSys.setOwner(dst.getPath(), owner, group); } if (preseved.contains(FileAttribute.PERMISSION) && !src.getPermission().equals(dst.getPermission())) { destFileSys.setPermission(dst.getPath(), src.getPermission()); } if (preseved.contains(FileAttribute.TIMES)) { destFileSys.setTimes(dst.getPath(), src.getModificationTime(), src.getAccessTime()); } } static private void finalize(Configuration conf, JobConf jobconf, final Path destPath, String presevedAttributes) throws IOException { if (presevedAttributes == null) { return; } EnumSet<FileAttribute> preseved = FileAttribute.parse(presevedAttributes); if (!preseved.contains(FileAttribute.USER) && 
!preseved.contains(FileAttribute.GROUP) && !preseved.contains(FileAttribute.PERMISSION)) { return; } FileSystem dstfs = destPath.getFileSystem(conf); Path dstdirlist = new Path(jobconf.get(DST_DIR_LIST_LABEL)); try (SequenceFile.Reader in = new SequenceFile.Reader(jobconf, Reader.file(dstdirlist))) { Text dsttext = new Text(); FilePair pair = new FilePair(); for(; in.next(dsttext, pair); ) { Path absdst = new Path(destPath, pair.output); updateDestStatus(pair.input, dstfs.getFileStatus(absdst), preseved, dstfs); } } } static class Arguments { final List<Path> srcs; final Path basedir; final Path dst; final Path log; final EnumSet<Options> flags; final String preservedAttributes; final long filelimit; final long sizelimit; final String mapredSslConf; final boolean dryrun; /** * Arguments for distcp * @param srcs List of source paths * @param basedir Base directory for copy * @param dst Destination path * @param log Log output directory * @param flags Command-line flags * @param preservedAttributes Preserved attributes * @param filelimit File limit * @param sizelimit Size limit * @param mapredSslConf ssl configuration * @param dryrun */ Arguments(List<Path> srcs, Path basedir, Path dst, Path log, EnumSet<Options> flags, String preservedAttributes, long filelimit, long sizelimit, String mapredSslConf, boolean dryrun) { this.srcs = srcs; this.basedir = basedir; this.dst = dst; this.log = log; this.flags = flags; this.preservedAttributes = preservedAttributes; this.filelimit = filelimit; this.sizelimit = sizelimit; this.mapredSslConf = mapredSslConf; this.dryrun = dryrun; if (LOG.isTraceEnabled()) { LOG.trace("this = " + this); } } static Arguments valueOf(String[] args, Configuration conf ) throws IOException { List<Path> srcs = new ArrayList<Path>(); Path dst = null; Path log = null; Path basedir = null; EnumSet<Options> flags = EnumSet.noneOf(Options.class); String presevedAttributes = null; String mapredSslConf = null; long filelimit = Long.MAX_VALUE; long sizelimit 
= Long.MAX_VALUE; boolean dryrun = false; for (int idx = 0; idx < args.length; idx++) { Options[] opt = Options.values(); int i = 0; for(; i < opt.length && !args[idx].startsWith(opt[i].cmd); i++); if (i < opt.length) { flags.add(opt[i]); if (opt[i] == Options.PRESERVE_STATUS) { presevedAttributes = args[idx].substring(2); FileAttribute.parse(presevedAttributes); //validation } else if (opt[i] == Options.FILE_LIMIT) { filelimit = Options.FILE_LIMIT.parseLong(args, ++idx); } else if (opt[i] == Options.SIZE_LIMIT) { sizelimit = Options.SIZE_LIMIT.parseLong(args, ++idx); } } else if ("-f".equals(args[idx])) { if (++idx == args.length) { throw new IllegalArgumentException("urilist_uri not specified in -f"); } srcs.addAll(fetchFileList(conf, new Path(args[idx]))); } else if ("-log".equals(args[idx])) { if (++idx == args.length) { throw new IllegalArgumentException("logdir not specified in -log"); } log = new Path(args[idx]); } else if ("-basedir".equals(args[idx])) { if (++idx == args.length) { throw new IllegalArgumentException("basedir not specified in -basedir"); } basedir = new Path(args[idx]); } else if ("-mapredSslConf".equals(args[idx])) { if (++idx == args.length) { throw new IllegalArgumentException("ssl conf file not specified in -mapredSslConf"); } mapredSslConf = args[idx]; } else if ("-dryrun".equals(args[idx])) { dryrun = true; dst = new Path("/tmp/distcp_dummy_dest");//dummy destination } else if ("-m".equals(args[idx])) { if (++idx == args.length) { throw new IllegalArgumentException("num_maps not specified in -m"); } try { conf.setInt(MAX_MAPS_LABEL, Integer.parseInt(args[idx])); } catch (NumberFormatException e) { throw new IllegalArgumentException("Invalid argument to -m: " + args[idx]); } } else if ('-' == args[idx].codePointAt(0)) { throw new IllegalArgumentException("Invalid switch " + args[idx]); } else if (idx == args.length -1 && (!dryrun || flags.contains(Options.UPDATE))) { dst = new Path(args[idx]); } else { srcs.add(new Path(args[idx])); } } 
// mandatory command-line parameters if (srcs.isEmpty() || dst == null) { throw new IllegalArgumentException("Missing " + (dst == null ? "dst path" : "src")); } // incompatible command-line flags final boolean isOverwrite = flags.contains(Options.OVERWRITE); final boolean isUpdate = flags.contains(Options.UPDATE); final boolean isDelete = flags.contains(Options.DELETE); final boolean skipCRC = flags.contains(Options.SKIPCRC); if (isOverwrite && isUpdate) { throw new IllegalArgumentException("Conflicting overwrite policies"); } if (!isUpdate && skipCRC) { throw new IllegalArgumentException( Options.SKIPCRC.cmd + " is relevant only with the " + Options.UPDATE.cmd + " option"); } if (isDelete && !isOverwrite && !isUpdate) { throw new IllegalArgumentException(Options.DELETE.cmd + " must be specified with " + Options.OVERWRITE + " or " + Options.UPDATE + "."); } return new Arguments(srcs, basedir, dst, log, flags, presevedAttributes, filelimit, sizelimit, mapredSslConf, dryrun); } /** {@inheritDoc} */ public String toString() { return getClass().getName() + "{" + "\n srcs = " + srcs + "\n dst = " + dst + "\n log = " + log + "\n flags = " + flags + "\n preservedAttributes = " + preservedAttributes + "\n filelimit = " + filelimit + "\n sizelimit = " + sizelimit + "\n mapredSslConf = " + mapredSslConf + "\n}"; } } /** * This is the main driver for recursively copying directories * across file systems. It takes at least two cmdline parameters. A source * URL and a destination URL. It then essentially does an "ls -lR" on the * source URL, and writes the output in a round-robin manner to all the map * input files. The mapper actually copies the files allotted to it. The * reduce is empty. 
*/
  // Tool entry point: parse the command line, run the copy, and map each
  // failure class to a distinct process exit code for scripting.
  public int run(String[] args) {
    try {
      copy(conf, Arguments.valueOf(args, conf));
      return 0;
    } catch (IllegalArgumentException e) {
      // Bad usage: print the error plus the usage text.
      System.err.println(StringUtils.stringifyException(e) + "\n" + usage);
      ToolRunner.printGenericCommandUsage(System.err);
      return -1;
    } catch (DuplicationException e) {
      // Two sources mapped to the same destination name.
      System.err.println(StringUtils.stringifyException(e));
      return DuplicationException.ERROR_CODE;
    } catch (RemoteException e) {
      // Unwrap server-side exceptions so the real cause is printed.
      final IOException unwrapped = e.unwrapRemoteException(
          FileNotFoundException.class,
          AccessControlException.class,
          QuotaExceededException.class);
      System.err.println(StringUtils.stringifyException(unwrapped));
      return -3;
    } catch (Exception e) {
      System.err.println("With failures, global counters are inaccurate; "
          + "consider running with -i");
      System.err.println("Copy failed: " + StringUtils.stringifyException(e));
      return -999;
    }
  }

  /** Command-line launcher; exits the JVM with {@link #run}'s return code. */
  public static void main(String[] args) throws Exception {
    JobConf job = new JobConf(DistCpV1.class);
    DistCpV1 distcp = new DistCpV1(job);
    int res = ToolRunner.run(distcp, args);
    System.exit(res);
  }

  /**
   * Make a path relative with respect to a root path.
   * absPath is always assumed to descend from root.
   * Otherwise returned path is null.
   */
  static String makeRelative(Path root, Path absPath) {
    if (!absPath.isAbsolute()) {
      throw new IllegalArgumentException("!absPath.isAbsolute(), absPath="
          + absPath);
    }
    String p = absPath.toUri().getPath();

    StringTokenizer pathTokens = new StringTokenizer(p, "/");
    // Consume the shared prefix; a mismatch means absPath is not under root.
    for(StringTokenizer rootTokens = new StringTokenizer(
        root.toUri().getPath(), "/"); rootTokens.hasMoreTokens(); ) {
      if (!rootTokens.nextToken().equals(pathTokens.nextToken())) {
        return null;
      }
    }
    // Re-join whatever remains of absPath below root.
    StringBuilder sb = new StringBuilder();
    for(; pathTokens.hasMoreTokens(); ) {
      sb.append(pathTokens.nextToken());
      if (pathTokens.hasMoreTokens()) { sb.append(Path.SEPARATOR); }
    }
    // "." means absPath == root.
    return sb.length() == 0? ".": sb.toString();
  }

  /**
   * Calculate how many maps to run.
* Number of maps is bounded by a minimum of the cumulative size of the * copy / (distcp.bytes.per.map, default BYTES_PER_MAP or -m on the * command line) and at most (distcp.max.map.tasks, default * MAX_MAPS_PER_NODE * nodes in the cluster). * @param totalBytes Count of total bytes for job * @param job The job to configure * @return Count of maps to run. */ private static int setMapCount(long totalBytes, JobConf job) throws IOException { int numMaps = (int)(totalBytes / job.getLong(BYTES_PER_MAP_LABEL, BYTES_PER_MAP)); numMaps = Math.min(numMaps, job.getInt(MAX_MAPS_LABEL, MAX_MAPS_PER_NODE * new JobClient(job).getClusterStatus().getTaskTrackers())); numMaps = Math.max(numMaps, 1); job.setNumMapTasks(numMaps); return numMaps; } /** Fully delete dir */ static void fullyDelete(String dir, Configuration conf) throws IOException { if (dir != null) { Path tmp = new Path(dir); boolean success = tmp.getFileSystem(conf).delete(tmp, true); if (!success) { LOG.warn("Could not fully delete " + tmp); } } } //Job configuration private static JobConf createJobConf(Configuration conf) { JobConf jobconf = new JobConf(conf, DistCpV1.class); jobconf.setJobName(conf.get("mapred.job.name", NAME)); // turn off speculative execution, because DFS doesn't handle // multiple writers to the same file. jobconf.setMapSpeculativeExecution(false); jobconf.setInputFormat(CopyInputFormat.class); jobconf.setOutputKeyClass(Text.class); jobconf.setOutputValueClass(Text.class); jobconf.setMapperClass(CopyFilesMapper.class); jobconf.setNumReduceTasks(0); return jobconf; } private static final Random RANDOM = new Random(); public static String getRandomId() { return Integer.toString(RANDOM.nextInt(Integer.MAX_VALUE), 36); } /** * Increase the replication factor of _distcp_src_files to * sqrt(min(maxMapsOnCluster, numMaps)). This is to reduce the chance of * failing of distcp because of "not having a replication of _distcp_src_files * available for reading for some maps". 
*/
  private static void setReplication(Configuration conf, JobConf jobConf,
      Path srcfilelist, int numMaps) throws IOException {
    int numMaxMaps = new JobClient(jobConf).getClusterStatus().getMaxMapTasks();
    // Target replication grows with concurrency, but only as sqrt so very
    // large jobs do not over-replicate the file list.
    short replication = (short) Math.ceil(
                                Math.sqrt(Math.min(numMaxMaps, numMaps)));
    FileSystem fs = srcfilelist.getFileSystem(conf);
    FileStatus srcStatus = fs.getFileStatus(srcfilelist);

    // Only ever raise replication; never lower an already-higher factor.
    if (srcStatus.getReplication() < replication) {
      if (!fs.setReplication(srcfilelist, replication)) {
        throw new IOException("Unable to increase the replication of file " +
                              srcfilelist);
      }
    }
  }

  /**
   * Does the dir already exist at destination ?
   * @return true if the dir already exists at destination
   */
  private static boolean dirExists(Configuration conf, Path dst)
      throws IOException {
    FileSystem destFileSys = dst.getFileSystem(conf);
    FileStatus status = null;
    try {
      status = destFileSys.getFileStatus(dst);
    } catch (FileNotFoundException e) {
      // Missing path is the expected "does not exist" answer, not an error.
      return false;
    }
    if (status.isFile()) {
      // Existing *file* at a directory destination is a hard conflict.
      throw new FileAlreadyExistsException("Not a dir: " + dst+" is a file.");
    }
    return true;
  }

  /**
   * Initialize DFSCopyFileMapper specific job-configuration.
   * @param conf : The dfs/mapred configuration.
   * @param jobConf : The handle to the jobConf object to be initialized.
   * @param args Arguments
   * @return true if it is necessary to launch a job.
*/ static boolean setup(Configuration conf, JobConf jobConf, final Arguments args) throws IOException { jobConf.set(DST_DIR_LABEL, args.dst.toUri().toString()); //set boolean values final boolean update = args.flags.contains(Options.UPDATE); final boolean skipCRCCheck = args.flags.contains(Options.SKIPCRC); final boolean overwrite = !update && args.flags.contains(Options.OVERWRITE) && !args.dryrun; jobConf.setBoolean(Options.UPDATE.propertyname, update); jobConf.setBoolean(Options.SKIPCRC.propertyname, skipCRCCheck); jobConf.setBoolean(Options.OVERWRITE.propertyname, overwrite); jobConf.setBoolean(Options.IGNORE_READ_FAILURES.propertyname, args.flags.contains(Options.IGNORE_READ_FAILURES)); jobConf.setBoolean(Options.PRESERVE_STATUS.propertyname, args.flags.contains(Options.PRESERVE_STATUS)); final String randomId = getRandomId(); JobClient jClient = new JobClient(jobConf); Path stagingArea; try { stagingArea = JobSubmissionFiles.getStagingDir(jClient.getClusterHandle(), conf); } catch (InterruptedException ie) { throw new IOException(ie); } Path jobDirectory = new Path(stagingArea + NAME + "_" + randomId); FsPermission mapredSysPerms = new FsPermission(JobSubmissionFiles.JOB_DIR_PERMISSION); FileSystem.mkdirs(jClient.getFs(), jobDirectory, mapredSysPerms); jobConf.set(JOB_DIR_LABEL, jobDirectory.toString()); long maxBytesPerMap = conf.getLong(BYTES_PER_MAP_LABEL, BYTES_PER_MAP); FileSystem dstfs = args.dst.getFileSystem(conf); // get tokens for all the required FileSystems.. 
TokenCache.obtainTokensForNamenodes(jobConf.getCredentials(), new Path[] {args.dst}, conf); boolean dstExists = dstfs.exists(args.dst); boolean dstIsDir = false; if (dstExists) { dstIsDir = dstfs.getFileStatus(args.dst).isDirectory(); } // default logPath Path logPath = args.log; if (logPath == null) { String filename = "_distcp_logs_" + randomId; if (!dstExists || !dstIsDir) { Path parent = args.dst.getParent(); if (null == parent) { // If dst is '/' on S3, it might not exist yet, but dst.getParent() // will return null. In this case, use '/' as its own parent to prevent // NPE errors below. parent = args.dst; } if (!dstfs.exists(parent)) { dstfs.mkdirs(parent); } logPath = new Path(parent, filename); } else { logPath = new Path(args.dst, filename); } } FileOutputFormat.setOutputPath(jobConf, logPath); // create src list, dst list FileSystem jobfs = jobDirectory.getFileSystem(jobConf); Path srcfilelist = new Path(jobDirectory, "_distcp_src_files"); Path dstfilelist = new Path(jobDirectory, "_distcp_dst_files"); Path dstdirlist = new Path(jobDirectory, "_distcp_dst_dirs"); jobConf.set(SRC_LIST_LABEL, srcfilelist.toString()); jobConf.set(DST_DIR_LIST_LABEL, dstdirlist.toString()); int srcCount = 0, cnsyncf = 0, dirsyn = 0; long fileCount = 0L, dirCount = 0L, byteCount = 0L, cbsyncs = 0L, skipFileCount = 0L, skipByteCount = 0L; try ( SequenceFile.Writer src_writer = SequenceFile.createWriter(jobConf, Writer.file(srcfilelist), Writer.keyClass(LongWritable.class), Writer.valueClass(FilePair.class), Writer.compression( SequenceFile.CompressionType.NONE)); SequenceFile.Writer dst_writer = SequenceFile.createWriter(jobConf, Writer.file(dstfilelist), Writer.keyClass(Text.class), Writer.valueClass(Text.class), Writer.compression( SequenceFile.CompressionType.NONE)); SequenceFile.Writer dir_writer = SequenceFile.createWriter(jobConf, Writer.file(dstdirlist), Writer.keyClass(Text.class), Writer.valueClass(FilePair.class), Writer.compression( 
SequenceFile.CompressionType.NONE)); ) { // handle the case where the destination directory doesn't exist // and we've only a single src directory OR we're updating/overwriting // the contents of the destination directory. final boolean special = (args.srcs.size() == 1 && !dstExists) || update || overwrite; Path basedir = null; HashSet<Path> parentDirsToCopy = new HashSet<Path>(); if (args.basedir != null) { FileSystem basefs = args.basedir.getFileSystem(conf); basedir = args.basedir.makeQualified( basefs.getUri(), basefs.getWorkingDirectory()); if (!basefs.isDirectory(basedir)) { throw new IOException("Basedir " + basedir + " is not a directory."); } } for(Iterator<Path> srcItr = args.srcs.iterator(); srcItr.hasNext(); ) { final Path src = srcItr.next(); FileSystem srcfs = src.getFileSystem(conf); FileStatus srcfilestat = srcfs.getFileStatus(src); Path root = special && srcfilestat.isDirectory()? src: src.getParent(); if (dstExists && !dstIsDir && (args.srcs.size() > 1 || srcfilestat.isDirectory())) { // destination should not be a file throw new IOException("Destination " + args.dst + " should be a dir" + " if multiple source paths are there OR if" + " the source path is a dir"); } if (basedir != null) { root = basedir; Path parent = src.getParent().makeQualified( srcfs.getUri(), srcfs.getWorkingDirectory()); while (parent != null && !parent.equals(basedir)) { if (!parentDirsToCopy.contains(parent)){ parentDirsToCopy.add(parent); String dst = makeRelative(root, parent); FileStatus pst = srcfs.getFileStatus(parent); src_writer.append(new LongWritable(0), new FilePair(pst, dst)); dst_writer.append(new Text(dst), new Text(parent.toString())); dir_writer.append(new Text(dst), new FilePair(pst, dst)); if (++dirsyn > SYNC_FILE_MAX) { dirsyn = 0; dir_writer.sync(); } } parent = parent.getParent(); } if (parent == null) { throw new IOException("Basedir " + basedir + " is not a prefix of source path " + src); } } if (srcfilestat.isDirectory()) { ++srcCount; final String 
dst = makeRelative(root,src); if (!update || !dirExists(conf, new Path(args.dst, dst))) { ++dirCount; src_writer.append(new LongWritable(0), new FilePair(srcfilestat, dst)); } dst_writer.append(new Text(dst), new Text(src.toString())); } Stack<FileStatus> pathstack = new Stack<FileStatus>(); for(pathstack.push(srcfilestat); !pathstack.empty(); ) { FileStatus cur = pathstack.pop(); FileStatus[] children = srcfs.listStatus(cur.getPath()); for(int i = 0; i < children.length; i++) { boolean skipPath = false; final FileStatus child = children[i]; final String dst = makeRelative(root, child.getPath()); ++srcCount; if (child.isDirectory()) { pathstack.push(child); if (!update || !dirExists(conf, new Path(args.dst, dst))) { ++dirCount; } else { skipPath = true; // skip creating dir at destination } } else { Path destPath = new Path(args.dst, dst); if (cur.isFile() && (args.srcs.size() == 1)) { // Copying a single file; use dst path provided by user as // destination file rather than destination directory Path dstparent = destPath.getParent(); FileSystem destFileSys = destPath.getFileSystem(jobConf); if (!(destFileSys.exists(dstparent) && destFileSys.getFileStatus(dstparent).isDirectory())) { destPath = dstparent; } } //skip path if the src and the dst files are the same. 
skipPath = update && sameFile(srcfs, child, dstfs, destPath, skipCRCCheck); //skip path if it exceed file limit or size limit skipPath |= fileCount == args.filelimit || byteCount + child.getLen() > args.sizelimit; if (!skipPath) { ++fileCount; byteCount += child.getLen(); if (LOG.isTraceEnabled()) { LOG.trace("adding file " + child.getPath()); } ++cnsyncf; cbsyncs += child.getLen(); if (cnsyncf > SYNC_FILE_MAX || cbsyncs > maxBytesPerMap) { src_writer.sync(); dst_writer.sync(); cnsyncf = 0; cbsyncs = 0L; } } else { ++skipFileCount; skipByteCount += child.getLen(); if (LOG.isTraceEnabled()) { LOG.trace("skipping file " + child.getPath()); } } } if (!skipPath) { src_writer.append(new LongWritable(child.isDirectory()? 0: child.getLen()), new FilePair(child, dst)); } dst_writer.append(new Text(dst), new Text(child.getPath().toString())); } if (cur.isDirectory()) { String dst = makeRelative(root, cur.getPath()); dir_writer.append(new Text(dst), new FilePair(cur, dst)); if (++dirsyn > SYNC_FILE_MAX) { dirsyn = 0; dir_writer.sync(); } } } } } LOG.info("sourcePathsCount(files+directories)=" + srcCount); LOG.info("filesToCopyCount=" + fileCount); LOG.info("bytesToCopyCount=" + TraditionalBinaryPrefix.long2String(byteCount, "", 1)); if (update) { LOG.info("filesToSkipCopyCount=" + skipFileCount); LOG.info("bytesToSkipCopyCount=" + TraditionalBinaryPrefix.long2String(skipByteCount, "", 1)); } if (args.dryrun) { return false; } int mapCount = setMapCount(byteCount, jobConf); // Increase the replication of _distcp_src_files, if needed setReplication(conf, jobConf, srcfilelist, mapCount); FileStatus dststatus = null; try { dststatus = dstfs.getFileStatus(args.dst); } catch(FileNotFoundException fnfe) { LOG.info(args.dst + " does not exist."); } // create dest path dir if copying > 1 file if (dststatus == null) { if (srcCount > 1 && !dstfs.mkdirs(args.dst)) { throw new IOException("Failed to create" + args.dst); } } final Path sorted = new Path(jobDirectory, "_distcp_sorted"); 
checkDuplication(jobfs, dstfilelist, sorted, conf); if (dststatus != null && args.flags.contains(Options.DELETE)) { long deletedPathsCount = deleteNonexisting(dstfs, dststatus, sorted, jobfs, jobDirectory, jobConf, conf); LOG.info("deletedPathsFromDestCount(files+directories)=" + deletedPathsCount); } Path tmpDir = new Path( (dstExists && !dstIsDir) || (!dstExists && srcCount == 1)? args.dst.getParent(): args.dst, "_distcp_tmp_" + randomId); jobConf.set(TMP_DIR_LABEL, tmpDir.toUri().toString()); // Explicitly create the tmpDir to ensure that it can be cleaned // up by fullyDelete() later. tmpDir.getFileSystem(conf).mkdirs(tmpDir); LOG.info("sourcePathsCount=" + srcCount); LOG.info("filesToCopyCount=" + fileCount); LOG.info("bytesToCopyCount=" + TraditionalBinaryPrefix.long2String(byteCount, "", 1)); jobConf.setInt(SRC_COUNT_LABEL, srcCount); jobConf.setLong(TOTAL_SIZE_LABEL, byteCount); return (fileCount + dirCount) > 0; } /** * Check whether the contents of src and dst are the same. * * Return false if dstpath does not exist * * If the files have different sizes, return false. * * If the files have the same sizes, the file checksums will be compared. * * When file checksum is not supported in any of file systems, * two files are considered as the same if they have the same size. */ static private boolean sameFile(FileSystem srcfs, FileStatus srcstatus, FileSystem dstfs, Path dstpath, boolean skipCRCCheck) throws IOException { FileStatus dststatus; try { dststatus = dstfs.getFileStatus(dstpath); } catch(FileNotFoundException fnfe) { return false; } //same length? 
if (srcstatus.getLen() != dststatus.getLen()) { return false; } if (skipCRCCheck) { LOG.debug("Skipping the CRC check"); return true; } //get src checksum final FileChecksum srccs; try { srccs = srcfs.getFileChecksum(srcstatus.getPath()); } catch(FileNotFoundException fnfe) { /* * Two possible cases: * (1) src existed once but was deleted between the time period that * srcstatus was obtained and the try block above. * (2) srcfs does not support file checksum and (incorrectly) throws * FNFE, e.g. some previous versions of HftpFileSystem. * For case (1), it is okay to return true since src was already deleted. * For case (2), true should be returned. */ return true; } //compare checksums try { final FileChecksum dstcs = dstfs.getFileChecksum(dststatus.getPath()); //return true if checksum is not supported //(i.e. some of the checksums is null) return srccs == null || dstcs == null || srccs.equals(dstcs); } catch(FileNotFoundException fnfe) { return false; } } /** * Delete the dst files/dirs which do not exist in src * * @return total count of files and directories deleted from destination * @throws IOException */ static private long deleteNonexisting( FileSystem dstfs, FileStatus dstroot, Path dstsorted, FileSystem jobfs, Path jobdir, JobConf jobconf, Configuration conf ) throws IOException { if (dstroot.isFile()) { throw new IOException("dst must be a directory when option " + Options.DELETE.cmd + " is set, but dst (= " + dstroot.getPath() + ") is not a directory."); } //write dst lsr results final Path dstlsr = new Path(jobdir, "_distcp_dst_lsr"); try (final SequenceFile.Writer writer = SequenceFile.createWriter(jobconf, Writer.file(dstlsr), Writer.keyClass(Text.class), Writer.valueClass(NullWritable.class), Writer.compression( SequenceFile.CompressionType.NONE))) { //do lsr to get all file statuses in dstroot final Stack<FileStatus> lsrstack = new Stack<FileStatus>(); for(lsrstack.push(dstroot); !lsrstack.isEmpty(); ) { final FileStatus status = lsrstack.pop(); if 
(status.isDirectory()) { for(FileStatus child : dstfs.listStatus(status.getPath())) { String relative = makeRelative(dstroot.getPath(), child.getPath()); writer.append(new Text(relative), NullWritable.get()); lsrstack.push(child); } } } } //sort lsr results final Path sortedlsr = new Path(jobdir, "_distcp_dst_lsr_sorted"); SequenceFile.Sorter sorter = new SequenceFile.Sorter(jobfs, new Text.Comparator(), Text.class, NullWritable.class, jobconf); sorter.sort(dstlsr, sortedlsr); //compare lsr list and dst list long deletedPathsCount = 0; try (SequenceFile.Reader lsrin = new SequenceFile.Reader(jobconf, Reader.file(sortedlsr)); SequenceFile.Reader dstin = new SequenceFile.Reader(jobconf, Reader.file(dstsorted))) { //compare sorted lsr list and sorted dst list final Text lsrpath = new Text(); final Text dstpath = new Text(); final Text dstfrom = new Text(); final Trash trash = new Trash(dstfs, conf); Path lastpath = null; boolean hasnext = dstin.next(dstpath, dstfrom); while (lsrin.next(lsrpath, NullWritable.get())) { int dst_cmp_lsr = dstpath.compareTo(lsrpath); while (hasnext && dst_cmp_lsr < 0) { hasnext = dstin.next(dstpath, dstfrom); dst_cmp_lsr = dstpath.compareTo(lsrpath); } if (dst_cmp_lsr == 0) { //lsrpath exists in dst, skip it hasnext = dstin.next(dstpath, dstfrom); } else { //lsrpath does not exist, delete it final Path rmpath = new Path(dstroot.getPath(), lsrpath.toString()); ++deletedPathsCount; if ((lastpath == null || !isAncestorPath(lastpath, rmpath))) { if (!(trash.moveToTrash(rmpath) || dstfs.delete(rmpath, true))) { throw new IOException("Failed to delete " + rmpath); } lastpath = rmpath; } } } } return deletedPathsCount; } //is x an ancestor path of y? 
static private boolean isAncestorPath(Path xp, Path yp) {
    final String prefix = xp.toString();
    final String candidate = yp.toString();
    if (!candidate.startsWith(prefix)) {
      return false;
    }
    // A string prefix only counts as an ancestor when it ends exactly at a
    // path separator (or the two paths are identical); otherwise "/a/b"
    // would wrongly match "/a/bc".
    final int prefixLen = prefix.length();
    return candidate.length() == prefixLen
        || candidate.charAt(prefixLen) == Path.SEPARATOR_CHAR;
  }

  /**
   * Fail with {@link DuplicationException} when two entries of the file list
   * map to the same destination name.
   */
  static private void checkDuplication(FileSystem fs, Path file, Path sorted,
    Configuration conf) throws IOException {
    // Sort by destination so that any duplicates become adjacent.
    SequenceFile.Sorter sorter = new SequenceFile.Sorter(fs,
        new Text.Comparator(), Text.class, Text.class, conf);
    sorter.sort(file, sorted);
    try (SequenceFile.Reader in =
        new SequenceFile.Reader(conf, Reader.file(sorted))) {
      Text prevDst = null;
      Text prevSrc = null;
      Text dst = new Text();
      Text src = new Text();
      while (in.next(dst, src)) {
        if (prevDst != null && dst.equals(prevDst)) {
          throw new DuplicationException(
            "Invalid input, there are duplicated files in the sources: "
            + prevSrc + ", " + src);
        }
        prevDst = dst;
        prevSrc = src;
        // Fresh Text instances: the reader reuses whatever object it is
        // handed, so the "previous" pair must not be overwritten.
        dst = new Text();
        src = new Text();
      }
    }
  }

  /** An exception class for duplicated source files. */
  public static class DuplicationException extends IOException {
    private static final long serialVersionUID = 1L;
    /** Error code for this exception */
    public static final int ERROR_CODE = -2;

    DuplicationException(String message) {
      super(message);
    }
  }
}
robzor92/hops
hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCpV1.java
Java
apache-2.0
63,413
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.operators; import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.api.java.typeutils.runtime.RuntimePairComparatorFactory; import org.apache.flink.runtime.operators.testutils.DelayingIterator; import org.apache.flink.runtime.operators.testutils.DiscardingOutputCollector; import org.apache.flink.runtime.operators.testutils.ExpectedTestException; import org.apache.flink.runtime.operators.testutils.InfiniteIntTupleIterator; import org.apache.flink.runtime.operators.testutils.UniformIntTupleGenerator; import org.apache.flink.shaded.guava30.com.google.common.base.Throwables; import org.junit.Assert; import org.junit.Test; import java.util.concurrent.atomic.AtomicReference; import static org.junit.Assert.assertFalse; import static org.junit.Assert.fail; public class LeftOuterJoinTaskTest extends AbstractOuterJoinTaskTest { private static final long HASH_MEM = 6 * 1024 * 1024; private final double hash_frac; public LeftOuterJoinTaskTest(ExecutionConfig config) { super(config); hash_frac = (double) HASH_MEM / this.getMemoryManager().getMemorySize(); } @Override protected int calculateExpectedCount(int 
keyCnt1, int valCnt1, int keyCnt2, int valCnt2) { return valCnt1 * valCnt2 * Math.min(keyCnt1, keyCnt2) + (keyCnt1 > keyCnt2 ? (keyCnt1 - keyCnt2) * valCnt1 : 0); } @Override protected DriverStrategy getSortDriverStrategy() { return DriverStrategy.LEFT_OUTER_MERGE; } @Override protected AbstractOuterJoinDriver< Tuple2<Integer, Integer>, Tuple2<Integer, Integer>, Tuple2<Integer, Integer>> getOuterJoinDriver() { return new LeftOuterJoinDriver<>(); } @Test public void testHash1LeftOuterJoinTask() throws Exception { final int keyCnt1 = 20; final int valCnt1 = 1; final int keyCnt2 = 10; final int valCnt2 = 2; testHashLeftOuterJoinTask(keyCnt1, valCnt1, keyCnt2, valCnt2); } @Test public void testHash2LeftOuterJoinTask() throws Exception { final int keyCnt1 = 20; final int valCnt1 = 1; final int keyCnt2 = 20; final int valCnt2 = 1; testHashLeftOuterJoinTask(keyCnt1, valCnt1, keyCnt2, valCnt2); } @Test public void testHash3LeftOuterJoinTask() throws Exception { int keyCnt1 = 20; int valCnt1 = 1; int keyCnt2 = 20; int valCnt2 = 20; testHashLeftOuterJoinTask(keyCnt1, valCnt1, keyCnt2, valCnt2); } @Test public void testHash4LeftOuterJoinTask() throws Exception { int keyCnt1 = 20; int valCnt1 = 20; int keyCnt2 = 20; int valCnt2 = 1; testHashLeftOuterJoinTask(keyCnt1, valCnt1, keyCnt2, valCnt2); } @Test public void testHash5LeftOuterJoinTask() throws Exception { int keyCnt1 = 20; int valCnt1 = 20; int keyCnt2 = 20; int valCnt2 = 20; testHashLeftOuterJoinTask(keyCnt1, valCnt1, keyCnt2, valCnt2); } @Test public void testHash6LeftOuterJoinTask() throws Exception { int keyCnt1 = 10; int valCnt1 = 1; int keyCnt2 = 20; int valCnt2 = 2; testHashLeftOuterJoinTask(keyCnt1, valCnt1, keyCnt2, valCnt2); } private void testHashLeftOuterJoinTask(int keyCnt1, int valCnt1, int keyCnt2, int valCnt2) throws Exception { setOutput(this.outList, this.serializer); addDriverComparator(this.comparator1); addDriverComparator(this.comparator2); getTaskConfig().setDriverPairComparator(new 
RuntimePairComparatorFactory()); getTaskConfig().setDriverStrategy(DriverStrategy.LEFT_HYBRIDHASH_BUILD_SECOND); getTaskConfig().setRelativeMemoryDriver(hash_frac); final AbstractOuterJoinDriver< Tuple2<Integer, Integer>, Tuple2<Integer, Integer>, Tuple2<Integer, Integer>> testTask = getOuterJoinDriver(); addInput(new UniformIntTupleGenerator(keyCnt1, valCnt1, false), this.serializer); addInput(new UniformIntTupleGenerator(keyCnt2, valCnt2, false), this.serializer); testDriver(testTask, MockJoinStub.class); final int expCnt = calculateExpectedCount(keyCnt1, valCnt1, keyCnt2, valCnt2); Assert.assertTrue( "Result set size was " + this.outList.size() + ". Expected was " + expCnt, this.outList.size() == expCnt); this.outList.clear(); } @Test(expected = ExpectedTestException.class) public void testFailingHashLeftOuterJoinTask() throws Exception { int keyCnt1 = 20; int valCnt1 = 20; int keyCnt2 = 20; int valCnt2 = 20; setOutput(new DiscardingOutputCollector<Tuple2<Integer, Integer>>()); addDriverComparator(this.comparator1); addDriverComparator(this.comparator2); getTaskConfig().setDriverPairComparator(new RuntimePairComparatorFactory()); getTaskConfig().setDriverStrategy(DriverStrategy.LEFT_HYBRIDHASH_BUILD_SECOND); getTaskConfig().setRelativeMemoryDriver(this.hash_frac); final AbstractOuterJoinDriver< Tuple2<Integer, Integer>, Tuple2<Integer, Integer>, Tuple2<Integer, Integer>> testTask = getOuterJoinDriver(); addInput(new UniformIntTupleGenerator(keyCnt1, valCnt1, true), this.serializer); addInput(new UniformIntTupleGenerator(keyCnt2, valCnt2, true), this.serializer); testDriver(testTask, MockFailingJoinStub.class); } @Test public void testCancelLeftOuterJoinTaskWhileBuilding() throws Exception { setOutput(new DiscardingOutputCollector<Tuple2<Integer, Integer>>()); addDriverComparator(this.comparator1); addDriverComparator(this.comparator2); getTaskConfig().setDriverPairComparator(new RuntimePairComparatorFactory()); 
getTaskConfig().setDriverStrategy(DriverStrategy.LEFT_HYBRIDHASH_BUILD_SECOND);
        getTaskConfig().setRelativeMemoryDriver(this.hash_frac);

        final AbstractOuterJoinDriver<
                        Tuple2<Integer, Integer>, Tuple2<Integer, Integer>,
                        Tuple2<Integer, Integer>>
                testTask = getOuterJoinDriver();

        // The build side (second input) is delayed/infinite, so the task is
        // still constructing the hash table when cancel() arrives.
        addInput(new UniformIntTupleGenerator(100, 100, true), this.serializer);
        addInput(new DelayingIterator<>(new InfiniteIntTupleIterator(), 100), this.serializer);

        final AtomicReference<Throwable> error = new AtomicReference<>();

        // FIX: the thread name previously read "testCancelOuterJoinTaskWhileSort1()",
        // a copy/paste leftover that made thread dumps point at the wrong test.
        final Thread taskRunner =
                new Thread("Task runner for testCancelLeftOuterJoinTaskWhileBuilding()") {
                    @Override
                    public void run() {
                        try {
                            testDriver(testTask, MockJoinStub.class);
                        } catch (Throwable t) {
                            error.set(t);
                        }
                    }
                };
        taskRunner.start();

        // Let the driver get going, request cancellation, then wait (bounded)
        // for the task thread to wind down.
        Thread.sleep(1000);
        cancel();
        taskRunner.join(60000);
        assertFalse("Task thread did not finish within 60 seconds", taskRunner.isAlive());

        final Throwable taskError = error.get();
        if (taskError != null) {
            fail("Error in task while canceling:\n"
                    + Throwables.getStackTraceAsString(taskError));
        }
    }

    /**
     * Cancels a left outer join while the probe phase is still consuming
     * input and verifies that the task thread terminates cleanly.
     */
    @Test
    public void testCancelLeftOuterJoinTaskWhileProbing() throws Exception {
        setOutput(new DiscardingOutputCollector<Tuple2<Integer, Integer>>());
        addDriverComparator(this.comparator1);
        addDriverComparator(this.comparator2);
        getTaskConfig().setDriverPairComparator(new RuntimePairComparatorFactory());
        getTaskConfig().setDriverStrategy(DriverStrategy.LEFT_HYBRIDHASH_BUILD_SECOND);
        getTaskConfig().setRelativeMemoryDriver(this.hash_frac);

        final AbstractOuterJoinDriver<
                        Tuple2<Integer, Integer>, Tuple2<Integer, Integer>,
                        Tuple2<Integer, Integer>>
                testTask = getOuterJoinDriver();

        // Probe side (first input) is delayed/infinite while the tiny build
        // side completes, so cancellation lands in the probe phase.
        addInput(new DelayingIterator<>(new InfiniteIntTupleIterator(), 100), this.serializer);
        addInput(new UniformIntTupleGenerator(1, 1, true), this.serializer);

        final AtomicReference<Throwable> error = new AtomicReference<>();

        // FIX: corrected copy/pasted thread name (was "...WhileSort1()").
        final Thread taskRunner =
                new Thread("Task runner for testCancelLeftOuterJoinTaskWhileProbing()") {
                    @Override
                    public void run() {
                        try {
                            testDriver(testTask, MockJoinStub.class);
                        } catch (Throwable t) {
                            error.set(t);
                        }
                    }
                };
        taskRunner.start();

        Thread.sleep(1000);
        cancel();
        taskRunner.join(60000);
        assertFalse("Task thread did not finish within 60 seconds", taskRunner.isAlive());

        final Throwable taskError = error.get();
        if (taskError != null) {
            fail("Error in task while canceling:\n"
                    + Throwables.getStackTraceAsString(taskError));
        }
    }
}
apache/flink
flink-runtime/src/test/java/org/apache/flink/runtime/operators/LeftOuterJoinTaskTest.java
Java
apache-2.0
10,266
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.gora; import java.util.Map; import org.apache.camel.Endpoint; import org.apache.camel.Exchange; import org.apache.camel.ServicePoolAware; import org.apache.camel.impl.DefaultProducer; import org.apache.gora.persistency.Persistent; import org.apache.gora.store.DataStore; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static org.apache.camel.component.gora.utils.GoraUtils.constractQueryFromPropertiesMap; import static org.apache.camel.component.gora.utils.GoraUtils.getKeyFromExchange; import static org.apache.camel.component.gora.utils.GoraUtils.getValueFromExchange; /** * Camel-Gora {@link DefaultProducer}. 
*/ public class GoraProducer extends DefaultProducer implements ServicePoolAware { /** * logger */ private static final Logger LOG = LoggerFactory.getLogger(GoraProducer.class); /** * Camel-Gora endpoint configuration */ private final GoraConfiguration configuration; /** * GORA datastore */ private final DataStore<Object, Persistent> dataStore; /** * Constructor * * @param endpoint Reference to the Camel-Gora endpoint * @param configuration Reference to Camel-Gora endpoint configuration * @param dataStore Reference to the datastore */ public GoraProducer(final Endpoint endpoint, final GoraConfiguration configuration, final DataStore<Object, Persistent> dataStore) { super(endpoint); this.dataStore = dataStore; this.configuration = configuration; } /** * {@inheritDoc} */ @Override public void process(final Exchange exchange) throws Exception { final String operation = (String) exchange.getIn().getHeader(GoraAttribute.GORA_OPERATION.value); if (operation == null || operation.isEmpty()) { throw new RuntimeException("Gora operation is null or empty!"); } Object result = 0; // 0 used as default response in order to avoid null body exception if (GoraOperation.PUT.value.equalsIgnoreCase(operation)) { dataStore.put(getKeyFromExchange(exchange), getValueFromExchange(exchange)); } else if (GoraOperation.GET.value.equalsIgnoreCase(operation)) { result = dataStore.get(getKeyFromExchange(exchange)); } else if (GoraOperation.DELETE.value.equalsIgnoreCase(operation)) { result = dataStore.delete(getKeyFromExchange(exchange)); } else if (GoraOperation.QUERY.value.equalsIgnoreCase(operation)) { final Map<String, Object> props = exchange.getIn().getHeaders(); result = constractQueryFromPropertiesMap(props, dataStore, this.configuration).execute(); } else if (GoraOperation.DELETE_BY_QUERY.value.equalsIgnoreCase(operation)) { final Map<String, Object> props = exchange.getIn().getHeaders(); result = dataStore.deleteByQuery(constractQueryFromPropertiesMap(props, dataStore, 
this.configuration)); } else if (GoraOperation.GET_SCHEMA_NAME.value.equalsIgnoreCase(operation)) { result = dataStore.getSchemaName(); } else if (GoraOperation.DELETE_SCHEMA.value.equalsIgnoreCase(operation)) { dataStore.deleteSchema(); } else if (GoraOperation.CREATE_SCHEMA.value.equalsIgnoreCase(operation)) { dataStore.createSchema(); } else if (GoraOperation.SCHEMA_EXIST.value.equalsIgnoreCase(operation)) { result = dataStore.schemaExists(); } else { throw new RuntimeException("Unknown operation: " + operation); } /* from the tests auto-flush seems not to work always therefore a temporary solution is calling flush on every action */ if (configuration.isFlushOnEveryOperation()) { dataStore.flush(); } exchange.getOut().setBody(result); // preserve headers and attachments exchange.getOut().setHeaders(exchange.getIn().getHeaders()); exchange.getOut().setAttachments(exchange.getIn().getAttachments()); } }
brreitme/camel
components/camel-gora/src/main/java/org/apache/camel/component/gora/GoraProducer.java
Java
apache-2.0
4,978
/*! * UI development toolkit for HTML5 (OpenUI5) * (c) Copyright 2009-2016 SAP SE or an SAP affiliate company. * Licensed under the Apache License, Version 2.0 - see LICENSE.txt. */ sap.ui.define(function(){"use strict";var T={};T.render=function(r,c){var s=c.isInline()||this.hasControlData;if(!s){r.write("<div");r.writeControlData(c);r.writeStyles();r.writeClasses();r.write(">");}var R=this.renderTemplate||c.getTemplateRenderer();if(R){R.apply(this,arguments);}if(!s){r.write("</div>");}};return T;},true);
and1985129/digitalofficemobile
www/lib/ui5/sap/ui/core/tmpl/TemplateControlRenderer.js
JavaScript
apache-2.0
516
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.aries.spifly.mysvc.impl3; import org.apache.aries.spifly.mysvc.SPIProvider; public class SPIProviderImpl extends SPIProvider { @Override public String doit() { return "Doing it as well!"; } }
fwassmer/aries
spi-fly/spi-fly-examples/spi-fly-example-provider3-bundle/src/main/java/org/apache/aries/spifly/mysvc/impl3/SPIProviderImpl.java
Java
apache-2.0
1,055
// Copyright 2018 the V8 project authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. // Flags: --allow-natives-syntax (function() { function foo(p) { return p.catch(); } %PrepareFunctionForOptimization(foo); foo(Promise.resolve(1)); foo(Promise.resolve(1)); %OptimizeFunctionOnNextCall(foo); foo(Promise.resolve(1)); })(); (function() { function foo(p) { return p.catch(foo); } %PrepareFunctionForOptimization(foo); foo(Promise.resolve(1)); foo(Promise.resolve(1)); %OptimizeFunctionOnNextCall(foo); foo(Promise.resolve(1)); })(); (function() { function foo(p) { return p.catch(foo, undefined); } %PrepareFunctionForOptimization(foo); foo(Promise.resolve(1)); foo(Promise.resolve(1)); %OptimizeFunctionOnNextCall(foo); foo(Promise.resolve(1)); })();
zero-rp/miniblink49
v8_7_5/test/mjsunit/compiler/promise-prototype-catch.js
JavaScript
apache-2.0
872
/* Copyright 2015 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package namespace import ( "fmt" "sync" "time" "github.com/golang/glog" "k8s.io/kubernetes/pkg/api/errors" "k8s.io/kubernetes/pkg/api/v1" metav1 "k8s.io/kubernetes/pkg/apis/meta/v1" "k8s.io/kubernetes/pkg/apis/meta/v1/unstructured" clientset "k8s.io/kubernetes/pkg/client/clientset_generated/clientset" "k8s.io/kubernetes/pkg/client/typed/discovery" "k8s.io/kubernetes/pkg/client/typed/dynamic" "k8s.io/kubernetes/pkg/runtime/schema" "k8s.io/kubernetes/pkg/util/sets" ) // contentRemainingError is used to inform the caller that content is not fully removed from the namespace type contentRemainingError struct { Estimate int64 } func (e *contentRemainingError) Error() string { return fmt.Sprintf("some content remains in the namespace, estimate %d seconds before it is removed", e.Estimate) } // operation is used for caching if an operation is supported on a dynamic client. type operation string const ( operationDeleteCollection operation = "deleteCollection" operationList operation = "list" // assume a default estimate for finalizers to complete when found on items pending deletion. finalizerEstimateSeconds int64 = int64(15) ) // operationKey is an entry in a cache. type operationKey struct { op operation gvr schema.GroupVersionResource } // operationNotSupportedCache is a simple cache to remember if an operation is not supported for a resource. 
// if the operationKey maps to true, it means the operation is not supported. type operationNotSupportedCache struct { lock sync.RWMutex m map[operationKey]bool } // isSupported returns true if the operation is supported func (o *operationNotSupportedCache) isSupported(key operationKey) bool { o.lock.RLock() defer o.lock.RUnlock() return !o.m[key] } func (o *operationNotSupportedCache) setNotSupported(key operationKey) { o.lock.Lock() defer o.lock.Unlock() o.m[key] = true } // updateNamespaceFunc is a function that makes an update to a namespace type updateNamespaceFunc func(kubeClient clientset.Interface, namespace *v1.Namespace) (*v1.Namespace, error) // retryOnConflictError retries the specified fn if there was a conflict error // it will return an error if the UID for an object changes across retry operations. // TODO RetryOnConflict should be a generic concept in client code func retryOnConflictError(kubeClient clientset.Interface, namespace *v1.Namespace, fn updateNamespaceFunc) (result *v1.Namespace, err error) { latestNamespace := namespace for { result, err = fn(kubeClient, latestNamespace) if err == nil { return result, nil } if !errors.IsConflict(err) { return nil, err } prevNamespace := latestNamespace latestNamespace, err = kubeClient.Core().Namespaces().Get(latestNamespace.Name, metav1.GetOptions{}) if err != nil { return nil, err } if prevNamespace.UID != latestNamespace.UID { return nil, fmt.Errorf("namespace uid has changed across retries") } } } // updateNamespaceStatusFunc will verify that the status of the namespace is correct func updateNamespaceStatusFunc(kubeClient clientset.Interface, namespace *v1.Namespace) (*v1.Namespace, error) { if namespace.DeletionTimestamp.IsZero() || namespace.Status.Phase == v1.NamespaceTerminating { return namespace, nil } newNamespace := v1.Namespace{} newNamespace.ObjectMeta = namespace.ObjectMeta newNamespace.Status = namespace.Status newNamespace.Status.Phase = v1.NamespaceTerminating return 
kubeClient.Core().Namespaces().UpdateStatus(&newNamespace) } // finalized returns true if the namespace.Spec.Finalizers is an empty list func finalized(namespace *v1.Namespace) bool { return len(namespace.Spec.Finalizers) == 0 } // finalizeNamespaceFunc returns a function that knows how to finalize a namespace for specified token. func finalizeNamespaceFunc(finalizerToken v1.FinalizerName) updateNamespaceFunc { return func(kubeClient clientset.Interface, namespace *v1.Namespace) (*v1.Namespace, error) { return finalizeNamespace(kubeClient, namespace, finalizerToken) } } // finalizeNamespace removes the specified finalizerToken and finalizes the namespace func finalizeNamespace(kubeClient clientset.Interface, namespace *v1.Namespace, finalizerToken v1.FinalizerName) (*v1.Namespace, error) { namespaceFinalize := v1.Namespace{} namespaceFinalize.ObjectMeta = namespace.ObjectMeta namespaceFinalize.Spec = namespace.Spec finalizerSet := sets.NewString() for i := range namespace.Spec.Finalizers { if namespace.Spec.Finalizers[i] != finalizerToken { finalizerSet.Insert(string(namespace.Spec.Finalizers[i])) } } namespaceFinalize.Spec.Finalizers = make([]v1.FinalizerName, 0, len(finalizerSet)) for _, value := range finalizerSet.List() { namespaceFinalize.Spec.Finalizers = append(namespaceFinalize.Spec.Finalizers, v1.FinalizerName(value)) } namespace, err := kubeClient.Core().Namespaces().Finalize(&namespaceFinalize) if err != nil { // it was removed already, so life is good if errors.IsNotFound(err) { return namespace, nil } } return namespace, err } // deleteCollection is a helper function that will delete the collection of resources // it returns true if the operation was supported on the server. // it returns an error if the operation was supported on the server but was unable to complete. 
func deleteCollection( dynamicClient *dynamic.Client, opCache *operationNotSupportedCache, gvr schema.GroupVersionResource, namespace string, ) (bool, error) { glog.V(5).Infof("namespace controller - deleteCollection - namespace: %s, gvr: %v", namespace, gvr) key := operationKey{op: operationDeleteCollection, gvr: gvr} if !opCache.isSupported(key) { glog.V(5).Infof("namespace controller - deleteCollection ignored since not supported - namespace: %s, gvr: %v", namespace, gvr) return false, nil } apiResource := metav1.APIResource{Name: gvr.Resource, Namespaced: true} // namespace controller does not want the garbage collector to insert the orphan finalizer since it calls // resource deletions generically. it will ensure all resources in the namespace are purged prior to releasing // namespace itself. orphanDependents := false err := dynamicClient.Resource(&apiResource, namespace).DeleteCollection(&v1.DeleteOptions{OrphanDependents: &orphanDependents}, &v1.ListOptions{}) if err == nil { return true, nil } // this is strange, but we need to special case for both MethodNotSupported and NotFound errors // TODO: https://github.com/kubernetes/kubernetes/issues/22413 // we have a resource returned in the discovery API that supports no top-level verbs: // /apis/extensions/v1beta1/namespaces/default/replicationcontrollers // when working with this resource type, we will get a literal not found error rather than expected method not supported // remember next time that this resource does not support delete collection... 
if errors.IsMethodNotSupported(err) || errors.IsNotFound(err) { glog.V(5).Infof("namespace controller - deleteCollection not supported - namespace: %s, gvr: %v", namespace, gvr) opCache.setNotSupported(key) return false, nil } glog.V(5).Infof("namespace controller - deleteCollection unexpected error - namespace: %s, gvr: %v, error: %v", namespace, gvr, err) return true, err } // listCollection will list the items in the specified namespace // it returns the following: // the list of items in the collection (if found) // a boolean if the operation is supported // an error if the operation is supported but could not be completed. func listCollection( dynamicClient *dynamic.Client, opCache *operationNotSupportedCache, gvr schema.GroupVersionResource, namespace string, ) (*unstructured.UnstructuredList, bool, error) { glog.V(5).Infof("namespace controller - listCollection - namespace: %s, gvr: %v", namespace, gvr) key := operationKey{op: operationList, gvr: gvr} if !opCache.isSupported(key) { glog.V(5).Infof("namespace controller - listCollection ignored since not supported - namespace: %s, gvr: %v", namespace, gvr) return nil, false, nil } apiResource := metav1.APIResource{Name: gvr.Resource, Namespaced: true} obj, err := dynamicClient.Resource(&apiResource, namespace).List(&v1.ListOptions{}) if err == nil { unstructuredList, ok := obj.(*unstructured.UnstructuredList) if !ok { return nil, false, fmt.Errorf("resource: %s, expected *unstructured.UnstructuredList, got %#v", apiResource.Name, obj) } return unstructuredList, true, nil } // this is strange, but we need to special case for both MethodNotSupported and NotFound errors // TODO: https://github.com/kubernetes/kubernetes/issues/22413 // we have a resource returned in the discovery API that supports no top-level verbs: // /apis/extensions/v1beta1/namespaces/default/replicationcontrollers // when working with this resource type, we will get a literal not found error rather than expected method not supported // 
remember next time that this resource does not support delete collection... if errors.IsMethodNotSupported(err) || errors.IsNotFound(err) { glog.V(5).Infof("namespace controller - listCollection not supported - namespace: %s, gvr: %v", namespace, gvr) opCache.setNotSupported(key) return nil, false, nil } return nil, true, err } // deleteEachItem is a helper function that will list the collection of resources and delete each item 1 by 1. func deleteEachItem( dynamicClient *dynamic.Client, opCache *operationNotSupportedCache, gvr schema.GroupVersionResource, namespace string, ) error { glog.V(5).Infof("namespace controller - deleteEachItem - namespace: %s, gvr: %v", namespace, gvr) unstructuredList, listSupported, err := listCollection(dynamicClient, opCache, gvr, namespace) if err != nil { return err } if !listSupported { return nil } apiResource := metav1.APIResource{Name: gvr.Resource, Namespaced: true} for _, item := range unstructuredList.Items { if err = dynamicClient.Resource(&apiResource, namespace).Delete(item.GetName(), nil); err != nil && !errors.IsNotFound(err) && !errors.IsMethodNotSupported(err) { return err } } return nil } // deleteAllContentForGroupVersionResource will use the dynamic client to delete each resource identified in gvr. // It returns an estimate of the time remaining before the remaining resources are deleted. // If estimate > 0, not all resources are guaranteed to be gone. 
func deleteAllContentForGroupVersionResource( kubeClient clientset.Interface, clientPool dynamic.ClientPool, opCache *operationNotSupportedCache, gvr schema.GroupVersionResource, namespace string, namespaceDeletedAt metav1.Time, ) (int64, error) { glog.V(5).Infof("namespace controller - deleteAllContentForGroupVersionResource - namespace: %s, gvr: %v", namespace, gvr) // estimate how long it will take for the resource to be deleted (needed for objects that support graceful delete) estimate, err := estimateGracefulTermination(kubeClient, gvr, namespace, namespaceDeletedAt) if err != nil { glog.V(5).Infof("namespace controller - deleteAllContentForGroupVersionResource - unable to estimate - namespace: %s, gvr: %v, err: %v", namespace, gvr, err) return estimate, err } glog.V(5).Infof("namespace controller - deleteAllContentForGroupVersionResource - estimate - namespace: %s, gvr: %v, estimate: %v", namespace, gvr, estimate) // get a client for this group version... dynamicClient, err := clientPool.ClientForGroupVersionResource(gvr) if err != nil { glog.V(5).Infof("namespace controller - deleteAllContentForGroupVersionResource - unable to get client - namespace: %s, gvr: %v, err: %v", namespace, gvr, err) return estimate, err } // first try to delete the entire collection deleteCollectionSupported, err := deleteCollection(dynamicClient, opCache, gvr, namespace) if err != nil { return estimate, err } // delete collection was not supported, so we list and delete each item... 
if !deleteCollectionSupported { err = deleteEachItem(dynamicClient, opCache, gvr, namespace) if err != nil { return estimate, err } } // verify there are no more remaining items // it is not an error condition for there to be remaining items if local estimate is non-zero glog.V(5).Infof("namespace controller - deleteAllContentForGroupVersionResource - checking for no more items in namespace: %s, gvr: %v", namespace, gvr) unstructuredList, listSupported, err := listCollection(dynamicClient, opCache, gvr, namespace) if err != nil { glog.V(5).Infof("namespace controller - deleteAllContentForGroupVersionResource - error verifying no items in namespace: %s, gvr: %v, err: %v", namespace, gvr, err) return estimate, err } if !listSupported { return estimate, nil } glog.V(5).Infof("namespace controller - deleteAllContentForGroupVersionResource - items remaining - namespace: %s, gvr: %v, items: %v", namespace, gvr, len(unstructuredList.Items)) if len(unstructuredList.Items) != 0 && estimate == int64(0) { // if any item has a finalizer, we treat that as a normal condition, and use a default estimation to allow for GC to complete. for _, item := range unstructuredList.Items { if len(item.GetFinalizers()) > 0 { glog.V(5).Infof("namespace controller - deleteAllContentForGroupVersionResource - items remaining with finalizers - namespace: %s, gvr: %v, finalizers: %v", namespace, gvr, item.GetFinalizers()) return finalizerEstimateSeconds, nil } } // nothing reported a finalizer, so something was unexpected as it should have been deleted. return estimate, fmt.Errorf("unexpected items still remain in namespace: %s for gvr: %v", namespace, gvr) } return estimate, nil } // deleteAllContent will use the dynamic client to delete each resource identified in groupVersionResources. // It returns an estimate of the time remaining before the remaining resources are deleted. // If estimate > 0, not all resources are guaranteed to be gone. 
func deleteAllContent( kubeClient clientset.Interface, clientPool dynamic.ClientPool, opCache *operationNotSupportedCache, groupVersionResources map[schema.GroupVersionResource]struct{}, namespace string, namespaceDeletedAt metav1.Time, ) (int64, error) { estimate := int64(0) glog.V(4).Infof("namespace controller - deleteAllContent - namespace: %s, gvrs: %v", namespace, groupVersionResources) for gvr := range groupVersionResources { gvrEstimate, err := deleteAllContentForGroupVersionResource(kubeClient, clientPool, opCache, gvr, namespace, namespaceDeletedAt) if err != nil { return estimate, err } if gvrEstimate > estimate { estimate = gvrEstimate } } glog.V(4).Infof("namespace controller - deleteAllContent - namespace: %s, estimate: %v", namespace, estimate) return estimate, nil } // syncNamespace orchestrates deletion of a Namespace and its associated content. func syncNamespace( kubeClient clientset.Interface, clientPool dynamic.ClientPool, opCache *operationNotSupportedCache, discoverResourcesFn func() ([]*metav1.APIResourceList, error), namespace *v1.Namespace, finalizerToken v1.FinalizerName, ) error { if namespace.DeletionTimestamp == nil { return nil } // multiple controllers may edit a namespace during termination // first get the latest state of the namespace before proceeding // if the namespace was deleted already, don't do anything namespace, err := kubeClient.Core().Namespaces().Get(namespace.Name, metav1.GetOptions{}) if err != nil { if errors.IsNotFound(err) { return nil } return err } glog.V(5).Infof("namespace controller - syncNamespace - namespace: %s, finalizerToken: %s", namespace.Name, finalizerToken) // ensure that the status is up to date on the namespace // if we get a not found error, we assume the namespace is truly gone namespace, err = retryOnConflictError(kubeClient, namespace, updateNamespaceStatusFunc) if err != nil { if errors.IsNotFound(err) { return nil } return err } // the latest view of the namespace asserts that namespace is 
no longer deleting.. if namespace.DeletionTimestamp.IsZero() { return nil } // if the namespace is already finalized, delete it if finalized(namespace) { var opts *v1.DeleteOptions uid := namespace.UID if len(uid) > 0 { opts = &v1.DeleteOptions{Preconditions: &v1.Preconditions{UID: &uid}} } err = kubeClient.Core().Namespaces().Delete(namespace.Name, opts) if err != nil && !errors.IsNotFound(err) { return err } return nil } // there may still be content for us to remove resources, err := discoverResourcesFn() if err != nil { return err } // TODO(sttts): get rid of opCache and pass the verbs (especially "deletecollection") down into the deleter deletableResources := discovery.FilteredBy(discovery.SupportsAllVerbs{Verbs: []string{"delete"}}, resources) groupVersionResources, err := discovery.GroupVersionResources(deletableResources) if err != nil { return err } estimate, err := deleteAllContent(kubeClient, clientPool, opCache, groupVersionResources, namespace.Name, *namespace.DeletionTimestamp) if err != nil { return err } if estimate > 0 { return &contentRemainingError{estimate} } // we have removed content, so mark it finalized by us result, err := retryOnConflictError(kubeClient, namespace, finalizeNamespaceFunc(finalizerToken)) if err != nil { // in normal practice, this should not be possible, but if a deployment is running // two controllers to do namespace deletion that share a common finalizer token it's // possible that a not found could occur since the other controller would have finished the delete. 
if errors.IsNotFound(err) { return nil } return err } // now check if all finalizers have reported that we delete now if finalized(result) { err = kubeClient.Core().Namespaces().Delete(namespace.Name, nil) if err != nil && !errors.IsNotFound(err) { return err } } return nil } // estimateGrracefulTermination will estimate the graceful termination required for the specific entity in the namespace func estimateGracefulTermination(kubeClient clientset.Interface, groupVersionResource schema.GroupVersionResource, ns string, namespaceDeletedAt metav1.Time) (int64, error) { groupResource := groupVersionResource.GroupResource() glog.V(5).Infof("namespace controller - estimateGracefulTermination - group %s, resource: %s", groupResource.Group, groupResource.Resource) estimate := int64(0) var err error switch groupResource { case schema.GroupResource{Group: "", Resource: "pods"}: estimate, err = estimateGracefulTerminationForPods(kubeClient, ns) } if err != nil { return estimate, err } // determine if the estimate is greater than the deletion timestamp duration := time.Since(namespaceDeletedAt.Time) allowedEstimate := time.Duration(estimate) * time.Second if duration >= allowedEstimate { estimate = int64(0) } return estimate, nil } // estimateGracefulTerminationForPods determines the graceful termination period for pods in the namespace func estimateGracefulTerminationForPods(kubeClient clientset.Interface, ns string) (int64, error) { glog.V(5).Infof("namespace controller - estimateGracefulTerminationForPods - namespace %s", ns) estimate := int64(0) items, err := kubeClient.Core().Pods(ns).List(v1.ListOptions{}) if err != nil { return estimate, err } for i := range items.Items { // filter out terminal pods phase := items.Items[i].Status.Phase if v1.PodSucceeded == phase || v1.PodFailed == phase { continue } if items.Items[i].Spec.TerminationGracePeriodSeconds != nil { grace := *items.Items[i].Spec.TerminationGracePeriodSeconds if grace > estimate { estimate = grace } } } 
return estimate, nil }
ikropotov/kops
vendor/k8s.io/kubernetes/pkg/controller/namespace/namespace_controller_utils.go
GO
apache-2.0
20,246
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ var LoginController = function($scope, $log, $uibModal, authService, userService) { $scope.credentials = { username: '', password: '' }; $scope.login = function($event, credentials) { var $btn = $($event.target); $btn.prop('disabled', true); // disable the login button to prevent multiple clicks authService.login(credentials.username, credentials.password) .then( function() { $btn.prop('disabled', false); // re-enable it } ); }; $scope.resetPassword = function() { var modalInstance = $uibModal.open({ templateUrl: 'common/modules/dialog/reset/dialog.reset.tpl.html', controller: 'DialogResetController' }); modalInstance.result.then(function(email) { userService.resetPassword(email); }, function () { }); }; var init = function() {}; init(); }; LoginController.$inject = ['$scope', '$log', '$uibModal', 'authService', 'userService']; module.exports = LoginController;
jeffmart/incubator-trafficcontrol
traffic_portal/app/src/modules/public/login/LoginController.js
JavaScript
apache-2.0
1,916
import { j as _inherits, k as _createSuper, c as _classCallCheck, T as Type, b as _createClass, R as Range, N as Node, g as YAMLSemanticError, l as _get, m as _getPrototypeOf, Y as YAMLSyntaxError, C as Char, e as _defineProperty, P as PlainValue } from './PlainValue-b8036b75.js'; var BlankLine = /*#__PURE__*/function (_Node) { _inherits(BlankLine, _Node); var _super = _createSuper(BlankLine); function BlankLine() { _classCallCheck(this, BlankLine); return _super.call(this, Type.BLANK_LINE); } /* istanbul ignore next */ _createClass(BlankLine, [{ key: "includesTrailingLines", get: function get() { // This is never called from anywhere, but if it were, // this is the value it should return. return true; } /** * Parses a blank line from the source * * @param {ParseContext} context * @param {number} start - Index of first \n character * @returns {number} - Index of the character after this */ }, { key: "parse", value: function parse(context, start) { this.context = context; this.range = new Range(start, start + 1); return start + 1; } }]); return BlankLine; }(Node); var CollectionItem = /*#__PURE__*/function (_Node) { _inherits(CollectionItem, _Node); var _super = _createSuper(CollectionItem); function CollectionItem(type, props) { var _this; _classCallCheck(this, CollectionItem); _this = _super.call(this, type, props); _this.node = null; return _this; } _createClass(CollectionItem, [{ key: "includesTrailingLines", get: function get() { return !!this.node && this.node.includesTrailingLines; } /** * @param {ParseContext} context * @param {number} start - Index of first character * @returns {number} - Index of the character after this */ }, { key: "parse", value: function parse(context, start) { this.context = context; var parseNode = context.parseNode, src = context.src; var atLineStart = context.atLineStart, lineStart = context.lineStart; if (!atLineStart && this.type === Type.SEQ_ITEM) this.error = new YAMLSemanticError(this, 'Sequence items must not have preceding 
content on the same line'); var indent = atLineStart ? start - lineStart : context.indent; var offset = Node.endOfWhiteSpace(src, start + 1); var ch = src[offset]; var inlineComment = ch === '#'; var comments = []; var blankLine = null; while (ch === '\n' || ch === '#') { if (ch === '#') { var _end = Node.endOfLine(src, offset + 1); comments.push(new Range(offset, _end)); offset = _end; } else { atLineStart = true; lineStart = offset + 1; var wsEnd = Node.endOfWhiteSpace(src, lineStart); if (src[wsEnd] === '\n' && comments.length === 0) { blankLine = new BlankLine(); lineStart = blankLine.parse({ src: src }, lineStart); } offset = Node.endOfIndent(src, lineStart); } ch = src[offset]; } if (Node.nextNodeIsIndented(ch, offset - (lineStart + indent), this.type !== Type.SEQ_ITEM)) { this.node = parseNode({ atLineStart: atLineStart, inCollection: false, indent: indent, lineStart: lineStart, parent: this }, offset); } else if (ch && lineStart > start + 1) { offset = lineStart - 1; } if (this.node) { if (blankLine) { // Only blank lines preceding non-empty nodes are captured. Note that // this means that collection item range start indices do not always // increase monotonically. -- eemeli/yaml#126 var items = context.parent.items || context.parent.contents; if (items) items.push(blankLine); } if (comments.length) Array.prototype.push.apply(this.props, comments); offset = this.node.range.end; } else { if (inlineComment) { var c = comments[0]; this.props.push(c); offset = c.end; } else { offset = Node.endOfLine(src, start + 1); } } var end = this.node ? this.node.valueRange.end : offset; this.valueRange = new Range(start, end); return offset; } }, { key: "setOrigRanges", value: function setOrigRanges(cr, offset) { offset = _get(_getPrototypeOf(CollectionItem.prototype), "setOrigRanges", this).call(this, cr, offset); return this.node ? 
this.node.setOrigRanges(cr, offset) : offset; } }, { key: "toString", value: function toString() { var src = this.context.src, node = this.node, range = this.range, value = this.value; if (value != null) return value; var str = node ? src.slice(range.start, node.range.start) + String(node) : src.slice(range.start, range.end); return Node.addStringTerminator(src, range.end, str); } }]); return CollectionItem; }(Node); var Comment = /*#__PURE__*/function (_Node) { _inherits(Comment, _Node); var _super = _createSuper(Comment); function Comment() { _classCallCheck(this, Comment); return _super.call(this, Type.COMMENT); } /** * Parses a comment line from the source * * @param {ParseContext} context * @param {number} start - Index of first character * @returns {number} - Index of the character after this scalar */ _createClass(Comment, [{ key: "parse", value: function parse(context, start) { this.context = context; var offset = this.parseComment(start); this.range = new Range(start, offset); return offset; } }]); return Comment; }(Node); function grabCollectionEndComments(node) { var cnode = node; while (cnode instanceof CollectionItem) { cnode = cnode.node; } if (!(cnode instanceof Collection)) return null; var len = cnode.items.length; var ci = -1; for (var i = len - 1; i >= 0; --i) { var n = cnode.items[i]; if (n.type === Type.COMMENT) { // Keep sufficiently indented comments with preceding node var _n$context = n.context, indent = _n$context.indent, lineStart = _n$context.lineStart; if (indent > 0 && n.range.start >= lineStart + indent) break; ci = i; } else if (n.type === Type.BLANK_LINE) ci = i;else break; } if (ci === -1) return null; var ca = cnode.items.splice(ci, len - ci); var prevEnd = ca[0].range.start; while (true) { cnode.range.end = prevEnd; if (cnode.valueRange && cnode.valueRange.end > prevEnd) cnode.valueRange.end = prevEnd; if (cnode === node) break; cnode = cnode.context.parent; } return ca; } var Collection = /*#__PURE__*/function (_Node) { 
_inherits(Collection, _Node); var _super = _createSuper(Collection); function Collection(firstItem) { var _this; _classCallCheck(this, Collection); _this = _super.call(this, firstItem.type === Type.SEQ_ITEM ? Type.SEQ : Type.MAP); for (var i = firstItem.props.length - 1; i >= 0; --i) { if (firstItem.props[i].start < firstItem.context.lineStart) { // props on previous line are assumed by the collection _this.props = firstItem.props.slice(0, i + 1); firstItem.props = firstItem.props.slice(i + 1); var itemRange = firstItem.props[0] || firstItem.valueRange; firstItem.range.start = itemRange.start; break; } } _this.items = [firstItem]; var ec = grabCollectionEndComments(firstItem); if (ec) Array.prototype.push.apply(_this.items, ec); return _this; } _createClass(Collection, [{ key: "includesTrailingLines", get: function get() { return this.items.length > 0; } /** * @param {ParseContext} context * @param {number} start - Index of first character * @returns {number} - Index of the character after this */ }, { key: "parse", value: function parse(context, start) { this.context = context; var parseNode = context.parseNode, src = context.src; // It's easier to recalculate lineStart here rather than tracking down the // last context from which to read it -- eemeli/yaml#2 var lineStart = Node.startOfLine(src, start); var firstItem = this.items[0]; // First-item context needs to be correct for later comment handling // -- eemeli/yaml#17 firstItem.context.parent = this; this.valueRange = Range.copy(firstItem.valueRange); var indent = firstItem.range.start - firstItem.context.lineStart; var offset = start; offset = Node.normalizeOffset(src, offset); var ch = src[offset]; var atLineStart = Node.endOfWhiteSpace(src, lineStart) === offset; var prevIncludesTrailingLines = false; while (ch) { while (ch === '\n' || ch === '#') { if (atLineStart && ch === '\n' && !prevIncludesTrailingLines) { var blankLine = new BlankLine(); offset = blankLine.parse({ src: src }, offset); 
this.valueRange.end = offset; if (offset >= src.length) { ch = null; break; } this.items.push(blankLine); offset -= 1; // blankLine.parse() consumes terminal newline } else if (ch === '#') { if (offset < lineStart + indent && !Collection.nextContentHasIndent(src, offset, indent)) { return offset; } var comment = new Comment(); offset = comment.parse({ indent: indent, lineStart: lineStart, src: src }, offset); this.items.push(comment); this.valueRange.end = offset; if (offset >= src.length) { ch = null; break; } } lineStart = offset + 1; offset = Node.endOfIndent(src, lineStart); if (Node.atBlank(src, offset)) { var wsEnd = Node.endOfWhiteSpace(src, offset); var next = src[wsEnd]; if (!next || next === '\n' || next === '#') { offset = wsEnd; } } ch = src[offset]; atLineStart = true; } if (!ch) { break; } if (offset !== lineStart + indent && (atLineStart || ch !== ':')) { if (offset < lineStart + indent) { if (lineStart > start) offset = lineStart; break; } else if (!this.error) { var msg = 'All collection items must start at the same column'; this.error = new YAMLSyntaxError(this, msg); } } if (firstItem.type === Type.SEQ_ITEM) { if (ch !== '-') { if (lineStart > start) offset = lineStart; break; } } else if (ch === '-' && !this.error) { // map key may start with -, as long as it's followed by a non-whitespace char var _next = src[offset + 1]; if (!_next || _next === '\n' || _next === '\t' || _next === ' ') { var _msg = 'A collection cannot be both a mapping and a sequence'; this.error = new YAMLSyntaxError(this, _msg); } } var node = parseNode({ atLineStart: atLineStart, inCollection: true, indent: indent, lineStart: lineStart, parent: this }, offset); if (!node) return offset; // at next document start this.items.push(node); this.valueRange.end = node.valueRange.end; offset = Node.normalizeOffset(src, node.range.end); ch = src[offset]; atLineStart = false; prevIncludesTrailingLines = node.includesTrailingLines; // Need to reset lineStart and atLineStart here if 
preceding node's range // has advanced to check the current line's indentation level // -- eemeli/yaml#10 & eemeli/yaml#38 if (ch) { var ls = offset - 1; var prev = src[ls]; while (prev === ' ' || prev === '\t') { prev = src[--ls]; } if (prev === '\n') { lineStart = ls + 1; atLineStart = true; } } var ec = grabCollectionEndComments(node); if (ec) Array.prototype.push.apply(this.items, ec); } return offset; } }, { key: "setOrigRanges", value: function setOrigRanges(cr, offset) { offset = _get(_getPrototypeOf(Collection.prototype), "setOrigRanges", this).call(this, cr, offset); this.items.forEach(function (node) { offset = node.setOrigRanges(cr, offset); }); return offset; } }, { key: "toString", value: function toString() { var src = this.context.src, items = this.items, range = this.range, value = this.value; if (value != null) return value; var str = src.slice(range.start, items[0].range.start) + String(items[0]); for (var i = 1; i < items.length; ++i) { var item = items[i]; var _item$context = item.context, atLineStart = _item$context.atLineStart, indent = _item$context.indent; if (atLineStart) for (var _i = 0; _i < indent; ++_i) { str += ' '; } str += String(item); } return Node.addStringTerminator(src, range.end, str); } }], [{ key: "nextContentHasIndent", value: function nextContentHasIndent(src, offset, indent) { var lineStart = Node.endOfLine(src, offset) + 1; offset = Node.endOfWhiteSpace(src, lineStart); var ch = src[offset]; if (!ch) return false; if (offset >= lineStart + indent) return true; if (ch !== '#' && ch !== '\n') return false; return Collection.nextContentHasIndent(src, offset, indent); } }]); return Collection; }(Node); var Directive = /*#__PURE__*/function (_Node) { _inherits(Directive, _Node); var _super = _createSuper(Directive); function Directive() { var _this; _classCallCheck(this, Directive); _this = _super.call(this, Type.DIRECTIVE); _this.name = null; return _this; } _createClass(Directive, [{ key: "parameters", get: function get() { 
var raw = this.rawValue; return raw ? raw.trim().split(/[ \t]+/) : []; } }, { key: "parseName", value: function parseName(start) { var src = this.context.src; var offset = start; var ch = src[offset]; while (ch && ch !== '\n' && ch !== '\t' && ch !== ' ') { ch = src[offset += 1]; } this.name = src.slice(start, offset); return offset; } }, { key: "parseParameters", value: function parseParameters(start) { var src = this.context.src; var offset = start; var ch = src[offset]; while (ch && ch !== '\n' && ch !== '#') { ch = src[offset += 1]; } this.valueRange = new Range(start, offset); return offset; } }, { key: "parse", value: function parse(context, start) { this.context = context; var offset = this.parseName(start + 1); offset = this.parseParameters(offset); offset = this.parseComment(offset); this.range = new Range(start, offset); return offset; } }]); return Directive; }(Node); var Document = /*#__PURE__*/function (_Node) { _inherits(Document, _Node); var _super = _createSuper(Document); function Document() { var _this; _classCallCheck(this, Document); _this = _super.call(this, Type.DOCUMENT); _this.directives = null; _this.contents = null; _this.directivesEndMarker = null; _this.documentEndMarker = null; return _this; } _createClass(Document, [{ key: "parseDirectives", value: function parseDirectives(start) { var src = this.context.src; this.directives = []; var atLineStart = true; var hasDirectives = false; var offset = start; while (!Node.atDocumentBoundary(src, offset, Char.DIRECTIVES_END)) { offset = Document.startCommentOrEndBlankLine(src, offset); switch (src[offset]) { case '\n': if (atLineStart) { var blankLine = new BlankLine(); offset = blankLine.parse({ src: src }, offset); if (offset < src.length) { this.directives.push(blankLine); } } else { offset += 1; atLineStart = true; } break; case '#': { var comment = new Comment(); offset = comment.parse({ src: src }, offset); this.directives.push(comment); atLineStart = false; } break; case '%': { var 
directive = new Directive(); offset = directive.parse({ parent: this, src: src }, offset); this.directives.push(directive); hasDirectives = true; atLineStart = false; } break; default: if (hasDirectives) { this.error = new YAMLSemanticError(this, 'Missing directives-end indicator line'); } else if (this.directives.length > 0) { this.contents = this.directives; this.directives = []; } return offset; } } if (src[offset]) { this.directivesEndMarker = new Range(offset, offset + 3); return offset + 3; } if (hasDirectives) { this.error = new YAMLSemanticError(this, 'Missing directives-end indicator line'); } else if (this.directives.length > 0) { this.contents = this.directives; this.directives = []; } return offset; } }, { key: "parseContents", value: function parseContents(start) { var _this$context = this.context, parseNode = _this$context.parseNode, src = _this$context.src; if (!this.contents) this.contents = []; var lineStart = start; while (src[lineStart - 1] === '-') { lineStart -= 1; } var offset = Node.endOfWhiteSpace(src, start); var atLineStart = lineStart === start; this.valueRange = new Range(offset); while (!Node.atDocumentBoundary(src, offset, Char.DOCUMENT_END)) { switch (src[offset]) { case '\n': if (atLineStart) { var blankLine = new BlankLine(); offset = blankLine.parse({ src: src }, offset); if (offset < src.length) { this.contents.push(blankLine); } } else { offset += 1; atLineStart = true; } lineStart = offset; break; case '#': { var comment = new Comment(); offset = comment.parse({ src: src }, offset); this.contents.push(comment); atLineStart = false; } break; default: { var iEnd = Node.endOfIndent(src, offset); var context = { atLineStart: atLineStart, indent: -1, inFlow: false, inCollection: false, lineStart: lineStart, parent: this }; var node = parseNode(context, iEnd); if (!node) return this.valueRange.end = iEnd; // at next document start this.contents.push(node); offset = node.range.end; atLineStart = false; var ec = 
grabCollectionEndComments(node); if (ec) Array.prototype.push.apply(this.contents, ec); } } offset = Document.startCommentOrEndBlankLine(src, offset); } this.valueRange.end = offset; if (src[offset]) { this.documentEndMarker = new Range(offset, offset + 3); offset += 3; if (src[offset]) { offset = Node.endOfWhiteSpace(src, offset); if (src[offset] === '#') { var _comment = new Comment(); offset = _comment.parse({ src: src }, offset); this.contents.push(_comment); } switch (src[offset]) { case '\n': offset += 1; break; case undefined: break; default: this.error = new YAMLSyntaxError(this, 'Document end marker line cannot have a non-comment suffix'); } } } return offset; } /** * @param {ParseContext} context * @param {number} start - Index of first character * @returns {number} - Index of the character after this */ }, { key: "parse", value: function parse(context, start) { context.root = this; this.context = context; var src = context.src; var offset = src.charCodeAt(start) === 0xfeff ? start + 1 : start; // skip BOM offset = this.parseDirectives(offset); offset = this.parseContents(offset); return offset; } }, { key: "setOrigRanges", value: function setOrigRanges(cr, offset) { offset = _get(_getPrototypeOf(Document.prototype), "setOrigRanges", this).call(this, cr, offset); this.directives.forEach(function (node) { offset = node.setOrigRanges(cr, offset); }); if (this.directivesEndMarker) offset = this.directivesEndMarker.setOrigRange(cr, offset); this.contents.forEach(function (node) { offset = node.setOrigRanges(cr, offset); }); if (this.documentEndMarker) offset = this.documentEndMarker.setOrigRange(cr, offset); return offset; } }, { key: "toString", value: function toString() { var contents = this.contents, directives = this.directives, value = this.value; if (value != null) return value; var str = directives.join(''); if (contents.length > 0) { if (directives.length > 0 || contents[0].type === Type.COMMENT) str += '---\n'; str += contents.join(''); } if 
(str[str.length - 1] !== '\n') str += '\n'; return str; } }], [{ key: "startCommentOrEndBlankLine", value: function startCommentOrEndBlankLine(src, start) { var offset = Node.endOfWhiteSpace(src, start); var ch = src[offset]; return ch === '#' || ch === '\n' ? offset : start; } }]); return Document; }(Node); var Alias = /*#__PURE__*/function (_Node) { _inherits(Alias, _Node); var _super = _createSuper(Alias); function Alias() { _classCallCheck(this, Alias); return _super.apply(this, arguments); } _createClass(Alias, [{ key: "parse", value: /** * Parses an *alias from the source * * @param {ParseContext} context * @param {number} start - Index of first character * @returns {number} - Index of the character after this scalar */ function parse(context, start) { this.context = context; var src = context.src; var offset = Node.endOfIdentifier(src, start + 1); this.valueRange = new Range(start + 1, offset); offset = Node.endOfWhiteSpace(src, offset); offset = this.parseComment(offset); return offset; } }]); return Alias; }(Node); var Chomp = { CLIP: 'CLIP', KEEP: 'KEEP', STRIP: 'STRIP' }; var BlockValue = /*#__PURE__*/function (_Node) { _inherits(BlockValue, _Node); var _super = _createSuper(BlockValue); function BlockValue(type, props) { var _this; _classCallCheck(this, BlockValue); _this = _super.call(this, type, props); _this.blockIndent = null; _this.chomping = Chomp.CLIP; _this.header = null; return _this; } _createClass(BlockValue, [{ key: "includesTrailingLines", get: function get() { return this.chomping === Chomp.KEEP; } }, { key: "strValue", get: function get() { if (!this.valueRange || !this.context) return null; var _this$valueRange = this.valueRange, start = _this$valueRange.start, end = _this$valueRange.end; var _this$context = this.context, indent = _this$context.indent, src = _this$context.src; if (this.valueRange.isEmpty()) return ''; var lastNewLine = null; var ch = src[end - 1]; while (ch === '\n' || ch === '\t' || ch === ' ') { end -= 1; if (end <= 
start) { if (this.chomping === Chomp.KEEP) break;else return ''; // probably never happens } if (ch === '\n') lastNewLine = end; ch = src[end - 1]; } var keepStart = end + 1; if (lastNewLine) { if (this.chomping === Chomp.KEEP) { keepStart = lastNewLine; end = this.valueRange.end; } else { end = lastNewLine; } } var bi = indent + this.blockIndent; var folded = this.type === Type.BLOCK_FOLDED; var atStart = true; var str = ''; var sep = ''; var prevMoreIndented = false; for (var i = start; i < end; ++i) { for (var j = 0; j < bi; ++j) { if (src[i] !== ' ') break; i += 1; } var _ch = src[i]; if (_ch === '\n') { if (sep === '\n') str += '\n';else sep = '\n'; } else { var lineEnd = Node.endOfLine(src, i); var line = src.slice(i, lineEnd); i = lineEnd; if (folded && (_ch === ' ' || _ch === '\t') && i < keepStart) { if (sep === ' ') sep = '\n';else if (!prevMoreIndented && !atStart && sep === '\n') sep = '\n\n'; str += sep + line; //+ ((lineEnd < end && src[lineEnd]) || '') sep = lineEnd < end && src[lineEnd] || ''; prevMoreIndented = true; } else { str += sep + line; sep = folded && i < keepStart ? ' ' : '\n'; prevMoreIndented = false; } if (atStart && line !== '') atStart = false; } } return this.chomping === Chomp.STRIP ? 
str : str + '\n'; } }, { key: "parseBlockHeader", value: function parseBlockHeader(start) { var src = this.context.src; var offset = start + 1; var bi = ''; while (true) { var ch = src[offset]; switch (ch) { case '-': this.chomping = Chomp.STRIP; break; case '+': this.chomping = Chomp.KEEP; break; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': bi += ch; break; default: this.blockIndent = Number(bi) || null; this.header = new Range(start, offset); return offset; } offset += 1; } } }, { key: "parseBlockValue", value: function parseBlockValue(start) { var _this$context2 = this.context, indent = _this$context2.indent, src = _this$context2.src; var explicit = !!this.blockIndent; var offset = start; var valueEnd = start; var minBlockIndent = 1; for (var ch = src[offset]; ch === '\n'; ch = src[offset]) { offset += 1; if (Node.atDocumentBoundary(src, offset)) break; var end = Node.endOfBlockIndent(src, indent, offset); // should not include tab? if (end === null) break; var _ch2 = src[end]; var lineIndent = end - (offset + indent); if (!this.blockIndent) { // no explicit block indent, none yet detected if (src[end] !== '\n') { // first line with non-whitespace content if (lineIndent < minBlockIndent) { var msg = 'Block scalars with more-indented leading empty lines must use an explicit indentation indicator'; this.error = new YAMLSemanticError(this, msg); } this.blockIndent = lineIndent; } else if (lineIndent > minBlockIndent) { // empty line with more whitespace minBlockIndent = lineIndent; } } else if (_ch2 && _ch2 !== '\n' && lineIndent < this.blockIndent) { if (src[end] === '#') break; if (!this.error) { var _src = explicit ? 
'explicit indentation indicator' : 'first line'; var _msg = "Block scalars must not be less indented than their ".concat(_src); this.error = new YAMLSemanticError(this, _msg); } } if (src[end] === '\n') { offset = end; } else { offset = valueEnd = Node.endOfLine(src, end); } } if (this.chomping !== Chomp.KEEP) { offset = src[valueEnd] ? valueEnd + 1 : valueEnd; } this.valueRange = new Range(start + 1, offset); return offset; } /** * Parses a block value from the source * * Accepted forms are: * ``` * BS * block * lines * * BS #comment * block * lines * ``` * where the block style BS matches the regexp `[|>][-+1-9]*` and block lines * are empty or have an indent level greater than `indent`. * * @param {ParseContext} context * @param {number} start - Index of first character * @returns {number} - Index of the character after this block */ }, { key: "parse", value: function parse(context, start) { this.context = context; var src = context.src; var offset = this.parseBlockHeader(start); offset = Node.endOfWhiteSpace(src, offset); offset = this.parseComment(offset); offset = this.parseBlockValue(offset); return offset; } }, { key: "setOrigRanges", value: function setOrigRanges(cr, offset) { offset = _get(_getPrototypeOf(BlockValue.prototype), "setOrigRanges", this).call(this, cr, offset); return this.header ? this.header.setOrigRange(cr, offset) : offset; } }]); return BlockValue; }(Node); var FlowCollection = /*#__PURE__*/function (_Node) { _inherits(FlowCollection, _Node); var _super = _createSuper(FlowCollection); function FlowCollection(type, props) { var _this; _classCallCheck(this, FlowCollection); _this = _super.call(this, type, props); _this.items = null; return _this; } _createClass(FlowCollection, [{ key: "prevNodeIsJsonLike", value: function prevNodeIsJsonLike() { var idx = arguments.length > 0 && arguments[0] !== undefined ? 
arguments[0] : this.items.length; var node = this.items[idx - 1]; return !!node && (node.jsonLike || node.type === Type.COMMENT && this.prevNodeIsJsonLike(idx - 1)); } /** * @param {ParseContext} context * @param {number} start - Index of first character * @returns {number} - Index of the character after this */ }, { key: "parse", value: function parse(context, start) { this.context = context; var parseNode = context.parseNode, src = context.src; var indent = context.indent, lineStart = context.lineStart; var char = src[start]; // { or [ this.items = [{ char: char, offset: start }]; var offset = Node.endOfWhiteSpace(src, start + 1); char = src[offset]; while (char && char !== ']' && char !== '}') { switch (char) { case '\n': { lineStart = offset + 1; var wsEnd = Node.endOfWhiteSpace(src, lineStart); if (src[wsEnd] === '\n') { var blankLine = new BlankLine(); lineStart = blankLine.parse({ src: src }, lineStart); this.items.push(blankLine); } offset = Node.endOfIndent(src, lineStart); if (offset <= lineStart + indent) { char = src[offset]; if (offset < lineStart + indent || char !== ']' && char !== '}') { var msg = 'Insufficient indentation in flow collection'; this.error = new YAMLSemanticError(this, msg); } } } break; case ',': { this.items.push({ char: char, offset: offset }); offset += 1; } break; case '#': { var comment = new Comment(); offset = comment.parse({ src: src }, offset); this.items.push(comment); } break; case '?': case ':': { var next = src[offset + 1]; if (next === '\n' || next === '\t' || next === ' ' || next === ',' || // in-flow : after JSON-like key does not need to be followed by whitespace char === ':' && this.prevNodeIsJsonLike()) { this.items.push({ char: char, offset: offset }); offset += 1; break; } } // fallthrough default: { var node = parseNode({ atLineStart: false, inCollection: false, inFlow: true, indent: -1, lineStart: lineStart, parent: this }, offset); if (!node) { // at next document start this.valueRange = new Range(start, 
offset); return offset; } this.items.push(node); offset = Node.normalizeOffset(src, node.range.end); } } offset = Node.endOfWhiteSpace(src, offset); char = src[offset]; } this.valueRange = new Range(start, offset + 1); if (char) { this.items.push({ char: char, offset: offset }); offset = Node.endOfWhiteSpace(src, offset + 1); offset = this.parseComment(offset); } return offset; } }, { key: "setOrigRanges", value: function setOrigRanges(cr, offset) { offset = _get(_getPrototypeOf(FlowCollection.prototype), "setOrigRanges", this).call(this, cr, offset); this.items.forEach(function (node) { if (node instanceof Node) { offset = node.setOrigRanges(cr, offset); } else if (cr.length === 0) { node.origOffset = node.offset; } else { var i = offset; while (i < cr.length) { if (cr[i] > node.offset) break;else ++i; } node.origOffset = node.offset + i; offset = i; } }); return offset; } }, { key: "toString", value: function toString() { var src = this.context.src, items = this.items, range = this.range, value = this.value; if (value != null) return value; var nodes = items.filter(function (item) { return item instanceof Node; }); var str = ''; var prevEnd = range.start; nodes.forEach(function (node) { var prefix = src.slice(prevEnd, node.range.start); prevEnd = node.range.end; str += prefix + String(node); if (str[str.length - 1] === '\n' && src[prevEnd - 1] !== '\n' && src[prevEnd] === '\n') { // Comment range does not include the terminal newline, but its // stringified value does. Without this fix, newlines at comment ends // get duplicated. 
prevEnd += 1; } }); str += src.slice(prevEnd, range.end); return Node.addStringTerminator(src, range.end, str); } }]); return FlowCollection; }(Node); var QuoteDouble = /*#__PURE__*/function (_Node) { _inherits(QuoteDouble, _Node); var _super = _createSuper(QuoteDouble); function QuoteDouble() { _classCallCheck(this, QuoteDouble); return _super.apply(this, arguments); } _createClass(QuoteDouble, [{ key: "strValue", get: /** * @returns {string | { str: string, errors: YAMLSyntaxError[] }} */ function get() { if (!this.valueRange || !this.context) return null; var errors = []; var _this$valueRange = this.valueRange, start = _this$valueRange.start, end = _this$valueRange.end; var _this$context = this.context, indent = _this$context.indent, src = _this$context.src; if (src[end - 1] !== '"') errors.push(new YAMLSyntaxError(this, 'Missing closing "quote')); // Using String#replace is too painful with escaped newlines preceded by // escaped backslashes; also, this should be faster. var str = ''; for (var i = start + 1; i < end - 1; ++i) { var ch = src[i]; if (ch === '\n') { if (Node.atDocumentBoundary(src, i + 1)) errors.push(new YAMLSemanticError(this, 'Document boundary indicators are not allowed within string values')); var _Node$foldNewline = Node.foldNewline(src, i, indent), fold = _Node$foldNewline.fold, offset = _Node$foldNewline.offset, error = _Node$foldNewline.error; str += fold; i = offset; if (error) errors.push(new YAMLSemanticError(this, 'Multi-line double-quoted string needs to be sufficiently indented')); } else if (ch === '\\') { i += 1; switch (src[i]) { case '0': str += '\0'; break; // null character case 'a': str += '\x07'; break; // bell character case 'b': str += '\b'; break; // backspace case 'e': str += '\x1b'; break; // escape character case 'f': str += '\f'; break; // form feed case 'n': str += '\n'; break; // line feed case 'r': str += '\r'; break; // carriage return case 't': str += '\t'; break; // horizontal tab case 'v': str += '\v'; break; 
// vertical tab case 'N': str += "\x85"; break; // Unicode next line case '_': str += "\xA0"; break; // Unicode non-breaking space case 'L': str += "\u2028"; break; // Unicode line separator case 'P': str += "\u2029"; break; // Unicode paragraph separator case ' ': str += ' '; break; case '"': str += '"'; break; case '/': str += '/'; break; case '\\': str += '\\'; break; case '\t': str += '\t'; break; case 'x': str += this.parseCharCode(i + 1, 2, errors); i += 2; break; case 'u': str += this.parseCharCode(i + 1, 4, errors); i += 4; break; case 'U': str += this.parseCharCode(i + 1, 8, errors); i += 8; break; case '\n': // skip escaped newlines, but still trim the following line while (src[i + 1] === ' ' || src[i + 1] === '\t') { i += 1; } break; default: errors.push(new YAMLSyntaxError(this, "Invalid escape sequence ".concat(src.substr(i - 1, 2)))); str += '\\' + src[i]; } } else if (ch === ' ' || ch === '\t') { // trim trailing whitespace var wsStart = i; var next = src[i + 1]; while (next === ' ' || next === '\t') { i += 1; next = src[i + 1]; } if (next !== '\n') str += i > wsStart ? src.slice(wsStart, i + 1) : ch; } else { str += ch; } } return errors.length > 0 ? { errors: errors, str: str } : str; } }, { key: "parseCharCode", value: function parseCharCode(offset, length, errors) { var src = this.context.src; var cc = src.substr(offset, length); var ok = cc.length === length && /^[0-9a-fA-F]+$/.test(cc); var code = ok ? 
parseInt(cc, 16) : NaN; if (isNaN(code)) { errors.push(new YAMLSyntaxError(this, "Invalid escape sequence ".concat(src.substr(offset - 2, length + 2)))); return src.substr(offset - 2, length + 2); } return String.fromCodePoint(code); } /** * Parses a "double quoted" value from the source * * @param {ParseContext} context * @param {number} start - Index of first character * @returns {number} - Index of the character after this scalar */ }, { key: "parse", value: function parse(context, start) { this.context = context; var src = context.src; var offset = QuoteDouble.endOfQuote(src, start + 1); this.valueRange = new Range(start, offset); offset = Node.endOfWhiteSpace(src, offset); offset = this.parseComment(offset); return offset; } }], [{ key: "endOfQuote", value: function endOfQuote(src, offset) { var ch = src[offset]; while (ch && ch !== '"') { offset += ch === '\\' ? 2 : 1; ch = src[offset]; } return offset + 1; } }]); return QuoteDouble; }(Node); var QuoteSingle = /*#__PURE__*/function (_Node) { _inherits(QuoteSingle, _Node); var _super = _createSuper(QuoteSingle); function QuoteSingle() { _classCallCheck(this, QuoteSingle); return _super.apply(this, arguments); } _createClass(QuoteSingle, [{ key: "strValue", get: /** * @returns {string | { str: string, errors: YAMLSyntaxError[] }} */ function get() { if (!this.valueRange || !this.context) return null; var errors = []; var _this$valueRange = this.valueRange, start = _this$valueRange.start, end = _this$valueRange.end; var _this$context = this.context, indent = _this$context.indent, src = _this$context.src; if (src[end - 1] !== "'") errors.push(new YAMLSyntaxError(this, "Missing closing 'quote")); var str = ''; for (var i = start + 1; i < end - 1; ++i) { var ch = src[i]; if (ch === '\n') { if (Node.atDocumentBoundary(src, i + 1)) errors.push(new YAMLSemanticError(this, 'Document boundary indicators are not allowed within string values')); var _Node$foldNewline = Node.foldNewline(src, i, indent), fold = 
_Node$foldNewline.fold, offset = _Node$foldNewline.offset, error = _Node$foldNewline.error; str += fold; i = offset; if (error) errors.push(new YAMLSemanticError(this, 'Multi-line single-quoted string needs to be sufficiently indented')); } else if (ch === "'") { str += ch; i += 1; if (src[i] !== "'") errors.push(new YAMLSyntaxError(this, 'Unescaped single quote? This should not happen.')); } else if (ch === ' ' || ch === '\t') { // trim trailing whitespace var wsStart = i; var next = src[i + 1]; while (next === ' ' || next === '\t') { i += 1; next = src[i + 1]; } if (next !== '\n') str += i > wsStart ? src.slice(wsStart, i + 1) : ch; } else { str += ch; } } return errors.length > 0 ? { errors: errors, str: str } : str; } /** * Parses a 'single quoted' value from the source * * @param {ParseContext} context * @param {number} start - Index of first character * @returns {number} - Index of the character after this scalar */ }, { key: "parse", value: function parse(context, start) { this.context = context; var src = context.src; var offset = QuoteSingle.endOfQuote(src, start + 1); this.valueRange = new Range(start, offset); offset = Node.endOfWhiteSpace(src, offset); offset = this.parseComment(offset); return offset; } }], [{ key: "endOfQuote", value: function endOfQuote(src, offset) { var ch = src[offset]; while (ch) { if (ch === "'") { if (src[offset + 1] !== "'") break; ch = src[offset += 2]; } else { ch = src[offset += 1]; } } return offset + 1; } }]); return QuoteSingle; }(Node); function createNewNode(type, props) { switch (type) { case Type.ALIAS: return new Alias(type, props); case Type.BLOCK_FOLDED: case Type.BLOCK_LITERAL: return new BlockValue(type, props); case Type.FLOW_MAP: case Type.FLOW_SEQ: return new FlowCollection(type, props); case Type.MAP_KEY: case Type.MAP_VALUE: case Type.SEQ_ITEM: return new CollectionItem(type, props); case Type.COMMENT: case Type.PLAIN: return new PlainValue(type, props); case Type.QUOTE_DOUBLE: return new QuoteDouble(type, 
props); case Type.QUOTE_SINGLE: return new QuoteSingle(type, props); /* istanbul ignore next */ default: return null; // should never happen } } /** * @param {boolean} atLineStart - Node starts at beginning of line * @param {boolean} inFlow - true if currently in a flow context * @param {boolean} inCollection - true if currently in a collection context * @param {number} indent - Current level of indentation * @param {number} lineStart - Start of the current line * @param {Node} parent - The parent of the node * @param {string} src - Source of the YAML document */ var ParseContext = /*#__PURE__*/function () { function ParseContext() { var _this = this; var orig = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, atLineStart = _ref.atLineStart, inCollection = _ref.inCollection, inFlow = _ref.inFlow, indent = _ref.indent, lineStart = _ref.lineStart, parent = _ref.parent; _classCallCheck(this, ParseContext); _defineProperty(this, "parseNode", function (overlay, start) { if (Node.atDocumentBoundary(_this.src, start)) return null; var context = new ParseContext(_this, overlay); var _context$parseProps = context.parseProps(start), props = _context$parseProps.props, type = _context$parseProps.type, valueStart = _context$parseProps.valueStart; var node = createNewNode(type, props); var offset = node.parse(context, valueStart); node.range = new Range(start, offset); /* istanbul ignore if */ if (offset <= start) { // This should never happen, but if it does, let's make sure to at least // step one character forward to avoid a busy loop. 
node.error = new Error("Node#parse consumed no characters"); node.error.parseEnd = offset; node.error.source = node; node.range.end = start + 1; } if (context.nodeStartsCollection(node)) { if (!node.error && !context.atLineStart && context.parent.type === Type.DOCUMENT) { node.error = new YAMLSyntaxError(node, 'Block collection must not have preceding content here (e.g. directives-end indicator)'); } var collection = new Collection(node); offset = collection.parse(new ParseContext(context), offset); collection.range = new Range(start, offset); return collection; } return node; }); this.atLineStart = atLineStart != null ? atLineStart : orig.atLineStart || false; this.inCollection = inCollection != null ? inCollection : orig.inCollection || false; this.inFlow = inFlow != null ? inFlow : orig.inFlow || false; this.indent = indent != null ? indent : orig.indent; this.lineStart = lineStart != null ? lineStart : orig.lineStart; this.parent = parent != null ? parent : orig.parent || {}; this.root = orig.root; this.src = orig.src; } _createClass(ParseContext, [{ key: "nodeStartsCollection", value: function nodeStartsCollection(node) { var inCollection = this.inCollection, inFlow = this.inFlow, src = this.src; if (inCollection || inFlow) return false; if (node instanceof CollectionItem) return true; // check for implicit key var offset = node.range.end; if (src[offset] === '\n' || src[offset - 1] === '\n') return false; offset = Node.endOfWhiteSpace(src, offset); return src[offset] === ':'; } // Anchor and tag are before type, which determines the node implementation // class; hence this intermediate step. }, { key: "parseProps", value: function parseProps(offset) { var inFlow = this.inFlow, parent = this.parent, src = this.src; var props = []; var lineHasProps = false; offset = this.atLineStart ? 
Node.endOfIndent(src, offset) : Node.endOfWhiteSpace(src, offset); var ch = src[offset]; while (ch === Char.ANCHOR || ch === Char.COMMENT || ch === Char.TAG || ch === '\n') { if (ch === '\n') { var inEnd = offset; var lineStart = void 0; do { lineStart = inEnd + 1; inEnd = Node.endOfIndent(src, lineStart); } while (src[inEnd] === '\n'); var indentDiff = inEnd - (lineStart + this.indent); var noIndicatorAsIndent = parent.type === Type.SEQ_ITEM && parent.context.atLineStart; if (src[inEnd] !== '#' && !Node.nextNodeIsIndented(src[inEnd], indentDiff, !noIndicatorAsIndent)) break; this.atLineStart = true; this.lineStart = lineStart; lineHasProps = false; offset = inEnd; } else if (ch === Char.COMMENT) { var end = Node.endOfLine(src, offset + 1); props.push(new Range(offset, end)); offset = end; } else { var _end = Node.endOfIdentifier(src, offset + 1); if (ch === Char.TAG && src[_end] === ',' && /^[a-zA-Z0-9-]+\.[a-zA-Z0-9-]+,\d\d\d\d(-\d\d){0,2}\/\S/.test(src.slice(offset + 1, _end + 13))) { // Let's presume we're dealing with a YAML 1.0 domain tag here, rather // than an empty but 'foo.bar' private-tagged node in a flow collection // followed without whitespace by a plain string starting with a year // or date divided by something. 
_end = Node.endOfIdentifier(src, _end + 5); } props.push(new Range(offset, _end)); lineHasProps = true; offset = Node.endOfWhiteSpace(src, _end); } ch = src[offset]; } // '- &a : b' has an anchor on an empty node if (lineHasProps && ch === ':' && Node.atBlank(src, offset + 1, true)) offset -= 1; var type = ParseContext.parseType(src, offset, inFlow); return { props: props, type: type, valueStart: offset }; } /** * Parses a node from the source * @param {ParseContext} overlay * @param {number} start - Index of first non-whitespace character for the node * @returns {?Node} - null if at a document boundary */ }], [{ key: "parseType", value: function parseType(src, offset, inFlow) { switch (src[offset]) { case '*': return Type.ALIAS; case '>': return Type.BLOCK_FOLDED; case '|': return Type.BLOCK_LITERAL; case '{': return Type.FLOW_MAP; case '[': return Type.FLOW_SEQ; case '?': return !inFlow && Node.atBlank(src, offset + 1, true) ? Type.MAP_KEY : Type.PLAIN; case ':': return !inFlow && Node.atBlank(src, offset + 1, true) ? Type.MAP_VALUE : Type.PLAIN; case '-': return !inFlow && Node.atBlank(src, offset + 1, true) ? 
Type.SEQ_ITEM : Type.PLAIN; case '"': return Type.QUOTE_DOUBLE; case "'": return Type.QUOTE_SINGLE; default: return Type.PLAIN; } } }]); return ParseContext; }(); // Published as 'yaml/parse-cst' function parse(src) { var cr = []; if (src.indexOf('\r') !== -1) { src = src.replace(/\r\n?/g, function (match, offset) { if (match.length > 1) cr.push(offset); return '\n'; }); } var documents = []; var offset = 0; do { var doc = new Document(); var context = new ParseContext({ src: src }); offset = doc.parse(context, offset); documents.push(doc); } while (offset < src.length); documents.setOrigRanges = function () { if (cr.length === 0) return false; for (var i = 1; i < cr.length; ++i) { cr[i] -= i; } var crOffset = 0; for (var _i = 0; _i < documents.length; ++_i) { crOffset = documents[_i].setOrigRanges(cr, crOffset); } cr.splice(0, cr.length); return true; }; documents.toString = function () { return documents.join('...\n'); }; return documents; } export { parse };
GoogleCloudPlatform/prometheus-engine
third_party/prometheus_ui/base/web/ui/react-app/node_modules/yaml/browser/dist/parse-cst.js
JavaScript
apache-2.0
54,633
//-----------------------------------------------------------------------
// <copyright file="AkkaSpecExtensions.cs" company="Akka.NET Project">
//     Copyright (C) 2009-2016 Lightbend Inc. <http://www.lightbend.com>
//     Copyright (C) 2013-2016 Akka.NET project <https://github.com/akkadotnet/akka.net>
// </copyright>
//-----------------------------------------------------------------------

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Akka.Util.Internal;
using Xunit;

// ReSharper disable once CheckNamespace
namespace Akka.TestKit
{
    /// <summary>
    /// Fluent assertion extension methods used throughout the Akka.NET test suites.
    /// Every helper ultimately delegates to xUnit's <see cref="Assert"/> class, so a
    /// failed expectation surfaces as a normal xUnit assertion failure.
    /// </summary>
    public static class AkkaSpecExtensions
    {
        /// <summary>Asserts that <paramref name="self"/> satisfies the supplied predicate.</summary>
        /// <param name="message">Optional failure message; a default message containing the value is used when null.</param>
        public static void Should<T>(this T self, Func<T, bool> isValid, string message)
        {
            Assert.True(isValid(self), message ?? "Value did not meet criteria. Value: " + self);
        }

        /// <summary>Asserts that the collection contains exactly <paramref name="expectedCount"/> items.</summary>
        public static void ShouldHaveCount<T>(this IReadOnlyCollection<T> self, int expectedCount)
        {
            Assert.Equal(expectedCount, self.Count);
        }

        /// <summary>Asserts that both sequences contain equal elements in the same order.</summary>
        public static void ShouldBe<T>(this IEnumerable<T> self, IEnumerable<T> other)
        {
            Assert.True(self.SequenceEqual(other),
                "Expected " + other.Select(i => string.Format("'{0}'", i)).Join(",")
                + " got " + self.Select(i => string.Format("'{0}'", i)).Join(","));
        }

        /// <summary>Asserts equality via <see cref="Assert"/>.Equal.
        /// NOTE: xUnit's Assert.Equal has no user-message overload, so <paramref name="message"/> is unused here.</summary>
        public static void ShouldBe<T>(this T self, T expected, string message = null)
        {
            Assert.Equal(expected, self);
        }

        /// <summary>Asserts inequality via <see cref="Assert"/>.NotEqual (message parameter unused — no xUnit overload).</summary>
        public static void ShouldNotBe<T>(this T self, T expected, string message = null)
        {
            Assert.NotEqual(expected, self);
        }

        /// <summary>Asserts reference identity (message parameter unused — no xUnit overload).</summary>
        public static void ShouldBeSame<T>(this T self, T expected, string message = null)
        {
            Assert.Same(expected, self);
        }

        /// <summary>Asserts distinct references (message parameter unused — no xUnit overload).</summary>
        public static void ShouldNotBeSame<T>(this T self, T expected, string message = null)
        {
            Assert.NotSame(expected, self);
        }

        /// <summary>Asserts that <paramref name="b"/> is true.</summary>
        public static void ShouldBeTrue(this bool b, string message = null)
        {
            // Forward the caller-supplied message (previously silently ignored);
            // xUnit falls back to its default message when it is null.
            Assert.True(b, message);
        }

        /// <summary>Asserts that <paramref name="b"/> is false.</summary>
        public static void ShouldBeFalse(this bool b, string message = null)
        {
            // Forward the caller-supplied message (previously silently ignored).
            Assert.False(b, message);
        }

        /// <summary>Asserts that <paramref name="actual"/> compares strictly less than <paramref name="value"/>.</summary>
        public static void ShouldBeLessThan<T>(this T actual, T value, string message = null) where T : IComparable<T>
        {
            var comparisonResult = actual.CompareTo(value);
            Assert.True(comparisonResult < 0, message ?? "Expected Actual: " + actual + " to be less than " + value);
        }

        /// <summary>Asserts that <paramref name="actual"/> compares less than or equal to <paramref name="value"/>.</summary>
        public static void ShouldBeLessOrEqualTo<T>(this T actual, T value, string message = null) where T : IComparable<T>
        {
            var comparisonResult = actual.CompareTo(value);
            // BUGFIX: failure message previously claimed "less than" instead of "less than or equal to".
            Assert.True(comparisonResult <= 0, message ?? "Expected Actual: " + actual + " to be less than or equal to " + value);
        }

        /// <summary>Asserts that <paramref name="actual"/> compares strictly greater than <paramref name="value"/>.</summary>
        public static void ShouldBeGreaterThan<T>(this T actual, T value, string message = null) where T : IComparable<T>
        {
            var comparisonResult = actual.CompareTo(value);
            // BUGFIX: failure message previously claimed "less than" (copy-paste error).
            Assert.True(comparisonResult > 0, message ?? "Expected Actual: " + actual + " to be greater than " + value);
        }

        /// <summary>Asserts that <paramref name="actual"/> compares greater than or equal to <paramref name="value"/>.</summary>
        public static void ShouldBeGreaterOrEqual<T>(this T actual, T value, string message = null) where T : IComparable<T>
        {
            var comparisonResult = actual.CompareTo(value);
            // BUGFIX: failure message previously claimed "less than" (copy-paste error).
            Assert.True(comparisonResult >= 0, message ?? "Expected Actual: " + actual + " to be greater than or equal to " + value);
        }

        /// <summary>Asserts that <paramref name="s"/> starts with <paramref name="start"/> (message parameter unused).</summary>
        public static void ShouldStartWith(this string s, string start, string message = null)
        {
            // BUGFIX: arguments were swapped; xUnit's Assert.Equal signature is (expected, actual),
            // so the failure report previously labelled the values the wrong way around.
            Assert.Equal(start, s.Substring(0, Math.Min(s.Length, start.Length)));
        }

        /// <summary>Asserts that <paramref name="actual"/> yields exactly <paramref name="expected"/>, in order.</summary>
        public static void ShouldOnlyContainInOrder<T>(this IEnumerable<T> actual, params T[] expected)
        {
            ShouldBe(actual, expected);
        }

        /// <summary>
        /// Awaits <paramref name="func"/> and asserts that it throws exactly <typeparamref name="TException"/>
        /// (compared by runtime type; a missing or different exception fails the assertion).
        /// </summary>
        public static async Task ThrowsAsync<TException>(Func<Task> func)
        {
            var expected = typeof(TException);
            Type actual = null;
            try
            {
                await func();
            }
            catch (Exception e)
            {
                actual = e.GetType();
            }
            Assert.Equal(expected, actual);
        }
    }
}
nanderto/akka.net
src/core/Akka.Tests.Shared.Internals/AkkaSpecExtensions.cs
C#
apache-2.0
4,207
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.util;

import org.junit.Assert;
import org.junit.Test;

import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketTimeoutException;
import java.net.URL;
import java.net.UnknownHostException;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;

import static org.hamcrest.core.IsCollectionContaining.hasItems;
import static org.hamcrest.core.IsNot.not;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;

/**
 * Tests for the {@link NetUtils}.
 *
 * <p>Covers host:port parsing, server-socket accept helpers, IP-address-to-URL
 * formatting (IPv4 and IPv6), port-range string parsing, and host/port
 * normalization.
 */
public class NetUtilsTest extends TestLogger {

    /** getCorrectHostnamePort must yield a URL equal to one built from the same components. */
    @Test
    public void testCorrectHostnamePort() throws Exception {
        final URL url = new URL("http", "foo.com", 8080, "/index.html");
        assertEquals(url, NetUtils.getCorrectHostnamePort("foo.com:8080/index.html"));
    }

    /** parseHostPortAddress must produce an (unresolved-host) InetSocketAddress matching host and port. */
    @Test
    public void testParseHostPortAddress() {
        final InetSocketAddress socketAddress = new InetSocketAddress("foo.com", 8080);
        assertEquals(socketAddress, NetUtils.parseHostPortAddress("foo.com:8080"));
    }

    @Test
    public void testAcceptWithoutTimeoutSuppressesTimeoutException() throws IOException {
        // Validates that acceptWithoutTimeout suppresses all SocketTimeoutExceptions
        // (the stub server socket throws twice before finally returning a socket).
        Socket expected = new Socket();
        ServerSocket serverSocket =
                new ServerSocket() {
                    private int count = 0;

                    @Override
                    public Socket accept() throws IOException {
                        if (count < 2) {
                            count++;
                            throw new SocketTimeoutException();
                        }
                        return expected;
                    }
                };
        assertEquals(expected, NetUtils.acceptWithoutTimeout(serverSocket));
    }

    @Test
    public void testAcceptWithoutTimeoutDefaultTimeout() throws IOException {
        // Default timeout (should be zero) — a fresh ServerSocket must be accepted as-is.
        final Socket expected = new Socket();
        try (final ServerSocket serverSocket =
                new ServerSocket(0) {
                    @Override
                    public Socket accept() {
                        return expected;
                    }
                }) {
            assertEquals(expected, NetUtils.acceptWithoutTimeout(serverSocket));
        }
    }

    @Test
    public void testAcceptWithoutTimeoutZeroTimeout() throws IOException {
        // Explicitly sets a timeout of zero — still accepted, since zero means "no timeout".
        final Socket expected = new Socket();
        try (final ServerSocket serverSocket =
                new ServerSocket(0) {
                    @Override
                    public Socket accept() {
                        return expected;
                    }
                }) {
            serverSocket.setSoTimeout(0);
            assertEquals(expected, NetUtils.acceptWithoutTimeout(serverSocket));
        }
    }

    /** A non-zero SO_TIMEOUT on the socket must be rejected with IllegalArgumentException. */
    @Test(expected = IllegalArgumentException.class)
    public void testAcceptWithoutTimeoutRejectsSocketWithSoTimeout() throws IOException {
        try (final ServerSocket serverSocket = new ServerSocket(0)) {
            serverSocket.setSoTimeout(5);
            NetUtils.acceptWithoutTimeout(serverSocket);
        }
    }

    /** IPv4 addresses are rendered verbatim in URL form. */
    @Test
    public void testIPv4toURL() {
        try {
            final String addressString = "192.168.0.1";
            InetAddress address = InetAddress.getByName(addressString);
            assertEquals(addressString, NetUtils.ipAddressToUrlString(address));
        } catch (Exception e) {
            e.printStackTrace();
            fail(e.getMessage());
        }
    }

    /** IPv6 addresses are normalized (zero-run compression, stripped leading zeros) and bracketed. */
    @Test
    public void testIPv6toURL() {
        try {
            final String addressString = "2001:01db8:00:0:00:ff00:42:8329";
            final String normalizedAddress = "[2001:1db8::ff00:42:8329]";
            InetAddress address = InetAddress.getByName(addressString);
            assertEquals(normalizedAddress, NetUtils.ipAddressToUrlString(address));
        } catch (Exception e) {
            e.printStackTrace();
            fail(e.getMessage());
        }
    }

    /** IPv4 address + port URL encoding: plain "addr:port", no brackets. */
    @Test
    public void testIPv4URLEncoding() {
        try {
            final String addressString = "10.244.243.12";
            final int port = 23453;
            InetAddress address = InetAddress.getByName(addressString);
            InetSocketAddress socketAddress = new InetSocketAddress(address, port);
            assertEquals(addressString, NetUtils.ipAddressToUrlString(address));
            assertEquals(
                    addressString + ':' + port,
                    NetUtils.ipAddressAndPortToUrlString(address, port));
            assertEquals(
                    addressString + ':' + port, NetUtils.socketAddressToUrlString(socketAddress));
        } catch (Exception e) {
            e.printStackTrace();
            fail(e.getMessage());
        }
    }

    /** IPv6 address + port URL encoding: address must be wrapped in square brackets. */
    @Test
    public void testIPv6URLEncoding() {
        try {
            final String addressString = "2001:db8:10:11:12:ff00:42:8329";
            final String bracketedAddressString = '[' + addressString + ']';
            final int port = 23453;
            InetAddress address = InetAddress.getByName(addressString);
            InetSocketAddress socketAddress = new InetSocketAddress(address, port);
            assertEquals(bracketedAddressString, NetUtils.ipAddressToUrlString(address));
            assertEquals(
                    bracketedAddressString + ':' + port,
                    NetUtils.ipAddressAndPortToUrlString(address, port));
            assertEquals(
                    bracketedAddressString + ':' + port,
                    NetUtils.socketAddressToUrlString(socketAddress));
        } catch (Exception e) {
            e.printStackTrace();
            fail(e.getMessage());
        }
    }

    /**
     * getPortRangeFromString: accepts comma-separated ranges and single ports (with
     * surrounding whitespace), yields each port exactly once, and rejects malformed
     * input with NumberFormatException.
     */
    @Test
    public void testFreePortRangeUtility() {
        // inspired by Hadoop's example for "yarn.app.mapreduce.am.job.client.port-range"
        String rangeDefinition = "50000-50050, 50100-50200,51234 "; // this also contains some whitespaces
        Iterator<Integer> portsIter = NetUtils.getPortRangeFromString(rangeDefinition);
        Set<Integer> ports = new HashSet<>();
        while (portsIter.hasNext()) {
            Assert.assertTrue("Duplicate element", ports.add(portsIter.next()));
        }
        // 51 ports in the first range, 101 in the second, plus the single port
        Assert.assertEquals(51 + 101 + 1, ports.size());
        // check first range
        Assert.assertThat(ports, hasItems(50000, 50001, 50002, 50050));
        // check second range and last point
        Assert.assertThat(ports, hasItems(50100, 50101, 50110, 50200, 51234));
        // check that only ranges are included
        Assert.assertThat(ports, not(hasItems(50051, 50052, 1337, 50201, 49999, 50099)));

        // test single port "range":
        portsIter = NetUtils.getPortRangeFromString(" 51234");
        Assert.assertTrue(portsIter.hasNext());
        Assert.assertEquals(51234, (int) portsIter.next());
        Assert.assertFalse(portsIter.hasNext());

        // test port list
        portsIter = NetUtils.getPortRangeFromString("5,1,2,3,4");
        Assert.assertTrue(portsIter.hasNext());
        Assert.assertEquals(5, (int) portsIter.next());
        Assert.assertEquals(1, (int) portsIter.next());
        Assert.assertEquals(2, (int) portsIter.next());
        Assert.assertEquals(3, (int) portsIter.next());
        Assert.assertEquals(4, (int) portsIter.next());
        Assert.assertFalse(portsIter.hasNext());

        Throwable error = null;

        // try some wrong values: String
        try {
            NetUtils.getPortRangeFromString("localhost");
        } catch (Throwable t) {
            error = t;
        }
        Assert.assertTrue(error instanceof NumberFormatException);
        error = null;

        // incomplete range
        try {
            NetUtils.getPortRangeFromString("5-");
        } catch (Throwable t) {
            error = t;
        }
        Assert.assertTrue(error instanceof NumberFormatException);
        error = null;

        // incomplete range
        try {
            NetUtils.getPortRangeFromString("-5");
        } catch (Throwable t) {
            error = t;
        }
        Assert.assertTrue(error instanceof NumberFormatException);
        error = null;

        // empty range
        try {
            NetUtils.getPortRangeFromString(",5");
        } catch (Throwable t) {
            error = t;
        }
        Assert.assertTrue(error instanceof NumberFormatException);
    }

    /**
     * unresolvedHostAndPortToNormalizedString: trims whitespace, lower-cases hostnames,
     * normalizes IPv6 (with or without brackets), and rejects illegal hosts and ports.
     */
    @Test
    public void testFormatAddress() throws UnknownHostException {
        {
            // IPv4
            String host = "1.2.3.4";
            int port = 42;
            Assert.assertEquals(
                    host + ":" + port, NetUtils.unresolvedHostAndPortToNormalizedString(host, port));
        }
        {
            // IPv6
            String host = "2001:0db8:85a3:0000:0000:8a2e:0370:7334";
            int port = 42;
            Assert.assertEquals(
                    "[2001:db8:85a3::8a2e:370:7334]:" + port,
                    NetUtils.unresolvedHostAndPortToNormalizedString(host, port));
        }
        {
            // [IPv6]
            String host = "[2001:0db8:85a3:0000:0000:8a2e:0370:7334]";
            int port = 42;
            Assert.assertEquals(
                    "[2001:db8:85a3::8a2e:370:7334]:" + port,
                    NetUtils.unresolvedHostAndPortToNormalizedString(host, port));
        }
        {
            // Hostnames
            String host = "somerandomhostname";
            int port = 99;
            Assert.assertEquals(
                    host + ":" + port, NetUtils.unresolvedHostAndPortToNormalizedString(host, port));
        }
        {
            // Whitespace
            String host = " somerandomhostname ";
            int port = 99;
            Assert.assertEquals(
                    host.trim() + ":" + port,
                    NetUtils.unresolvedHostAndPortToNormalizedString(host, port));
        }
        {
            // Illegal hostnames
            String host = "illegalhost.";
            int port = 42;
            try {
                NetUtils.unresolvedHostAndPortToNormalizedString(host, port);
                fail();
            } catch (Exception ignored) {
            }
            // Illegal hostnames
            host = "illegalhost:fasf";
            try {
                NetUtils.unresolvedHostAndPortToNormalizedString(host, port);
                fail();
            } catch (Exception ignored) {
            }
        }
        {
            // Illegal port ranges
            String host = "1.2.3.4";
            int port = -1;
            try {
                NetUtils.unresolvedHostAndPortToNormalizedString(host, port);
                fail();
            } catch (Exception ignored) {
            }
        }
        {
            // lower case conversion of hostnames
            String host = "CamelCaseHostName";
            int port = 99;
            Assert.assertEquals(
                    host.toLowerCase() + ":" + port,
                    NetUtils.unresolvedHostAndPortToNormalizedString(host, port));
        }
    }
}
apache/flink
flink-core/src/test/java/org/apache/flink/util/NetUtilsTest.java
Java
apache-2.0
12,115
// // Copyright (c) Microsoft and contributors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // // See the License for the specific language governing permissions and // limitations under the License. // // Warning: This code was generated by a tool. // // Changes to this file may cause incorrect behavior and will be lost if the // code is regenerated. using System; using System.Linq; using System.Threading; using System.Threading.Tasks; using Microsoft.Azure; using Microsoft.Azure.Management.ApiManagement; using Microsoft.Azure.Management.ApiManagement.Models; namespace Microsoft.Azure.Management.ApiManagement { /// <summary> /// .Net client wrapper for the REST API for Azure ApiManagement Service /// </summary> public static partial class ResourceProviderOperationsExtensions { /// <summary> /// Backs up an Api Management service. /// </summary> /// <param name='operations'> /// Reference to the /// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations. /// </param> /// <param name='resourceGroupName'> /// Required. The name of the resource group. /// </param> /// <param name='name'> /// Required. The name of the Api Management service. /// </param> /// <param name='parameters'> /// Required. Parameters supplied to the Backup operation. /// </param> /// <returns> /// The response of the CreateOrUpdate Api Management service long /// running operation. 
/// </returns> public static ApiServiceLongRunningOperationResponse Backup(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceBackupRestoreParameters parameters) { return Task.Factory.StartNew((object s) => { return ((IResourceProviderOperations)s).BackupAsync(resourceGroupName, name, parameters); } , operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult(); } /// <summary> /// Backs up an Api Management service. /// </summary> /// <param name='operations'> /// Reference to the /// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations. /// </param> /// <param name='resourceGroupName'> /// Required. The name of the resource group. /// </param> /// <param name='name'> /// Required. The name of the Api Management service. /// </param> /// <param name='parameters'> /// Required. Parameters supplied to the Backup operation. /// </param> /// <returns> /// The response of the CreateOrUpdate Api Management service long /// running operation. /// </returns> public static Task<ApiServiceLongRunningOperationResponse> BackupAsync(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceBackupRestoreParameters parameters) { return operations.BackupAsync(resourceGroupName, name, parameters, CancellationToken.None); } /// <summary> /// Begin backup operation of an Api Management service.To determine /// whether the operation has finished processing the request, call /// GetLongRunningOperationStatus. /// </summary> /// <param name='operations'> /// Reference to the /// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations. /// </param> /// <param name='resourceGroupName'> /// Required. The name of the resource group. /// </param> /// <param name='name'> /// Required. The name of the Api Management service. /// </param> /// <param name='parameters'> /// Required. Parameters supplied to the BeginBackup operation. 
/// </param>
/// <returns>
/// The response of the CreateOrUpdate Api Management service long
/// running operation.
/// </returns>
        public static ApiServiceLongRunningOperationResponse BeginBackup(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceBackupRestoreParameters parameters)
        {
            return RunSync(() => operations.BeginBackupAsync(resourceGroupName, name, parameters));
        }

        /// <summary>
        /// Begin a backup of an Api Management service. Poll
        /// GetLongRunningOperationStatus to determine completion.
        /// </summary>
        /// <returns>The long running operation response.</returns>
        public static Task<ApiServiceLongRunningOperationResponse> BeginBackupAsync(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceBackupRestoreParameters parameters)
        {
            return operations.BeginBackupAsync(resourceGroupName, name, parameters, CancellationToken.None);
        }

        /// <summary>
        /// Begin creating or updating an Api Management service
        /// (synchronous wrapper). Poll
        /// GetApiServiceLongRunningOperationStatus to determine completion.
        /// </summary>
        /// <returns>The long running operation response.</returns>
        public static ApiServiceLongRunningOperationResponse BeginCreatingOrUpdating(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceCreateOrUpdateParameters parameters)
        {
            return RunSync(() => operations.BeginCreatingOrUpdatingAsync(resourceGroupName, name, parameters));
        }

        /// <summary>
        /// Begin creating or updating an Api Management service. Poll
        /// GetApiServiceLongRunningOperationStatus to determine completion.
        /// </summary>
        /// <returns>The long running operation response.</returns>
        public static Task<ApiServiceLongRunningOperationResponse> BeginCreatingOrUpdatingAsync(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceCreateOrUpdateParameters parameters)
        {
            return operations.BeginCreatingOrUpdatingAsync(resourceGroupName, name, parameters, CancellationToken.None);
        }

        /// <summary>
        /// Begin managing (create/update/delete) deployments of an Api
        /// Management service (synchronous wrapper). Poll
        /// GetLongRunningOperationStatus to determine completion.
        /// </summary>
        /// <returns>The long running operation response.</returns>
        public static ApiServiceLongRunningOperationResponse BeginManagingDeployments(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceManageDeploymentsParameters parameters)
        {
            return RunSync(() => operations.BeginManagingDeploymentsAsync(resourceGroupName, name, parameters));
        }

        /// <summary>
        /// Begin managing (create/update/delete) deployments of an Api
        /// Management service. Poll GetLongRunningOperationStatus to
        /// determine completion.
        /// </summary>
        /// <returns>The long running operation response.</returns>
        public static Task<ApiServiceLongRunningOperationResponse> BeginManagingDeploymentsAsync(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceManageDeploymentsParameters parameters)
        {
            return operations.BeginManagingDeploymentsAsync(resourceGroupName, name, parameters, CancellationToken.None);
        }

        /// <summary>
        /// Begin restoring an Api Management service from backup
        /// (synchronous wrapper). Poll GetLongRunningOperationStatus to
        /// determine completion.
        /// </summary>
        /// <returns>The long running operation response.</returns>
        public static ApiServiceLongRunningOperationResponse BeginRestoring(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceBackupRestoreParameters parameters)
        {
            return RunSync(() => operations.BeginRestoringAsync(resourceGroupName, name, parameters));
        }

        /// <summary>
        /// Begin restoring an Api Management service from backup. Poll
        /// GetLongRunningOperationStatus to determine completion.
        /// </summary>
        /// <returns>The long running operation response.</returns>
        public static Task<ApiServiceLongRunningOperationResponse> BeginRestoringAsync(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceBackupRestoreParameters parameters)
        {
            return operations.BeginRestoringAsync(resourceGroupName, name, parameters, CancellationToken.None);
        }

        /// <summary>
        /// Begin updating the hostname of an Api Management service
        /// (synchronous wrapper). Poll GetLongRunningOperationStatus to
        /// determine completion.
        /// </summary>
        /// <returns>The long running operation response.</returns>
        public static ApiServiceLongRunningOperationResponse BeginUpdatingHostname(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceUpdateHostnameParameters parameters)
        {
            return RunSync(() => operations.BeginUpdatingHostnameAsync(resourceGroupName, name, parameters));
        }

        /// <summary>
        /// Begin updating the hostname of an Api Management service. Poll
        /// GetLongRunningOperationStatus to determine completion.
        /// </summary>
        /// <returns>The long running operation response.</returns>
        public static Task<ApiServiceLongRunningOperationResponse> BeginUpdatingHostnameAsync(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceUpdateHostnameParameters parameters)
        {
            return operations.BeginUpdatingHostnameAsync(resourceGroupName, name, parameters, CancellationToken.None);
        }

        /// <summary>
        /// Checks whether a custom host name maps to an Api Management
        /// service (synchronous wrapper).
        /// </summary>
        /// <returns>The response of the CheckCustomHostname operation.</returns>
        public static ApiServiceCheckCustomHostnameResponse CheckCustomHostname(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceCheckCustomHostnameParameters parameters)
        {
            return RunSync(() => operations.CheckCustomHostnameAsync(resourceGroupName, name, parameters));
        }

        /// <summary>
        /// Checks whether a custom host name maps to an Api Management
        /// service.
        /// </summary>
        /// <returns>The response of the CheckCustomHostname operation.</returns>
        public static Task<ApiServiceCheckCustomHostnameResponse> CheckCustomHostnameAsync(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceCheckCustomHostnameParameters parameters)
        {
            return operations.CheckCustomHostnameAsync(resourceGroupName, name, parameters, CancellationToken.None);
        }

        /// <summary>
        /// Checks availability and correctness of a name for an Api
        /// Management service (synchronous wrapper).
        /// </summary>
        /// <returns>Response of the CheckNameAvailability operation.</returns>
        public static ApiServiceNameAvailabilityResponse CheckNameAvailability(this IResourceProviderOperations operations, ApiServiceCheckNameAvailabilityParameters parameters)
        {
            return RunSync(() => operations.CheckNameAvailabilityAsync(parameters));
        }

        /// <summary>
        /// Checks availability and correctness of a name for an Api
        /// Management service.
        /// </summary>
        /// <returns>Response of the CheckNameAvailability operation.</returns>
        public static Task<ApiServiceNameAvailabilityResponse> CheckNameAvailabilityAsync(this IResourceProviderOperations operations, ApiServiceCheckNameAvailabilityParameters parameters)
        {
            return operations.CheckNameAvailabilityAsync(parameters, CancellationToken.None);
        }

        /// <summary>
        /// Creates a new or updates an existing Api Management service
        /// (synchronous wrapper).
        /// </summary>
        /// <returns>The long running operation response.</returns>
        public static ApiServiceLongRunningOperationResponse CreateOrUpdate(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceCreateOrUpdateParameters parameters)
        {
            return RunSync(() => operations.CreateOrUpdateAsync(resourceGroupName, name, parameters));
        }

        /// <summary>
        /// Creates a new or updates an existing Api Management service.
        /// </summary>
        /// <returns>The long running operation response.</returns>
        public static Task<ApiServiceLongRunningOperationResponse> CreateOrUpdateAsync(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceCreateOrUpdateParameters parameters)
        {
            return operations.CreateOrUpdateAsync(resourceGroupName, name, parameters, CancellationToken.None);
        }

        /// <summary>
        /// Deletes an existing Api Management service (synchronous wrapper).
        /// </summary>
        /// <returns>A standard response with HTTP status code and request ID.</returns>
        public static AzureOperationResponse Delete(this IResourceProviderOperations operations, string resourceGroupName, string name)
        {
            return RunSync(() => operations.DeleteAsync(resourceGroupName, name));
        }

        /// <summary>
        /// Deletes an existing Api Management service.
        /// </summary>
        /// <returns>A standard response with HTTP status code and request ID.</returns>
        public static Task<AzureOperationResponse> DeleteAsync(this IResourceProviderOperations operations, string resourceGroupName, string name)
        {
            return operations.DeleteAsync(resourceGroupName, name, CancellationToken.None);
        }

        /// <summary>
        /// Gets an Api Management service resource description
        /// (synchronous wrapper).
        /// </summary>
        /// <returns>The response of the Get Api Management service operation.</returns>
        public static ApiServiceGetResponse Get(this IResourceProviderOperations operations, string resourceGroupName, string name)
        {
            return RunSync(() => operations.GetAsync(resourceGroupName, name));
        }

        /// <summary>
        /// Gets an Api Management service resource description.
        /// </summary>
        /// <returns>The response of the Get Api Management service operation.</returns>
        public static Task<ApiServiceGetResponse> GetAsync(this IResourceProviderOperations operations, string resourceGroupName, string name)
        {
            return operations.GetAsync(resourceGroupName, name, CancellationToken.None);
        }

        /// <summary>
        /// Returns the status of a create-or-update operation (synchronous
        /// wrapper). Unlike GetLongRunningOperationStatus, the response
        /// includes the Api Management service resource description.
        /// </summary>
        /// <param name='operationStatusLink'>Location value returned by the Begin operation.</param>
        /// <returns>The long running operation response.</returns>
        public static ApiServiceLongRunningOperationResponse GetApiServiceLongRunningOperationStatus(this IResourceProviderOperations operations, string operationStatusLink)
        {
            return RunSync(() => operations.GetApiServiceLongRunningOperationStatusAsync(operationStatusLink));
        }

        /// <summary>
        /// Returns the status of a create-or-update operation. Unlike
        /// GetLongRunningOperationStatus, the response includes the Api
        /// Management service resource description.
        /// </summary>
        /// <param name='operationStatusLink'>Location value returned by the Begin operation.</param>
        /// <returns>The long running operation response.</returns>
        public static Task<ApiServiceLongRunningOperationResponse> GetApiServiceLongRunningOperationStatusAsync(this IResourceProviderOperations operations, string operationStatusLink)
        {
            return operations.GetApiServiceLongRunningOperationStatusAsync(operationStatusLink, CancellationToken.None);
        }

        /// <summary>
        /// Returns the status of the specified asynchronous operation
        /// (synchronous wrapper): succeeded, failed, or still in progress.
        /// </summary>
        /// <param name='operationStatusLink'>Location value returned by the Begin operation.</param>
        /// <returns>A standard service response for long running operations.</returns>
        public static LongRunningOperationResponse GetLongRunningOperationStatus(this IResourceProviderOperations operations, string operationStatusLink)
        {
            return RunSync(() => operations.GetLongRunningOperationStatusAsync(operationStatusLink));
        }

        /// <summary>
        /// Returns the status of the specified asynchronous operation:
        /// succeeded, failed, or still in progress.
        /// </summary>
        /// <param name='operationStatusLink'>Location value returned by the Begin operation.</param>
        /// <returns>A standard service response for long running operations.</returns>
        public static Task<LongRunningOperationResponse> GetLongRunningOperationStatusAsync(this IResourceProviderOperations operations, string operationStatusLink)
        {
            return operations.GetLongRunningOperationStatusAsync(operationStatusLink, CancellationToken.None);
        }

        /// <summary>
        /// Gets the SsoToken for an Api Management service (synchronous
        /// wrapper).
        /// </summary>
        /// <returns>The response of the GetSsoToken operation.</returns>
        public static ApiServiceGetSsoTokenResponse GetSsoToken(this IResourceProviderOperations operations, string resourceGroupName, string name)
        {
            return RunSync(() => operations.GetSsoTokenAsync(resourceGroupName, name));
        }

        /// <summary>
        /// Gets the SsoToken for an Api Management service.
        /// </summary>
        /// <returns>The response of the GetSsoToken operation.</returns>
        public static Task<ApiServiceGetSsoTokenResponse> GetSsoTokenAsync(this IResourceProviderOperations operations, string resourceGroupName, string name)
        {
            return operations.GetSsoTokenAsync(resourceGroupName, name, CancellationToken.None);
        }

        /// <summary>
        /// Lists Api Management services (synchronous wrapper). When
        /// resourceGroupName is null, lists all services in the
        /// subscription; otherwise, only those in the given resource group.
        /// </summary>
        /// <returns>The response of the List Api Management services operation.</returns>
        public static ApiServiceListResponse List(this IResourceProviderOperations operations, string resourceGroupName)
        {
            return RunSync(() => operations.ListAsync(resourceGroupName));
        }

        /// <summary>
        /// Lists Api Management services. When resourceGroupName is null,
        /// lists all services in the subscription; otherwise, only those in
        /// the given resource group.
        /// </summary>
        /// <returns>The response of the List Api Management services operation.</returns>
        public static Task<ApiServiceListResponse> ListAsync(this IResourceProviderOperations operations, string resourceGroupName)
        {
            return operations.ListAsync(resourceGroupName, CancellationToken.None);
        }

        /// <summary>
        /// Manages (create/update/delete) deployments of an Api Management
        /// service (synchronous wrapper).
        /// </summary>
        /// <returns>The long running operation response.</returns>
        public static ApiServiceLongRunningOperationResponse ManageDeployments(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceManageDeploymentsParameters parameters)
        {
            return RunSync(() => operations.ManageDeploymentsAsync(resourceGroupName, name, parameters));
        }

        /// <summary>
        /// Manages (create/update/delete) deployments of an Api Management
        /// service.
        /// </summary>
        /// <returns>The long running operation response.</returns>
        public static Task<ApiServiceLongRunningOperationResponse> ManageDeploymentsAsync(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceManageDeploymentsParameters parameters)
        {
            return operations.ManageDeploymentsAsync(resourceGroupName, name, parameters, CancellationToken.None);
        }

        /// <summary>
        /// Restores an Api Management service from backup (synchronous
        /// wrapper).
        /// </summary>
        /// <returns>The long running operation response.</returns>
        public static ApiServiceLongRunningOperationResponse Restore(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceBackupRestoreParameters parameters)
        {
            return RunSync(() => operations.RestoreAsync(resourceGroupName, name, parameters));
        }

        /// <summary>
        /// Restores an Api Management service from backup.
        /// </summary>
        /// <returns>The long running operation response.</returns>
        public static Task<ApiServiceLongRunningOperationResponse> RestoreAsync(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceBackupRestoreParameters parameters)
        {
            return operations.RestoreAsync(resourceGroupName, name, parameters, CancellationToken.None);
        }

        /// <summary>
        /// Updates the hostname of an Api Management service (synchronous
        /// wrapper).
        /// </summary>
        /// <returns>The long running operation response.</returns>
        public static ApiServiceLongRunningOperationResponse UpdateHostname(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceUpdateHostnameParameters parameters)
        {
            return RunSync(() => operations.UpdateHostnameAsync(resourceGroupName, name, parameters));
        }

        /// <summary>
        /// Updates the hostname of an Api Management service.
        /// </summary>
        /// <returns>The long running operation response.</returns>
        public static Task<ApiServiceLongRunningOperationResponse> UpdateHostnameAsync(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceUpdateHostnameParameters parameters)
        {
            return operations.UpdateHostnameAsync(resourceGroupName, name, parameters, CancellationToken.None);
        }

        /// <summary>
        /// Uploads an SSL certificate for an Api Management service
        /// (synchronous wrapper).
        /// </summary>
        /// <returns>The response of the upload SSL certificate operation.</returns>
        public static ApiServiceUploadCertificateResponse UploadCertificate(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceUploadCertificateParameters parameters)
        {
            return RunSync(() => operations.UploadCertificateAsync(resourceGroupName, name, parameters));
        }

        /// <summary>
        /// Uploads an SSL certificate for an Api Management service.
        /// </summary>
        /// <returns>The response of the upload SSL certificate operation.</returns>
        public static Task<ApiServiceUploadCertificateResponse> UploadCertificateAsync(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceUploadCertificateParameters parameters)
        {
            return operations.UploadCertificateAsync(resourceGroupName, name, parameters, CancellationToken.None);
        }

        /// <summary>
        /// Synchronously executes an asynchronous operation and returns its
        /// result. The task is started via Task.Factory.StartNew on
        /// TaskScheduler.Default — matching the original generated pattern —
        /// so the continuation never runs on a caller-supplied
        /// SynchronizationContext, which avoids deadlocks when blocking from
        /// UI or ASP.NET contexts.
        /// </summary>
        private static T RunSync<T>(System.Func<Task<T>> operation)
        {
            return Task.Factory
                .StartNew(operation, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default)
                .Unwrap()
                .GetAwaiter()
                .GetResult();
        }
    }
}
nemanja88/azure-sdk-for-net
src/ResourceManagement/ApiManagement/ApiManagementManagement/Generated/ResourceProviderOperationsExtensions.cs
C#
apache-2.0
48,468
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.netty; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.test.junit5.CamelTestSupport; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static org.junit.jupiter.api.Assertions.assertEquals; @Disabled("This test can be run manually") public class NettyRequestTimeoutIssueTest extends CamelTestSupport { private static final Logger LOG = LoggerFactory.getLogger(NettyProducerHangTest.class); @Override protected RouteBuilder createRouteBuilder() throws Exception { return new RouteBuilder() { @Override public void configure() throws Exception { from("direct:out") .to("netty:tcp://localhost:8080?requestTimeout=5000"); from("netty:tcp://localhost:8080") .to("log:nettyCase?showAll=true&multiline=true"); } }; } @Test public void test() throws Exception { String result = template.requestBody("direct:out", "hello", String.class); assertEquals("hello", result); LOG.info("Sleeping for 20 seconds, and no Netty exception should occur"); Thread.sleep(20000); } }
nikhilvibhav/camel
components/camel-netty/src/test/java/org/apache/camel/component/netty/NettyRequestTimeoutIssueTest.java
Java
apache-2.0
2,106
newparam(:destinationtype) do
  include EasyType

  desc 'The destination type of a jms resource (queue or topic)'

  # Only these two JMS destination kinds are valid for this parameter.
  newvalues(:queue, :topic)

  # Pull this parameter's value out of the raw resource hash discovered
  # on the WebLogic side.
  to_translate_to_resource do |raw_data|
    raw_data['destinationtype']
  end
end
rcompos/biemond-orawls
orawls/lib/puppet/type/wls_jms_security_policy/destinationtype.rb
Ruby
apache-2.0
239
// Copyright The OpenTelemetry Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package otel // import "go.opentelemetry.io/otel" import ( "go.opentelemetry.io/otel/internal/global" "go.opentelemetry.io/otel/trace" ) // Tracer creates a named tracer that implements Tracer interface. // If the name is an empty string then provider uses default name. // // This is short for GetTracerProvider().Tracer(name, opts...) func Tracer(name string, opts ...trace.TracerOption) trace.Tracer { return GetTracerProvider().Tracer(name, opts...) } // GetTracerProvider returns the registered global trace provider. // If none is registered then an instance of NoopTracerProvider is returned. // // Use the trace provider to create a named tracer. E.g. // tracer := otel.GetTracerProvider().Tracer("example.com/foo") // or // tracer := otel.Tracer("example.com/foo") func GetTracerProvider() trace.TracerProvider { return global.TracerProvider() } // SetTracerProvider registers `tp` as the global trace provider. func SetTracerProvider(tp trace.TracerProvider) { global.SetTracerProvider(tp) }
containerd/containerd
vendor/go.opentelemetry.io/otel/trace.go
GO
apache-2.0
1,617
package org.apache.lucene.benchmark.byTask.tasks; /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.util.HashMap; import java.util.Map; import org.apache.lucene.benchmark.byTask.PerfRunData; import org.apache.lucene.index.IndexWriter; /** * Commits the IndexWriter. * */ public class CommitIndexTask extends PerfTask { Map<String,String> commitUserData; public CommitIndexTask(PerfRunData runData) { super(runData); } @Override public boolean supportsParams() { return true; } @Override public void setParams(String params) { super.setParams(params); commitUserData = new HashMap<>(); commitUserData.put(OpenReaderTask.USER_DATA, params); } @Override public int doLogic() throws Exception { IndexWriter iw = getRunData().getIndexWriter(); if (iw != null) { if (commitUserData != null) { iw.setCommitData(commitUserData); } iw.commit(); } return 1; } }
smartan/lucene
src/main/java/org/apache/lucene/benchmark/byTask/tasks/CommitIndexTask.java
Java
apache-2.0
1,724
// IDE inspection fixture: B and C each declare an unrelated default foo(), so a
// class implementing both must override foo(). The inline <error> marker asserts
// the expected highlighting on D; the code itself must stay exactly as written.
interface B { default void foo() {} }
interface C { default void foo() {} }
class <error descr="D inherits unrelated defaults for foo() from types B and C">D</error> implements B, C {}
android-ia/platform_tools_idea
java/java-tests/testData/codeInsight/daemonCodeAnalyzer/lambda/interfaceMethods/InheritUnrelatedDefaults.java
Java
apache-2.0
185
<?php
/**
 * Zend Framework
 *
 * LICENSE
 *
 * This source file is subject to the new BSD license that is bundled
 * with this package in the file LICENSE.txt.
 * It is also available through the world-wide-web at this URL:
 * http://framework.zend.com/license/new-bsd
 * If you did not receive a copy of the license and are unable to
 * obtain it through the world-wide-web, please send an email
 * to license@zend.com so we can send you a copy immediately.
 *
 * @category   Zend
 * @package    Zend_Pdf
 * @copyright  Copyright (c) 2005-2012 Zend Technologies USA Inc. (http://www.zend.com)
 * @license    http://framework.zend.com/license/new-bsd     New BSD License
 * @version    $Id$
 */

/** Zend_Pdf_Color */
require_once 'Zend/Pdf/Color.php';

/**
 * HTML color implementation
 *
 * Factory class which vends Zend_Pdf_Color objects from typical HTML
 * representations: '#rrggbb' hex triplets, '#rgb' shorthand, or one of
 * the well-known color names (black, white, blue, etc.).
 *
 * @category   Zend
 * @package    Zend_Pdf
 * @copyright  Copyright (c) 2005-2012 Zend Technologies USA Inc. (http://www.zend.com)
 * @license    http://framework.zend.com/license/new-bsd     New BSD License
 */
class Zend_Pdf_Color_Html extends Zend_Pdf_Color
{
    /**
     * Wrapped concrete color (Zend_Pdf_Color_GrayScale or Zend_Pdf_Color_Rgb)
     * that actually implements the PDF drawing instructions.
     *
     * @var Zend_Pdf_Color
     */
    private $_color;

    /**
     * RGB components (0..1, rounded to 3 decimals) for the well-known
     * HTML/X11 color names, in the order they were historically listed.
     *
     * @var array
     */
    private static $_namedColors = array(
        'aqua'                 => array(0.0,   1.0,   1.0),
        'black'                => array(0.0,   0.0,   0.0),
        'blue'                 => array(0.0,   0.0,   1.0),
        'fuchsia'              => array(1.0,   0.0,   1.0),
        'gray'                 => array(0.502, 0.502, 0.502),
        'green'                => array(0.0,   0.502, 0.0),
        'lime'                 => array(0.0,   1.0,   0.0),
        'maroon'               => array(0.502, 0.0,   0.0),
        'navy'                 => array(0.0,   0.0,   0.502),
        'olive'                => array(0.502, 0.502, 0.0),
        'purple'               => array(0.502, 0.0,   0.502),
        'red'                  => array(1.0,   0.0,   0.0),
        'silver'               => array(0.753, 0.753, 0.753),
        'teal'                 => array(0.0,   0.502, 0.502),
        'white'                => array(1.0,   1.0,   1.0),
        'yellow'               => array(1.0,   1.0,   0.0),
        'aliceblue'            => array(0.941, 0.973, 1.0),
        'antiquewhite'         => array(0.980, 0.922, 0.843),
        'aquamarine'           => array(0.498, 1.0,   0.831),
        'azure'                => array(0.941, 1.0,   1.0),
        'beige'                => array(0.961, 0.961, 0.863),
        'bisque'               => array(1.0,   0.894, 0.769),
        // FIX: was (1.0, 1.0, 0.804) which is lemonchiffon; #FFEBCD:
        'blanchedalmond'       => array(1.0,   0.922, 0.804),
        'blueviolet'           => array(0.541, 0.169, 0.886),
        'brown'                => array(0.647, 0.165, 0.165),
        'burlywood'            => array(0.871, 0.722, 0.529),
        'cadetblue'            => array(0.373, 0.620, 0.627),
        'chartreuse'           => array(0.498, 1.0,   0.0),
        'chocolate'            => array(0.824, 0.412, 0.118),
        'coral'                => array(1.0,   0.498, 0.314),
        'cornflowerblue'       => array(0.392, 0.584, 0.929),
        'cornsilk'             => array(1.0,   0.973, 0.863),
        'crimson'              => array(0.863, 0.078, 0.235),
        'cyan'                 => array(0.0,   1.0,   1.0),
        'darkblue'             => array(0.0,   0.0,   0.545),
        'darkcyan'             => array(0.0,   0.545, 0.545),
        'darkgoldenrod'        => array(0.722, 0.525, 0.043),
        'darkgray'             => array(0.663, 0.663, 0.663),
        'darkgreen'            => array(0.0,   0.392, 0.0),
        'darkkhaki'            => array(0.741, 0.718, 0.420),
        'darkmagenta'          => array(0.545, 0.0,   0.545),
        'darkolivegreen'       => array(0.333, 0.420, 0.184),
        'darkorange'           => array(1.0,   0.549, 0.0),
        'darkorchid'           => array(0.6,   0.196, 0.8),
        'darkred'              => array(0.545, 0.0,   0.0),
        'darksalmon'           => array(0.914, 0.588, 0.478),
        'darkseagreen'         => array(0.561, 0.737, 0.561),
        'darkslateblue'        => array(0.282, 0.239, 0.545),
        'darkslategray'        => array(0.184, 0.310, 0.310),
        'darkturquoise'        => array(0.0,   0.808, 0.820),
        'darkviolet'           => array(0.580, 0.0,   0.827),
        'deeppink'             => array(1.0,   0.078, 0.576),
        'deepskyblue'          => array(0.0,   0.749, 1.0),
        'dimgray'              => array(0.412, 0.412, 0.412),
        'dodgerblue'           => array(0.118, 0.565, 1.0),
        'firebrick'            => array(0.698, 0.133, 0.133),
        'floralwhite'          => array(1.0,   0.980, 0.941),
        'forestgreen'          => array(0.133, 0.545, 0.133),
        'gainsboro'            => array(0.863, 0.863, 0.863),
        'ghostwhite'           => array(0.973, 0.973, 1.0),
        'gold'                 => array(1.0,   0.843, 0.0),
        'goldenrod'            => array(0.855, 0.647, 0.125),
        'greenyellow'          => array(0.678, 1.0,   0.184),
        'honeydew'             => array(0.941, 1.0,   0.941),
        'hotpink'              => array(1.0,   0.412, 0.706),
        'indianred'            => array(0.804, 0.361, 0.361),
        'indigo'               => array(0.294, 0.0,   0.510),
        // FIX: was (1.0, 0.941, 0.941); ivory is #FFFFF0:
        'ivory'                => array(1.0,   1.0,   0.941),
        'khaki'                => array(0.941, 0.902, 0.549),
        'lavender'             => array(0.902, 0.902, 0.980),
        'lavenderblush'        => array(1.0,   0.941, 0.961),
        'lawngreen'            => array(0.486, 0.988, 0.0),
        'lemonchiffon'         => array(1.0,   0.980, 0.804),
        'lightblue'            => array(0.678, 0.847, 0.902),
        'lightcoral'           => array(0.941, 0.502, 0.502),
        'lightcyan'            => array(0.878, 1.0,   1.0),
        'lightgoldenrodyellow' => array(0.980, 0.980, 0.824),
        'lightgreen'           => array(0.565, 0.933, 0.565),
        'lightgrey'            => array(0.827, 0.827, 0.827),
        'lightpink'            => array(1.0,   0.714, 0.757),
        'lightsalmon'          => array(1.0,   0.627, 0.478),
        'lightseagreen'        => array(0.125, 0.698, 0.667),
        'lightskyblue'         => array(0.529, 0.808, 0.980),
        'lightslategray'       => array(0.467, 0.533, 0.6),
        'lightsteelblue'       => array(0.690, 0.769, 0.871),
        'lightyellow'          => array(1.0,   1.0,   0.878),
        'limegreen'            => array(0.196, 0.804, 0.196),
        'linen'                => array(0.980, 0.941, 0.902),
        'magenta'              => array(1.0,   0.0,   1.0),
        'mediumaquamarine'     => array(0.4,   0.804, 0.667),
        'mediumblue'           => array(0.0,   0.0,   0.804),
        'mediumorchid'         => array(0.729, 0.333, 0.827),
        'mediumpurple'         => array(0.576, 0.439, 0.859),
        'mediumseagreen'       => array(0.235, 0.702, 0.443),
        'mediumslateblue'      => array(0.482, 0.408, 0.933),
        'mediumspringgreen'    => array(0.0,   0.980, 0.604),
        'mediumturquoise'      => array(0.282, 0.820, 0.8),
        'mediumvioletred'      => array(0.780, 0.082, 0.522),
        'midnightblue'         => array(0.098, 0.098, 0.439),
        'mintcream'            => array(0.961, 1.0,   0.980),
        'mistyrose'            => array(1.0,   0.894, 0.882),
        'moccasin'             => array(1.0,   0.894, 0.710),
        'navajowhite'          => array(1.0,   0.871, 0.678),
        'oldlace'              => array(0.992, 0.961, 0.902),
        'olivedrab'            => array(0.420, 0.557, 0.137),
        'orange'               => array(1.0,   0.647, 0.0),
        'orangered'            => array(1.0,   0.271, 0.0),
        'orchid'               => array(0.855, 0.439, 0.839),
        'palegoldenrod'        => array(0.933, 0.910, 0.667),
        'palegreen'            => array(0.596, 0.984, 0.596),
        'paleturquoise'        => array(0.686, 0.933, 0.933),
        'palevioletred'        => array(0.859, 0.439, 0.576),
        'papayawhip'           => array(1.0,   0.937, 0.835),
        // FIX: was a duplicate of papayawhip (1.0, 0.937, 0.835); #FFDAB9:
        'peachpuff'            => array(1.0,   0.855, 0.725),
        'peru'                 => array(0.804, 0.522, 0.247),
        'pink'                 => array(1.0,   0.753, 0.796),
        'plum'                 => array(0.867, 0.627, 0.867),
        'powderblue'           => array(0.690, 0.878, 0.902),
        'rosybrown'            => array(0.737, 0.561, 0.561),
        'royalblue'            => array(0.255, 0.412, 0.882),
        'saddlebrown'          => array(0.545, 0.271, 0.075),
        'salmon'               => array(0.980, 0.502, 0.447),
        'sandybrown'           => array(0.957, 0.643, 0.376),
        'seagreen'             => array(0.180, 0.545, 0.341),
        'seashell'             => array(1.0,   0.961, 0.933),
        'sienna'               => array(0.627, 0.322, 0.176),
        'skyblue'              => array(0.529, 0.808, 0.922),
        'slateblue'            => array(0.416, 0.353, 0.804),
        'slategray'            => array(0.439, 0.502, 0.565),
        'snow'                 => array(1.0,   0.980, 0.980),
        'springgreen'          => array(0.0,   1.0,   0.498),
        'steelblue'            => array(0.275, 0.510, 0.706),
        'tan'                  => array(0.824, 0.706, 0.549),
        'thistle'              => array(0.847, 0.749, 0.847),
        'tomato'               => array(0.992, 0.388, 0.278),
        'turquoise'            => array(0.251, 0.878, 0.816),
        'violet'               => array(0.933, 0.510, 0.933),
        'wheat'                => array(0.961, 0.871, 0.702),
        'whitesmoke'           => array(0.961, 0.961, 0.961),
        'yellowgreen'          => array(0.604, 0.804, 0.196),
    );

    /**
     * Class constructor.
     *
     * @param mixed $color  HTML color representation (hex triplet or name)
     * @throws Zend_Pdf_Exception  when the representation is not recognized
     */
    public function __construct($color)
    {
        $this->_color = self::color($color);
    }

    /**
     * Instructions, which can be directly inserted into content stream
     * to switch color.
     * Color set instructions differ for stroking and nonstroking operations.
     *
     * @param boolean $stroking
     * @return string
     */
    public function instructions($stroking)
    {
        return $this->_color->instructions($stroking);
    }

    /**
     * Get color components (color space dependent)
     *
     * @return array
     */
    public function getComponents()
    {
        return $this->_color->getComponents();
    }

    /**
     * Creates a Zend_Pdf_Color object from the HTML representation.
     *
     * @param string $color  May be a hexadecimal number of the form #rrggbb,
     *                       the CSS shorthand #rgb, or one of the well-known
     *                       names (black, white, blue, etc.)
     * @return Zend_Pdf_Color
     * @throws Zend_Pdf_Exception  when the representation is not recognized
     */
    public static function color($color)
    {
        if (preg_match('/^#([A-Fa-f0-9]{2})([A-Fa-f0-9]{2})([A-Fa-f0-9]{2})$/', $color, $matches)) {
            $r = round((hexdec($matches[1]) / 255), 3);
            $g = round((hexdec($matches[2]) / 255), 3);
            $b = round((hexdec($matches[3]) / 255), 3);
            return self::_fromRgb($r, $g, $b);
        }

        // CSS shorthand: #rgb expands each digit, so #f80 === #ff8800.
        if (preg_match('/^#([A-Fa-f0-9])([A-Fa-f0-9])([A-Fa-f0-9])$/', $color, $matches)) {
            $r = round((hexdec($matches[1] . $matches[1]) / 255), 3);
            $g = round((hexdec($matches[2] . $matches[2]) / 255), 3);
            $b = round((hexdec($matches[3] . $matches[3]) / 255), 3);
            return self::_fromRgb($r, $g, $b);
        }

        return Zend_Pdf_Color_Html::namedColor($color);
    }

    /**
     * Creates a Zend_Pdf_Color object from the named color.
     *
     * @param string $color  One of the well-known color names (black, white,
     *                       blue, etc.); case-insensitive
     * @return Zend_Pdf_Color
     * @throws Zend_Pdf_Exception  when the name is unknown
     */
    public static function namedColor($color)
    {
        $name = strtolower($color);
        if (!isset(self::$_namedColors[$name])) {
            require_once 'Zend/Pdf/Exception.php';
            throw new Zend_Pdf_Exception('Unknown color name: ' . $color);
        }
        list($r, $g, $b) = self::$_namedColors[$name];
        return self::_fromRgb($r, $g, $b);
    }

    /**
     * Build the most specific concrete color for an RGB triplet: equal
     * components collapse to a single-channel gray scale color.
     *
     * @param float $r
     * @param float $g
     * @param float $b
     * @return Zend_Pdf_Color
     */
    private static function _fromRgb($r, $g, $b)
    {
        if (($r == $g) && ($g == $b)) {
            require_once 'Zend/Pdf/Color/GrayScale.php';
            return new Zend_Pdf_Color_GrayScale($r);
        }
        require_once 'Zend/Pdf/Color/Rgb.php';
        return new Zend_Pdf_Color_Rgb($r, $g, $b);
    }
}
svn2github/zend_framework
library/Zend/Pdf/Color/Html.php
PHP
bsd-3-clause
16,236
#include "main.h"
#include "File.h"
#include "Geometry.h"

namespace ImageStack {
namespace FileTGA {

/*
  TGA 18-byte header layout, for reference:
    identsize, colormaptype, imagetype, colormap[5],
    xstart, ystart, width, height (little-endian shorts),
    bits-per-pixel, descriptor (orientation / alpha-depth bits)
*/

void help() {
    printf(".tga files. These can have 1, 3, or 4 channels, are run-length encoded, and\n"
           "are low dynamic range.\n");
}

// Decode a .tga file into an Image. Supports 8-bit gray and 24/32-bit
// BGR(A), stored raw or run-length encoded, without a color map.
Image load(string filename) {
    FILE *f = fopen(filename.c_str(), "rb");
    assert(f, "Could not open file %s\n", filename.c_str());

    unsigned char identsize, colormaptype, imagetype, bits;
    int width, height;

    identsize = fgetc(f);     // length of the image ID field after the header
    colormaptype = fgetc(f);  // 0 = no palette, 1 = palettized
    imagetype = fgetc(f);     // 2/3 = raw rgb/gray, 10/11 = RLE rgb/gray

    // skip the colormap specification (5 bytes)
    for (int i = 0; i < 5; i++) { fgetc(f); }

    // skip xstart and ystart (two little-endian shorts)
    for (int i = 0; i < 4; i++) { fgetc(f); }

    // width and height are little-endian shorts
    width = fgetc(f);
    width += (fgetc(f) << 8);
    height = fgetc(f);
    height += (fgetc(f) << 8);

    bits = fgetc(f);

    // skip the descriptor byte
    fgetc(f);

    // skip the ident field
    for (int i = 0; i < identsize; i++) { fgetc(f); }

    // check the colormaptype
    assert(colormaptype == 0, "ImageStack can't read tgas with a color map");

    int channels = 0;
    bool rle = false;
    switch (imagetype) {
    case 2:  // raw rgb
        channels = 3; rle = false; break;
    case 3:  // raw gray
        channels = 1; rle = false; break;
    case 10: // rle rgb
        channels = 3; rle = true; break;
    case 11: // rle gray
        channels = 1; rle = true; break;
    default:
        panic("ImageStack can't load this type of tga (type %i)\n", imagetype);
    }

    // a 32-bit rgb image carries an alpha channel
    if (bits == 32 && channels == 3) { channels = 4; }

    assert(bits == 8 * channels,
           "ImageStack only supports 8 bits per channel tgas (this one has %i bits for %i channels)\n",
           bits, channels);

    Image im(width, height, 1, channels);

    // Rows are stored bottom-up by default, so we flip at the end.
    // NOTE(review): the descriptor byte is read but discarded above; honoring
    // its vertical-orientation bit would be more faithful — confirm intent.
    bool vflip = true; //!(descriptor & 0x10);

    if (!rle && channels == 1) {
        for (int y = 0; y < height; y++) {
            for (int x = 0; x < width; x++) {
                im(x, y) = LDRtoHDR(fgetc(f));
            }
        }
    } else if (!rle && channels == 3) {
        // pixels are stored BGR
        for (int y = 0; y < height; y++) {
            for (int x = 0; x < width; x++) {
                im(x, y, 2) = LDRtoHDR(fgetc(f));
                im(x, y, 1) = LDRtoHDR(fgetc(f));
                im(x, y, 0) = LDRtoHDR(fgetc(f));
            }
        }
    } else if (!rle && channels == 4) {
        // pixels are stored BGRA
        for (int y = 0; y < height; y++) {
            for (int x = 0; x < width; x++) {
                im(x, y, 2) = LDRtoHDR(fgetc(f));
                im(x, y, 1) = LDRtoHDR(fgetc(f));
                im(x, y, 0) = LDRtoHDR(fgetc(f));
                im(x, y, 3) = LDRtoHDR(fgetc(f));
            }
        }
    } else if (rle && channels == 1) {
        for (int x = 0, y = 0; y < height;) {
            unsigned char ch = fgetc(f);
            // FIX: TGA packet headers store (count - 1); both packet kinds
            // cover (ch & 0x7f) + 1 pixels. Without the +1 every
            // spec-conforming RLE image decoded shifted, and a zero-count
            // packet made no progress (infinite loop).
            int runlength = (ch & 0x7f) + 1;
            if (ch & 0x80) { // run-length packet: one pixel, repeated
                float val = LDRtoHDR(fgetc(f));
                for (int j = 0; j < runlength; j++) {
                    im(x, y) = val;
                    x++;
                    if (x == width) {x = 0; y++;}
                }
            } else { // raw packet: runlength literal pixels
                for (int j = 0; j < runlength; j++) {
                    im(x, y) = LDRtoHDR(fgetc(f));
                    x++;
                    if (x == width) {x = 0; y++;}
                }
            }
        }
    } else if (rle && channels == 3) {
        for (int x = 0, y = 0; y < height;) {
            unsigned char ch = fgetc(f);
            // FIX: (count - 1) encoding, see the gray RLE branch above.
            int runlength = (ch & 0x7f) + 1;
            if (ch & 0x80) { // run-length packet
                float b = LDRtoHDR(fgetc(f));
                float g = LDRtoHDR(fgetc(f));
                float r = LDRtoHDR(fgetc(f));
                for (int j = 0; j < runlength; j++) {
                    im(x, y, 0) = r;
                    im(x, y, 1) = g;
                    im(x, y, 2) = b;
                    x++;
                    if (x == width) {x = 0; y++;}
                }
            } else { // raw packet
                for (int j = 0; j < runlength; j++) {
                    im(x, y, 2) = LDRtoHDR(fgetc(f));
                    im(x, y, 1) = LDRtoHDR(fgetc(f));
                    im(x, y, 0) = LDRtoHDR(fgetc(f));
                    x++;
                    if (x == width) {x = 0; y++;}
                }
            }
        }
    } else if (rle && channels == 4) {
        for (int x = 0, y = 0; y < height;) {
            unsigned char ch = fgetc(f);
            // FIX: (count - 1) encoding, see the gray RLE branch above.
            int runlength = (ch & 0x7f) + 1;
            if (ch & 0x80) { // run-length packet
                float b = LDRtoHDR(fgetc(f));
                float g = LDRtoHDR(fgetc(f));
                float r = LDRtoHDR(fgetc(f));
                float a = LDRtoHDR(fgetc(f));
                for (int j = 0; j < runlength; j++) {
                    im(x, y, 0) = r;
                    im(x, y, 1) = g;
                    im(x, y, 2) = b;
                    im(x, y, 3) = a;
                    x++;
                    if (x == width) {x = 0; y++;}
                }
            } else { // raw packet
                for (int j = 0; j < runlength; j++) {
                    im(x, y, 0) = LDRtoHDR(fgetc(f));
                    im(x, y, 1) = LDRtoHDR(fgetc(f));
                    im(x, y, 2) = LDRtoHDR(fgetc(f));
                    im(x, y, 3) = LDRtoHDR(fgetc(f));
                    x++;
                    if (x == width) {x = 0; y++;}
                }
            }
        }
    }

    fclose(f);

    if (vflip) Flip::apply(im, 'y');

    return im;
}

// Encode a single-frame 1/3/4-channel Image as an uncompressed .tga,
// writing rows bottom-up as the format's default orientation expects.
void save(Image im, string filename) {
    FILE *f = fopen(filename.c_str(), "wb");
    assert(f, "Could not open file %s\n", filename.c_str());
    assert(im.frames == 1, "can only save single frame tgas\n");
    assert(im.channels == 4 || im.channels == 3 || im.channels == 1,
           "can only save tgas with one, three, or four channels\n");

    // 18-byte header
    fputc(0, f); // identsize
    fputc(0, f); // colormaptype
    fputc(im.channels == 1 ? 3 : 2, f); // gray or rgb, uncompressed
    fputc(0, f); // colormap spec (5 bytes)
    fputc(0, f);
    fputc(0, f);
    fputc(0, f);
    fputc(0, f);
    fputc(0, f); // x origin
    fputc(0, f);
    fputc(0, f); // y origin
    fputc(0, f);
    fputc(im.width & 255, f);         // width, little-endian
    fputc((im.width >> 8) & 255, f);
    fputc(im.height & 255, f);        // height, little-endian
    fputc((im.height >> 8) & 255, f);
    fputc(im.channels * 8, f); // bits per pixel
    fputc(0, f); // descriptor

    // pixel data, bottom row first, BGR(A) channel order
    if (im.channels == 1) {
        for (int y = im.height-1; y >= 0; y--) {
            for (int x = 0; x < im.width; x++) {
                fputc(HDRtoLDR(im(x, y)), f);
            }
        }
    } else if (im.channels == 3) {
        for (int y = im.height-1; y >= 0; y--) {
            for (int x = 0; x < im.width; x++) {
                fputc(HDRtoLDR(im(x, y, 2)), f);
                fputc(HDRtoLDR(im(x, y, 1)), f);
                fputc(HDRtoLDR(im(x, y, 0)), f);
            }
        }
    } else if (im.channels == 4) {
        for (int y = im.height-1; y >= 0; y--) {
            for (int x = 0; x < im.width; x++) {
                fputc(HDRtoLDR(im(x, y, 2)), f);
                fputc(HDRtoLDR(im(x, y, 1)), f);
                fputc(HDRtoLDR(im(x, y, 0)), f);
                fputc(HDRtoLDR(im(x, y, 3)), f);
            }
        }
    }

    fclose(f);
}

}
}
ricoyoung345/imagestack
src/FileTGA.cpp
C++
bsd-3-clause
8,011